Diffstat (limited to 'arch/sparc/lib/atomic32.c')
| -rw-r--r-- | arch/sparc/lib/atomic32.c | 105 |
1 file changed, 84 insertions(+), 21 deletions(-)
diff --git a/arch/sparc/lib/atomic32.c b/arch/sparc/lib/atomic32.c
index 1d32b54089aa..8ae880ebf07a 100644
--- a/arch/sparc/lib/atomic32.c
+++ b/arch/sparc/lib/atomic32.c
@@ -1,3 +1,4 @@
+// SPDX-License-Identifier: GPL-2.0
 /*
  * atomic32.c: 32-bit atomic_t implementation
  *
@@ -27,20 +28,59 @@ static DEFINE_SPINLOCK(dummy);
 
 #endif /* SMP */
 
-int __atomic_add_return(int i, atomic_t *v)
+#define ATOMIC_FETCH_OP(op, c_op)				\
+int arch_atomic_fetch_##op(int i, atomic_t *v)			\
+{								\
+	int ret;						\
+	unsigned long flags;					\
+	spin_lock_irqsave(ATOMIC_HASH(v), flags);		\
+								\
+	ret = v->counter;					\
+	v->counter c_op i;					\
+								\
+	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);		\
+	return ret;						\
+}								\
+EXPORT_SYMBOL(arch_atomic_fetch_##op);
+
+#define ATOMIC_OP_RETURN(op, c_op)				\
+int arch_atomic_##op##_return(int i, atomic_t *v)		\
+{								\
+	int ret;						\
+	unsigned long flags;					\
+	spin_lock_irqsave(ATOMIC_HASH(v), flags);		\
+								\
+	ret = (v->counter c_op i);				\
+								\
+	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);		\
+	return ret;						\
+}								\
+EXPORT_SYMBOL(arch_atomic_##op##_return);
+
+ATOMIC_OP_RETURN(add, +=)
+
+ATOMIC_FETCH_OP(add, +=)
+ATOMIC_FETCH_OP(and, &=)
+ATOMIC_FETCH_OP(or, |=)
+ATOMIC_FETCH_OP(xor, ^=)
+
+#undef ATOMIC_FETCH_OP
+#undef ATOMIC_OP_RETURN
+
+int arch_atomic_xchg(atomic_t *v, int new)
 {
 	int ret;
 	unsigned long flags;
-	spin_lock_irqsave(ATOMIC_HASH(v), flags);
-
-	ret = (v->counter += i);
+	spin_lock_irqsave(ATOMIC_HASH(v), flags);
+	ret = v->counter;
+	v->counter = new;
 
 	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
 	return ret;
 }
-EXPORT_SYMBOL(__atomic_add_return);
+EXPORT_SYMBOL(arch_atomic_xchg);
 
-int atomic_cmpxchg(atomic_t *v, int old, int new)
+int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
 {
 	int ret;
 	unsigned long flags;
@@ -53,9 +93,9 @@ int atomic_cmpxchg(atomic_t *v, int old, int new)
 	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
 	return ret;
 }
-EXPORT_SYMBOL(atomic_cmpxchg);
+EXPORT_SYMBOL(arch_atomic_cmpxchg);
 
-int __atomic_add_unless(atomic_t *v, int a, int u)
+int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
 {
 	int ret;
 	unsigned long flags;
@@ -67,10 +107,10 @@ int __atomic_add_unless(atomic_t *v, int a, int u)
 	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
 	return ret;
 }
-EXPORT_SYMBOL(__atomic_add_unless);
+EXPORT_SYMBOL(arch_atomic_fetch_add_unless);
 
 /* Atomic operations are already serializing */
-void atomic_set(atomic_t *v, int i)
+void arch_atomic_set(atomic_t *v, int i)
 {
 	unsigned long flags;
 
@@ -78,9 +118,9 @@ void atomic_set(atomic_t *v, int i)
 	v->counter = i;
 	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
 }
-EXPORT_SYMBOL(atomic_set);
+EXPORT_SYMBOL(arch_atomic_set);
 
-unsigned long ___set_bit(unsigned long *addr, unsigned long mask)
+unsigned long sp32___set_bit(unsigned long *addr, unsigned long mask)
 {
 	unsigned long old, flags;
 
@@ -91,9 +131,9 @@ unsigned long ___set_bit(unsigned long *addr, unsigned long mask)
 
 	return old & mask;
 }
-EXPORT_SYMBOL(___set_bit);
+EXPORT_SYMBOL(sp32___set_bit);
 
-unsigned long ___clear_bit(unsigned long *addr, unsigned long mask)
+unsigned long sp32___clear_bit(unsigned long *addr, unsigned long mask)
 {
 	unsigned long old, flags;
 
@@ -104,9 +144,9 @@ unsigned long ___clear_bit(unsigned long *addr, unsigned long mask)
 
 	return old & mask;
 }
-EXPORT_SYMBOL(___clear_bit);
+EXPORT_SYMBOL(sp32___clear_bit);
 
-unsigned long ___change_bit(unsigned long *addr, unsigned long mask)
+unsigned long sp32___change_bit(unsigned long *addr, unsigned long mask)
 {
 	unsigned long old, flags;
 
@@ -117,18 +157,41 @@ unsigned long ___change_bit(unsigned long *addr, unsigned long mask)
 
 	return old & mask;
 }
-EXPORT_SYMBOL(___change_bit);
+EXPORT_SYMBOL(sp32___change_bit);
+
+#define CMPXCHG(T)						\
+	T __cmpxchg_##T(volatile T *ptr, T old, T new)		\
+	{							\
+		unsigned long flags;				\
+		T prev;						\
+								\
+		spin_lock_irqsave(ATOMIC_HASH(ptr), flags);	\
+		if ((prev = *ptr) == old)			\
+			*ptr = new;				\
+		spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);\
+								\
+		return prev;					\
+	}
+
+CMPXCHG(u8)
+CMPXCHG(u16)
+CMPXCHG(u32)
+CMPXCHG(u64)
+EXPORT_SYMBOL(__cmpxchg_u8);
+EXPORT_SYMBOL(__cmpxchg_u16);
+EXPORT_SYMBOL(__cmpxchg_u32);
+EXPORT_SYMBOL(__cmpxchg_u64);
 
-unsigned long __cmpxchg_u32(volatile u32 *ptr, u32 old, u32 new)
+unsigned long __xchg_u32(volatile u32 *ptr, u32 new)
 {
 	unsigned long flags;
 	u32 prev;
 
 	spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
-	if ((prev = *ptr) == old)
-		*ptr = new;
+	prev = *ptr;
+	*ptr = new;
 	spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);
 
 	return (unsigned long)prev;
 }
-EXPORT_SYMBOL(__cmpxchg_u32);
+EXPORT_SYMBOL(__xchg_u32);
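For context: sparc32 lacks a native compare-and-swap instruction, so this file emulates the atomic and bitop primitives in software. Each object's address is hashed into a small array of spinlocks via ATOMIC_HASH(), and the read-modify-write runs with that bucket's lock held and interrupts disabled; on UP kernels a single dummy lock stands in, as the "#endif /* SMP */" context above shows. Below is a minimal userspace sketch of the same pattern, with pthread mutexes standing in for the kernel's hashed spinlocks. The emu_* names and EMU_HASH are hypothetical illustrations, not kernel APIs; build with cc -pthread.

/*
 * Illustrative sketch of the hash-locked atomic emulation pattern.
 * All names here are made up for the example.
 */
#include <pthread.h>
#include <stdio.h>

#define EMU_HASH_SIZE 4
static pthread_mutex_t emu_locks[EMU_HASH_SIZE] = {
	PTHREAD_MUTEX_INITIALIZER, PTHREAD_MUTEX_INITIALIZER,
	PTHREAD_MUTEX_INITIALIZER, PTHREAD_MUTEX_INITIALIZER,
};

/* Hash an object's address to one lock bucket, like ATOMIC_HASH(). */
#define EMU_HASH(addr) \
	(&emu_locks[((unsigned long)(addr) >> 4) % EMU_HASH_SIZE])

typedef struct { int counter; } emu_atomic_t;

/* Generate fetch-style ops from one template, like ATOMIC_FETCH_OP. */
#define EMU_FETCH_OP(op, c_op)				\
int emu_fetch_##op(int i, emu_atomic_t *v)		\
{							\
	int ret;					\
	pthread_mutex_lock(EMU_HASH(v));		\
	ret = v->counter;   /* return the old value */	\
	v->counter c_op i;  /* then apply the update */	\
	pthread_mutex_unlock(EMU_HASH(v));		\
	return ret;					\
}

EMU_FETCH_OP(add, +=)
EMU_FETCH_OP(or, |=)

/* cmpxchg under the same bucket lock: store only if *v still holds old. */
int emu_cmpxchg(emu_atomic_t *v, int old, int new)
{
	int prev;

	pthread_mutex_lock(EMU_HASH(v));
	if ((prev = v->counter) == old)
		v->counter = new;
	pthread_mutex_unlock(EMU_HASH(v));
	return prev;
}

int main(void)
{
	emu_atomic_t v = { 5 };

	printf("fetch_add(3) returned %d, counter now %d\n",
	       emu_fetch_add(3, &v), v.counter);
	printf("cmpxchg(8 -> 0) returned %d, counter now %d\n",
	       emu_cmpxchg(&v, 8, 0), v.counter);
	return 0;
}

The design point the sketch illustrates: hashing the address spreads unrelated objects across EMU_HASH_SIZE locks so they rarely contend, while every operation on a given object always maps to the same lock, so its read-modify-write sequences serialize correctly. The CMPXCHG(T) macro in the diff applies the identical pattern uniformly across the u8/u16/u32/u64 widths.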
