From 02c503ff237cdcd8e012a122a638295550db10a5 Mon Sep 17 00:00:00 2001
From: Martin Schwidefsky
Date: Mon, 28 Nov 2016 15:50:48 +0100
Subject: s390/spinlock: use atomic primitives for spinlocks

Add a couple more __atomic_xxx functions to atomic_ops.h and use them
to replace the compare-and-swap inlines in the spinlock code. This
changes the type of the lock value from unsigned int to int.

Signed-off-by: Martin Schwidefsky
---
 arch/s390/include/asm/atomic_ops.h | 22 ++++++++++++----------
 1 file changed, 12 insertions(+), 10 deletions(-)

(limited to 'arch/s390/include/asm/atomic_ops.h')

diff --git a/arch/s390/include/asm/atomic_ops.h b/arch/s390/include/asm/atomic_ops.h
index ac9e2b939d04..ba6d29412344 100644
--- a/arch/s390/include/asm/atomic_ops.h
+++ b/arch/s390/include/asm/atomic_ops.h
@@ -111,20 +111,22 @@ __ATOMIC64_OPS(__atomic64_xor, "xgr")
 
 static inline int __atomic_cmpxchg(int *ptr, int old, int new)
 {
-	asm volatile(
-		"	cs	%[old],%[new],%[ptr]"
-		: [old] "+d" (old), [ptr] "+Q" (*ptr)
-		: [new] "d" (new) : "cc", "memory");
-	return old;
+	return __sync_val_compare_and_swap(ptr, old, new);
+}
+
+static inline int __atomic_cmpxchg_bool(int *ptr, int old, int new)
+{
+	return __sync_bool_compare_and_swap(ptr, old, new);
 }
 
 static inline long __atomic64_cmpxchg(long *ptr, long old, long new)
 {
-	asm volatile(
-		"	csg	%[old],%[new],%[ptr]"
-		: [old] "+d" (old), [ptr] "+Q" (*ptr)
-		: [new] "d" (new) : "cc", "memory");
-	return old;
+	return __sync_val_compare_and_swap(ptr, old, new);
+}
+
+static inline long __atomic64_cmpxchg_bool(long *ptr, long old, long new)
+{
+	return __sync_bool_compare_and_swap(ptr, old, new);
 }
 
 #endif /* __ARCH_S390_ATOMIC_OPS__ */
--
cgit
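
For context, here is a minimal sketch (not part of this patch) of how the new
__atomic_cmpxchg_bool() helper can back a spinlock fast path, as the commit
message describes. The demo_* names, the 0-means-free convention and the
explicit lock value are illustrative assumptions; the actual spinlock changes
are outside this limited view of the commit.

/*
 * Sketch only: a trylock/unlock pair built on __atomic_cmpxchg_bool().
 * Compile with a GCC-compatible compiler; the __sync_* builtins are used
 * exactly as in the patch above. demo_spinlock_t and the lock values are
 * hypothetical, not the kernel's arch_spinlock_t.
 */
typedef struct {
	int lock;	/* 0: free, non-zero: value identifying the owner */
} demo_spinlock_t;

static inline int __atomic_cmpxchg_bool(int *ptr, int old, int new)
{
	return __sync_bool_compare_and_swap(ptr, old, new);
}

/* Try once to take the lock; returns 1 on success, 0 if already held. */
static inline int demo_spin_trylock(demo_spinlock_t *lp, int lockval)
{
	return __atomic_cmpxchg_bool(&lp->lock, 0, lockval);
}

static inline void demo_spin_unlock(demo_spinlock_t *lp)
{
	__sync_lock_release(&lp->lock);	/* store 0 with release semantics */
}

A caller would spin on demo_spin_trylock(lp, cpu_id + 1) until it succeeds;
the signed int lock word matches the int-based signatures of the helpers
introduced by this patch.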