Diffstat (limited to 'arch/s390/include/asm/atomic.h')
-rw-r--r--	arch/s390/include/asm/atomic.h	270
1 file changed, 142 insertions, 128 deletions
diff --git a/arch/s390/include/asm/atomic.h b/arch/s390/include/asm/atomic.h
index f7f69dfd2db2..b36dd6a1d652 100644
--- a/arch/s390/include/asm/atomic.h
+++ b/arch/s390/include/asm/atomic.h
@@ -1,3 +1,4 @@
+/* SPDX-License-Identifier: GPL-2.0 */
 /*
  * Copyright IBM Corp. 1999, 2016
  * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>,
@@ -14,64 +15,76 @@
 #include <asm/barrier.h>
 #include <asm/cmpxchg.h>

-#define ATOMIC_INIT(i)  { (i) }
-
-static inline int atomic_read(const atomic_t *v)
+static __always_inline int arch_atomic_read(const atomic_t *v)
 {
-	int c;
-
-	asm volatile(
-		"	l	%0,%1\n"
-		: "=d" (c) : "Q" (v->counter));
-	return c;
+	return __atomic_read(&v->counter);
 }
+#define arch_atomic_read arch_atomic_read

-static inline void atomic_set(atomic_t *v, int i)
+static __always_inline void arch_atomic_set(atomic_t *v, int i)
 {
-	asm volatile(
-		"	st	%1,%0\n"
-		: "=Q" (v->counter) : "d" (i));
+	__atomic_set(&v->counter, i);
 }
+#define arch_atomic_set arch_atomic_set

-static inline int atomic_add_return(int i, atomic_t *v)
+static __always_inline int arch_atomic_add_return(int i, atomic_t *v)
 {
 	return __atomic_add_barrier(i, &v->counter) + i;
 }
+#define arch_atomic_add_return arch_atomic_add_return

-static inline int atomic_fetch_add(int i, atomic_t *v)
+static __always_inline int arch_atomic_fetch_add(int i, atomic_t *v)
 {
 	return __atomic_add_barrier(i, &v->counter);
 }
+#define arch_atomic_fetch_add arch_atomic_fetch_add

-static inline void atomic_add(int i, atomic_t *v)
+static __always_inline void arch_atomic_add(int i, atomic_t *v)
 {
-#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
-	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
-		__atomic_add_const(i, &v->counter);
-		return;
-	}
-#endif
 	__atomic_add(i, &v->counter);
 }
+#define arch_atomic_add arch_atomic_add
+
+static __always_inline void arch_atomic_inc(atomic_t *v)
+{
+	__atomic_add_const(1, &v->counter);
+}
+#define arch_atomic_inc arch_atomic_inc
+
+static __always_inline void arch_atomic_dec(atomic_t *v)
+{
+	__atomic_add_const(-1, &v->counter);
+}
+#define arch_atomic_dec arch_atomic_dec
+
+static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v)
+{
+	return __atomic_add_and_test_barrier(-i, &v->counter);
+}
+#define arch_atomic_sub_and_test arch_atomic_sub_and_test

-#define atomic_add_negative(_i, _v)	(atomic_add_return(_i, _v) < 0)
-#define atomic_inc(_v)			atomic_add(1, _v)
-#define atomic_inc_return(_v)		atomic_add_return(1, _v)
-#define atomic_inc_and_test(_v)		(atomic_add_return(1, _v) == 0)
-#define atomic_sub(_i, _v)		atomic_add(-(int)(_i), _v)
-#define atomic_sub_return(_i, _v)	atomic_add_return(-(int)(_i), _v)
-#define atomic_fetch_sub(_i, _v)	atomic_fetch_add(-(int)(_i), _v)
-#define atomic_sub_and_test(_i, _v)	(atomic_sub_return(_i, _v) == 0)
-#define atomic_dec(_v)			atomic_sub(1, _v)
-#define atomic_dec_return(_v)		atomic_sub_return(1, _v)
-#define atomic_dec_and_test(_v)		(atomic_sub_return(1, _v) == 0)
+static __always_inline bool arch_atomic_dec_and_test(atomic_t *v)
+{
+	return __atomic_add_const_and_test_barrier(-1, &v->counter);
+}
+#define arch_atomic_dec_and_test arch_atomic_dec_and_test
+
+static __always_inline bool arch_atomic_inc_and_test(atomic_t *v)
+{
+	return __atomic_add_const_and_test_barrier(1, &v->counter);
+}
+#define arch_atomic_inc_and_test arch_atomic_inc_and_test
+
+#define arch_atomic_sub(_i, _v)		arch_atomic_add(-(int)(_i), _v)
+#define arch_atomic_sub_return(_i, _v)	arch_atomic_add_return(-(int)(_i), _v)
+#define arch_atomic_fetch_sub(_i, _v)	arch_atomic_fetch_add(-(int)(_i), _v)

 #define ATOMIC_OPS(op)							\
-static inline void atomic_##op(int i, atomic_t *v)			\
+static __always_inline void arch_atomic_##op(int i, atomic_t *v)	\
 {									\
 	__atomic_##op(i, &v->counter);					\
 }									\
-static inline int atomic_fetch_##op(int i, atomic_t *v)		\
+static __always_inline int arch_atomic_fetch_##op(int i, atomic_t *v)	\
 {									\
 	return __atomic_##op##_barrier(i, &v->counter);			\
 }
@@ -82,83 +95,119 @@ ATOMIC_OPS(xor)

 #undef ATOMIC_OPS

-#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+#define arch_atomic_and			arch_atomic_and
+#define arch_atomic_or			arch_atomic_or
+#define arch_atomic_xor			arch_atomic_xor
+#define arch_atomic_fetch_and		arch_atomic_fetch_and
+#define arch_atomic_fetch_or		arch_atomic_fetch_or
+#define arch_atomic_fetch_xor		arch_atomic_fetch_xor

-static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
+static __always_inline int arch_atomic_xchg(atomic_t *v, int new)
 {
-	return __atomic_cmpxchg(&v->counter, old, new);
+	return arch_xchg(&v->counter, new);
 }
+#define arch_atomic_xchg arch_atomic_xchg

-static inline int __atomic_add_unless(atomic_t *v, int a, int u)
+static __always_inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
 {
-	int c, old;
-	c = atomic_read(v);
-	for (;;) {
-		if (unlikely(c == u))
-			break;
-		old = atomic_cmpxchg(v, c, c + a);
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c;
+	return arch_cmpxchg(&v->counter, old, new);
 }
+#define arch_atomic_cmpxchg arch_atomic_cmpxchg
+
+static __always_inline bool arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
+{
+	return arch_try_cmpxchg(&v->counter, old, new);
+}
+#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg

 #define ATOMIC64_INIT(i)  { (i) }

-static inline long atomic64_read(const atomic64_t *v)
+static __always_inline s64 arch_atomic64_read(const atomic64_t *v)
 {
-	long c;
+	return __atomic64_read((long *)&v->counter);
+}
+#define arch_atomic64_read arch_atomic64_read

-	asm volatile(
-		"	lg	%0,%1\n"
-		: "=d" (c) : "Q" (v->counter));
-	return c;
+static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i)
+{
+	__atomic64_set((long *)&v->counter, i);
 }
+#define arch_atomic64_set arch_atomic64_set

-static inline void atomic64_set(atomic64_t *v, long i)
+static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
 {
-	asm volatile(
-		"	stg	%1,%0\n"
-		: "=Q" (v->counter) : "d" (i));
+	return __atomic64_add_barrier(i, (long *)&v->counter) + i;
 }
+#define arch_atomic64_add_return arch_atomic64_add_return

-static inline long atomic64_add_return(long i, atomic64_t *v)
+static __always_inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
 {
-	return __atomic64_add_barrier(i, &v->counter) + i;
+	return __atomic64_add_barrier(i, (long *)&v->counter);
 }
+#define arch_atomic64_fetch_add arch_atomic64_fetch_add

-static inline long atomic64_fetch_add(long i, atomic64_t *v)
+static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
 {
-	return __atomic64_add_barrier(i, &v->counter);
+	__atomic64_add(i, (long *)&v->counter);
 }
+#define arch_atomic64_add arch_atomic64_add

-static inline void atomic64_add(long i, atomic64_t *v)
+static __always_inline void arch_atomic64_inc(atomic64_t *v)
 {
-#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
-	if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
-		__atomic64_add_const(i, &v->counter);
-		return;
-	}
-#endif
-	__atomic64_add(i, &v->counter);
+	__atomic64_add_const(1, (long *)&v->counter);
 }
+#define arch_atomic64_inc arch_atomic64_inc

-#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+static __always_inline void arch_atomic64_dec(atomic64_t *v)
+{
+	__atomic64_add_const(-1, (long *)&v->counter);
+}
+#define arch_atomic64_dec arch_atomic64_dec

-static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
+static __always_inline bool arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
 {
-	return __atomic64_cmpxchg(&v->counter, old, new);
+	return __atomic64_add_and_test_barrier(-i, (long *)&v->counter);
 }
+#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test

-#define ATOMIC64_OPS(op)						\
-static inline void atomic64_##op(long i, atomic64_t *v)		\
-{									\
-	__atomic64_##op(i, &v->counter);				\
-}									\
-static inline long atomic64_fetch_##op(long i, atomic64_t *v)		\
-{									\
-	return __atomic64_##op##_barrier(i, &v->counter);		\
+static __always_inline bool arch_atomic64_dec_and_test(atomic64_t *v)
+{
+	return __atomic64_add_const_and_test_barrier(-1, (long *)&v->counter);
+}
+#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
+
+static __always_inline bool arch_atomic64_inc_and_test(atomic64_t *v)
+{
+	return __atomic64_add_const_and_test_barrier(1, (long *)&v->counter);
+}
+#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
+
+static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 new)
+{
+	return arch_xchg(&v->counter, new);
+}
+#define arch_atomic64_xchg arch_atomic64_xchg
+
+static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
+{
+	return arch_cmpxchg(&v->counter, old, new);
+}
+#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
+
+static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
+{
+	return arch_try_cmpxchg(&v->counter, old, new);
+}
+#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
+
+#define ATOMIC64_OPS(op)						\
+static __always_inline void arch_atomic64_##op(s64 i, atomic64_t *v)	\
+{									\
+	__atomic64_##op(i, (long *)&v->counter);			\
+}									\
+static __always_inline long arch_atomic64_fetch_##op(s64 i, atomic64_t *v) \
+{									\
+	return __atomic64_##op##_barrier(i, (long *)&v->counter);	\
 }

 ATOMIC64_OPS(and)
@@ -167,50 +216,15 @@ ATOMIC64_OPS(xor)

 #undef ATOMIC64_OPS

-static inline int atomic64_add_unless(atomic64_t *v, long i, long u)
-{
-	long c, old;
-
-	c = atomic64_read(v);
-	for (;;) {
-		if (unlikely(c == u))
-			break;
-		old = atomic64_cmpxchg(v, c, c + i);
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return c != u;
-}
-
-static inline long atomic64_dec_if_positive(atomic64_t *v)
-{
-	long c, old, dec;
-
-	c = atomic64_read(v);
-	for (;;) {
-		dec = c - 1;
-		if (unlikely(dec < 0))
-			break;
-		old = atomic64_cmpxchg((v), c, dec);
-		if (likely(old == c))
-			break;
-		c = old;
-	}
-	return dec;
-}
-
-#define atomic64_add_negative(_i, _v)	(atomic64_add_return(_i, _v) < 0)
-#define atomic64_inc(_v)		atomic64_add(1, _v)
-#define atomic64_inc_return(_v)		atomic64_add_return(1, _v)
-#define atomic64_inc_and_test(_v)	(atomic64_add_return(1, _v) == 0)
-#define atomic64_sub_return(_i, _v)	atomic64_add_return(-(long)(_i), _v)
-#define atomic64_fetch_sub(_i, _v)	atomic64_fetch_add(-(long)(_i), _v)
-#define atomic64_sub(_i, _v)		atomic64_add(-(long)(_i), _v)
-#define atomic64_sub_and_test(_i, _v)	(atomic64_sub_return(_i, _v) == 0)
-#define atomic64_dec(_v)		atomic64_sub(1, _v)
-#define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
-#define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)
-#define atomic64_inc_not_zero(v)	atomic64_add_unless((v), 1, 0)
+#define arch_atomic64_and		arch_atomic64_and
+#define arch_atomic64_or		arch_atomic64_or
+#define arch_atomic64_xor		arch_atomic64_xor
+#define arch_atomic64_fetch_and		arch_atomic64_fetch_and
+#define arch_atomic64_fetch_or		arch_atomic64_fetch_or
+#define arch_atomic64_fetch_xor		arch_atomic64_fetch_xor
+
+#define arch_atomic64_sub_return(_i, _v) arch_atomic64_add_return(-(s64)(_i), _v)
+#define arch_atomic64_fetch_sub(_i, _v) arch_atomic64_fetch_add(-(s64)(_i), _v)
+#define arch_atomic64_sub(_i, _v)	arch_atomic64_add(-(s64)(_i), _v)

 #endif /* __ARCH_S390_ATOMIC__ */
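Note on the conditional operations deleted in the last hunk: __atomic_add_unless(), atomic64_add_unless() and atomic64_dec_if_positive() can drop out of this header because the generic atomic layer is able to synthesize them once the architecture supplies arch_atomic_try_cmpxchg()/arch_atomic64_try_cmpxchg(), which this diff adds. The sketch below approximates that fallback pattern; the exact function names and their home (the generic fallback header, include/linux/atomic/atomic-arch-fallback.h in recent trees) are assumptions about the surrounding kernel code, not something shown in this diff.

/*
 * Sketch only, not verbatim kernel source: approximates the generic
 * try_cmpxchg()-based fallbacks that replace the open-coded
 * cmpxchg() loops removed above.
 */

/* Add @a to @v, unless @v already holds @u; return the old value. */
static __always_inline int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + a));

	return c;
}

/* Decrement @v only if the result stays non-negative; return the new value. */
static __always_inline s64 arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = arch_atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!arch_atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}

Unlike the deleted for (;;) loops, try_cmpxchg() writes the value it actually observed back into c on failure, so each retry needs no separate old == c comparison; that is why the loop bodies above are shorter than the removed ones.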
