Diffstat (limited to 'arch/s390/include/asm/atomic.h')
-rw-r--r--  arch/s390/include/asm/atomic.h  206
1 file changed, 142 insertions(+), 64 deletions(-)
diff --git a/arch/s390/include/asm/atomic.h b/arch/s390/include/asm/atomic.h
index 491ad53a0d4e..b36dd6a1d652 100644
--- a/arch/s390/include/asm/atomic.h
+++ b/arch/s390/include/asm/atomic.h
@@ -15,56 +15,76 @@
#include <asm/barrier.h>
#include <asm/cmpxchg.h>
-#define ATOMIC_INIT(i) { (i) }
-
-static inline int atomic_read(const atomic_t *v)
+static __always_inline int arch_atomic_read(const atomic_t *v)
{
- int c;
-
- asm volatile(
- " l %0,%1\n"
- : "=d" (c) : "Q" (v->counter));
- return c;
+ return __atomic_read(&v->counter);
}
+#define arch_atomic_read arch_atomic_read
-static inline void atomic_set(atomic_t *v, int i)
+static __always_inline void arch_atomic_set(atomic_t *v, int i)
{
- asm volatile(
- " st %1,%0\n"
- : "=Q" (v->counter) : "d" (i));
+ __atomic_set(&v->counter, i);
}
+#define arch_atomic_set arch_atomic_set
-static inline int atomic_add_return(int i, atomic_t *v)
+static __always_inline int arch_atomic_add_return(int i, atomic_t *v)
{
return __atomic_add_barrier(i, &v->counter) + i;
}
+#define arch_atomic_add_return arch_atomic_add_return
-static inline int atomic_fetch_add(int i, atomic_t *v)
+static __always_inline int arch_atomic_fetch_add(int i, atomic_t *v)
{
return __atomic_add_barrier(i, &v->counter);
}
+#define arch_atomic_fetch_add arch_atomic_fetch_add
-static inline void atomic_add(int i, atomic_t *v)
+static __always_inline void arch_atomic_add(int i, atomic_t *v)
{
-#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
- if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
- __atomic_add_const(i, &v->counter);
- return;
- }
-#endif
__atomic_add(i, &v->counter);
}
+#define arch_atomic_add arch_atomic_add
+
+static __always_inline void arch_atomic_inc(atomic_t *v)
+{
+ __atomic_add_const(1, &v->counter);
+}
+#define arch_atomic_inc arch_atomic_inc
+
+static __always_inline void arch_atomic_dec(atomic_t *v)
+{
+ __atomic_add_const(-1, &v->counter);
+}
+#define arch_atomic_dec arch_atomic_dec
+
+static __always_inline bool arch_atomic_sub_and_test(int i, atomic_t *v)
+{
+ return __atomic_add_and_test_barrier(-i, &v->counter);
+}
+#define arch_atomic_sub_and_test arch_atomic_sub_and_test
+
+static __always_inline bool arch_atomic_dec_and_test(atomic_t *v)
+{
+ return __atomic_add_const_and_test_barrier(-1, &v->counter);
+}
+#define arch_atomic_dec_and_test arch_atomic_dec_and_test
+
+static __always_inline bool arch_atomic_inc_and_test(atomic_t *v)
+{
+ return __atomic_add_const_and_test_barrier(1, &v->counter);
+}
+#define arch_atomic_inc_and_test arch_atomic_inc_and_test
-#define atomic_sub(_i, _v) atomic_add(-(int)(_i), _v)
-#define atomic_sub_return(_i, _v) atomic_add_return(-(int)(_i), _v)
-#define atomic_fetch_sub(_i, _v) atomic_fetch_add(-(int)(_i), _v)
+#define arch_atomic_sub(_i, _v) arch_atomic_add(-(int)(_i), _v)
+#define arch_atomic_sub_return(_i, _v) arch_atomic_add_return(-(int)(_i), _v)
+#define arch_atomic_fetch_sub(_i, _v) arch_atomic_fetch_add(-(int)(_i), _v)
#define ATOMIC_OPS(op) \
-static inline void atomic_##op(int i, atomic_t *v) \
+static __always_inline void arch_atomic_##op(int i, atomic_t *v) \
{ \
__atomic_##op(i, &v->counter); \
} \
-static inline int atomic_fetch_##op(int i, atomic_t *v) \
+static __always_inline int arch_atomic_fetch_##op(int i, atomic_t *v) \
{ \
return __atomic_##op##_barrier(i, &v->counter); \
}
@@ -75,68 +95,119 @@ ATOMIC_OPS(xor)
#undef ATOMIC_OPS
-#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+#define arch_atomic_and arch_atomic_and
+#define arch_atomic_or arch_atomic_or
+#define arch_atomic_xor arch_atomic_xor
+#define arch_atomic_fetch_and arch_atomic_fetch_and
+#define arch_atomic_fetch_or arch_atomic_fetch_or
+#define arch_atomic_fetch_xor arch_atomic_fetch_xor
-static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
+static __always_inline int arch_atomic_xchg(atomic_t *v, int new)
{
- return __atomic_cmpxchg(&v->counter, old, new);
+ return arch_xchg(&v->counter, new);
}
+#define arch_atomic_xchg arch_atomic_xchg
-#define ATOMIC64_INIT(i) { (i) }
+static __always_inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
+{
+ return arch_cmpxchg(&v->counter, old, new);
+}
+#define arch_atomic_cmpxchg arch_atomic_cmpxchg
-static inline s64 atomic64_read(const atomic64_t *v)
+static __always_inline bool arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
- s64 c;
+ return arch_try_cmpxchg(&v->counter, old, new);
+}
+#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
- asm volatile(
- " lg %0,%1\n"
- : "=d" (c) : "Q" (v->counter));
- return c;
+#define ATOMIC64_INIT(i) { (i) }
+
+static __always_inline s64 arch_atomic64_read(const atomic64_t *v)
+{
+ return __atomic64_read((long *)&v->counter);
}
+#define arch_atomic64_read arch_atomic64_read
-static inline void atomic64_set(atomic64_t *v, s64 i)
+static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i)
{
- asm volatile(
- " stg %1,%0\n"
- : "=Q" (v->counter) : "d" (i));
+ __atomic64_set((long *)&v->counter, i);
}
+#define arch_atomic64_set arch_atomic64_set
-static inline s64 atomic64_add_return(s64 i, atomic64_t *v)
+static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
{
return __atomic64_add_barrier(i, (long *)&v->counter) + i;
}
+#define arch_atomic64_add_return arch_atomic64_add_return
-static inline s64 atomic64_fetch_add(s64 i, atomic64_t *v)
+static __always_inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
return __atomic64_add_barrier(i, (long *)&v->counter);
}
+#define arch_atomic64_fetch_add arch_atomic64_fetch_add
-static inline void atomic64_add(s64 i, atomic64_t *v)
+static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
{
-#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
- if (__builtin_constant_p(i) && (i > -129) && (i < 128)) {
- __atomic64_add_const(i, (long *)&v->counter);
- return;
- }
-#endif
__atomic64_add(i, (long *)&v->counter);
}
+#define arch_atomic64_add arch_atomic64_add
-#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+static __always_inline void arch_atomic64_inc(atomic64_t *v)
+{
+ __atomic64_add_const(1, (long *)&v->counter);
+}
+#define arch_atomic64_inc arch_atomic64_inc
-static inline s64 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
+static __always_inline void arch_atomic64_dec(atomic64_t *v)
{
- return __atomic64_cmpxchg((long *)&v->counter, old, new);
+ __atomic64_add_const(-1, (long *)&v->counter);
}
+#define arch_atomic64_dec arch_atomic64_dec
-#define ATOMIC64_OPS(op) \
-static inline void atomic64_##op(s64 i, atomic64_t *v) \
-{ \
- __atomic64_##op(i, (long *)&v->counter); \
-} \
-static inline long atomic64_fetch_##op(s64 i, atomic64_t *v) \
-{ \
- return __atomic64_##op##_barrier(i, (long *)&v->counter); \
+static __always_inline bool arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
+{
+ return __atomic64_add_and_test_barrier(-i, (long *)&v->counter);
+}
+#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
+
+static __always_inline bool arch_atomic64_dec_and_test(atomic64_t *v)
+{
+ return __atomic64_add_const_and_test_barrier(-1, (long *)&v->counter);
+}
+#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
+
+static __always_inline bool arch_atomic64_inc_and_test(atomic64_t *v)
+{
+ return __atomic64_add_const_and_test_barrier(1, (long *)&v->counter);
+}
+#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
+
+static __always_inline s64 arch_atomic64_xchg(atomic64_t *v, s64 new)
+{
+ return arch_xchg(&v->counter, new);
+}
+#define arch_atomic64_xchg arch_atomic64_xchg
+
+static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
+{
+ return arch_cmpxchg(&v->counter, old, new);
+}
+#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
+
+static __always_inline bool arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
+{
+ return arch_try_cmpxchg(&v->counter, old, new);
+}
+#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
+
+#define ATOMIC64_OPS(op) \
+static __always_inline void arch_atomic64_##op(s64 i, atomic64_t *v) \
+{ \
+ __atomic64_##op(i, (long *)&v->counter); \
+} \
+static __always_inline long arch_atomic64_fetch_##op(s64 i, atomic64_t *v) \
+{ \
+ return __atomic64_##op##_barrier(i, (long *)&v->counter); \
}
ATOMIC64_OPS(and)
@@ -145,8 +216,15 @@ ATOMIC64_OPS(xor)
#undef ATOMIC64_OPS
-#define atomic64_sub_return(_i, _v) atomic64_add_return(-(s64)(_i), _v)
-#define atomic64_fetch_sub(_i, _v) atomic64_fetch_add(-(s64)(_i), _v)
-#define atomic64_sub(_i, _v) atomic64_add(-(s64)(_i), _v)
+#define arch_atomic64_and arch_atomic64_and
+#define arch_atomic64_or arch_atomic64_or
+#define arch_atomic64_xor arch_atomic64_xor
+#define arch_atomic64_fetch_and arch_atomic64_fetch_and
+#define arch_atomic64_fetch_or arch_atomic64_fetch_or
+#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
+
+#define arch_atomic64_sub_return(_i, _v) arch_atomic64_add_return(-(s64)(_i), _v)
+#define arch_atomic64_fetch_sub(_i, _v) arch_atomic64_fetch_add(-(s64)(_i), _v)
+#define arch_atomic64_sub(_i, _v) arch_atomic64_add(-(s64)(_i), _v)
#endif /* __ARCH_S390_ATOMIC__ */
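
For context: the arch_atomic_* entry points above are what the generic atomic layer wraps, so ordinary kernel code keeps calling the unprefixed atomic_* helpers. Note also that the CONFIG_HAVE_MARCH_Z196_FEATURES constant-operand special case is dropped from arch_atomic_add(), while the new arch_atomic_inc()/arch_atomic_dec() and *_and_test() helpers call the __atomic_add_const*() primitives directly. Below is a minimal, illustrative sketch of the call pattern these primitives back; it is not part of the patch, and the counter and function names are hypothetical.

#include <linux/atomic.h>

/*
 * Hypothetical reference counter built on the interfaces this header
 * implements; atomic_inc()/atomic_dec_and_test()/atomic_try_cmpxchg()
 * resolve to the arch_atomic_* versions above via the generic wrappers.
 */
static atomic_t users = ATOMIC_INIT(0);

static void get_user_ref(void)
{
	atomic_inc(&users);			/* arch_atomic_inc() */
}

static bool put_user_ref(void)
{
	/* true when the count drops to zero */
	return atomic_dec_and_test(&users);	/* arch_atomic_dec_and_test() */
}

static bool claim_if_free(atomic_t *flag)
{
	int expected = 0;

	/* arch_atomic_try_cmpxchg(); 'expected' is updated on failure */
	return atomic_try_cmpxchg(flag, &expected, 1);
}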