author    Will Deacon <will@kernel.org>    2019-08-29 14:33:23 +0100
committer Will Deacon <will@kernel.org>    2019-08-30 11:18:37 +0100
commit    5aad6cdabbf91fd330bd216fe3c93d90f78bc7e7 (patch)
tree      059fe136b852abbacb2121935c91c1f70033c77d /arch/arm64/include/asm
parent    b32baf91f60fb9c7010bff87e68132f2ce31d9a8 (diff)
arm64: atomics: Undefine internal macros after use
We use a bunch of internal macros when constructing our atomic and cmpxchg
routines in order to save on boilerplate. Avoid exposing these directly to
users of the header files.

Reviewed-by: Andrew Murray <andrew.murray@arm.com>
Signed-off-by: Will Deacon <will@kernel.org>
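For readers unfamiliar with the pattern being tidied up: the headers use helper
macros to stamp out families of near-identical static inline functions, and the
patch simply #undefs those helpers once the functions have been generated, so
the macro names do not leak into every file that includes the header. The
sketch below is illustrative only and is not taken from the kernel sources;
GEN_OP and counter_t are made-up names.

/* Illustrative sketch only: GEN_OP and counter_t are hypothetical, not
 * kernel identifiers. The macro generates one static inline function per
 * instantiation and is then undefined so header users never see GEN_OP. */
typedef struct { int val; } counter_t;

#define GEN_OP(op, c_op)						\
static inline void counter_##op(int i, counter_t *c)			\
{									\
	c->val = c->val c_op i;						\
}

GEN_OP(add, +)	/* defines counter_add() */
GEN_OP(sub, -)	/* defines counter_sub() */

#undef GEN_OP	/* the helper is internal; do not expose it */

Without the final #undef, any translation unit including the header could
collide with or accidentally expand GEN_OP; the patch below applies the same
discipline to the arm64 atomic and cmpxchg helper macros.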
Diffstat (limited to 'arch/arm64/include/asm')
-rw-r--r--  arch/arm64/include/asm/atomic.h   | 7
-rw-r--r--  arch/arm64/include/asm/cmpxchg.h  | 4
2 files changed, 11 insertions, 0 deletions
diff --git a/arch/arm64/include/asm/atomic.h b/arch/arm64/include/asm/atomic.h
index 7c334337674d..916e5a6d5454 100644
--- a/arch/arm64/include/asm/atomic.h
+++ b/arch/arm64/include/asm/atomic.h
@@ -32,6 +32,7 @@ ATOMIC_OP(atomic_add)
 ATOMIC_OP(atomic_and)
 ATOMIC_OP(atomic_sub)
+#undef ATOMIC_OP
 #define ATOMIC_FETCH_OP(name, op) \
 static inline int arch_##op##name(int i, atomic_t *v) \
@@ -54,6 +55,8 @@ ATOMIC_FETCH_OPS(atomic_fetch_sub)
 ATOMIC_FETCH_OPS(atomic_add_return)
 ATOMIC_FETCH_OPS(atomic_sub_return)
+#undef ATOMIC_FETCH_OP
+#undef ATOMIC_FETCH_OPS
 #define ATOMIC64_OP(op) \
 static inline void arch_##op(long i, atomic64_t *v) \
@@ -68,6 +71,7 @@ ATOMIC64_OP(atomic64_add)
 ATOMIC64_OP(atomic64_and)
 ATOMIC64_OP(atomic64_sub)
+#undef ATOMIC64_OP
 #define ATOMIC64_FETCH_OP(name, op) \
 static inline long arch_##op##name(long i, atomic64_t *v) \
@@ -90,6 +94,9 @@ ATOMIC64_FETCH_OPS(atomic64_fetch_sub)
 ATOMIC64_FETCH_OPS(atomic64_add_return)
 ATOMIC64_FETCH_OPS(atomic64_sub_return)
+#undef ATOMIC64_FETCH_OP
+#undef ATOMIC64_FETCH_OPS
+
 static inline long arch_atomic64_dec_if_positive(atomic64_t *v)
 {
 	return __lse_ll_sc_body(atomic64_dec_if_positive, v);
diff --git a/arch/arm64/include/asm/cmpxchg.h b/arch/arm64/include/asm/cmpxchg.h
index afaba73e0b2c..a1398f2f9994 100644
--- a/arch/arm64/include/asm/cmpxchg.h
+++ b/arch/arm64/include/asm/cmpxchg.h
@@ -129,6 +129,8 @@ __CMPXCHG_CASE(mb_, 16)
 __CMPXCHG_CASE(mb_, 32)
 __CMPXCHG_CASE(mb_, 64)
+#undef __CMPXCHG_CASE
+
 #define __CMPXCHG_DBL(name) \
 static inline long __cmpxchg_double##name(unsigned long old1, \
 	unsigned long old2, \
@@ -143,6 +145,8 @@ static inline long __cmpxchg_double##name(unsigned long old1, \
 __CMPXCHG_DBL( )
 __CMPXCHG_DBL(_mb)
+#undef __CMPXCHG_DBL
+
 #define __CMPXCHG_GEN(sfx) \
 static inline unsigned long __cmpxchg##sfx(volatile void *ptr, \
 	unsigned long old, \