diff options
| author | Mark Rutland <mark.rutland@arm.com> | 2021-05-25 15:02:11 +0100 | 
|---|---|---|
| committer | Peter Zijlstra <peterz@infradead.org> | 2021-05-26 13:20:50 +0200 | 
| commit | 82b993e8249ae3cb29c1b6eb8f6548f5748508b7 (patch) | |
| tree | 796e34d53dabf31ea804b748466cf81a7edd1e9f | |
| parent | 6988631bdfddcedc1d27f83723ea36a442f00ea1 (diff) | |
locking/atomic: cmpxchg: support ARCH_ATOMIC
We'd like all architectures to convert to ARCH_ATOMIC, as this will
enable functionality, and once all architectures are converted it will
be possible to make significant cleanups to the atomic headers.
A number of architectures use asm-generic/cmpxchg.h or
asm-generic/cmpxchg-local.h, and it's impractical to convert the headers
and all these architectures in one go. To make it possible to convert
them one-by-one, let's make the asm-generic implementation function as
either cmpxchg*() or arch_cmpxchg*() depending on whether ARCH_ATOMIC is
selected. To do this, the generic implementations are prefixed as
generic_cmpxchg_*(), and preprocessor definitions map
cmpxchg_*()/arch_cmpxchg_*() onto these as appropriate.
Once all users are moved over to ARCH_ATOMIC the ifdeffery in the header
can be simplified and/or removed entirely.
For existing users (none of which select ARCH_ATOMIC), there should be
no functional change as a result of this patch.
Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Cc: Arnd Bergmann <arnd@arndb.de>
Cc: Boqun Feng <boqun.feng@gmail.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Will Deacon <will@kernel.org>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Link: https://lore.kernel.org/r/20210525140232.53872-13-mark.rutland@arm.com
| -rw-r--r-- | include/asm-generic/cmpxchg.h | 61 | 
1 files changed, 44 insertions, 17 deletions
| diff --git a/include/asm-generic/cmpxchg.h b/include/asm-generic/cmpxchg.h index b9d54c7afc52..98c931199089 100644 --- a/include/asm-generic/cmpxchg.h +++ b/include/asm-generic/cmpxchg.h @@ -14,16 +14,14 @@  #include <linux/types.h>  #include <linux/irqflags.h> -#ifndef xchg -  /*   * This function doesn't exist, so you'll get a linker error if   * something tries to do an invalidly-sized xchg().   */ -extern void __xchg_called_with_bad_pointer(void); +extern void __generic_xchg_called_with_bad_pointer(void);  static inline -unsigned long __xchg(unsigned long x, volatile void *ptr, int size) +unsigned long __generic_xchg(unsigned long x, volatile void *ptr, int size)  {  	unsigned long ret, flags; @@ -75,35 +73,64 @@ unsigned long __xchg(unsigned long x, volatile void *ptr, int size)  #endif /* CONFIG_64BIT */  	default: -		__xchg_called_with_bad_pointer(); +		__generic_xchg_called_with_bad_pointer();  		return x;  	}  } -#define xchg(ptr, x) ({							\ -	((__typeof__(*(ptr)))						\ -		__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))));	\ +#define generic_xchg(ptr, x) ({							\ +	((__typeof__(*(ptr)))							\ +		__generic_xchg((unsigned long)(x), (ptr), sizeof(*(ptr))));	\  }) -#endif /* xchg */ -  /*   * Atomic compare and exchange.   
*/  #include <asm-generic/cmpxchg-local.h> -#ifndef cmpxchg_local -#define cmpxchg_local(ptr, o, n) ({					       \ -	((__typeof__(*(ptr)))__generic_cmpxchg_local((ptr), (unsigned long)(o),\ -			(unsigned long)(n), sizeof(*(ptr))));		       \ +#define generic_cmpxchg_local(ptr, o, n) ({					\ +	((__typeof__(*(ptr)))__generic_cmpxchg_local((ptr), (unsigned long)(o),	\ +			(unsigned long)(n), sizeof(*(ptr))));			\  }) + +#define generic_cmpxchg64_local(ptr, o, n) \ +	__generic_cmpxchg64_local((ptr), (o), (n)) + + +#ifdef CONFIG_ARCH_ATOMIC + +#ifndef arch_xchg +#define arch_xchg		generic_xchg +#endif + +#ifndef arch_cmpxchg_local +#define arch_cmpxchg_local	generic_cmpxchg_local +#endif + +#ifndef arch_cmpxchg64_local +#define arch_cmpxchg64_local	generic_cmpxchg64_local +#endif + +#define arch_cmpxchg		arch_cmpxchg_local +#define arch_cmpxchg64		arch_cmpxchg64_local + +#else /* CONFIG_ARCH_ATOMIC */ + +#ifndef xchg +#define xchg			generic_xchg +#endif + +#ifndef cmpxchg_local +#define cmpxchg_local		generic_cmpxchg_local  #endif  #ifndef cmpxchg64_local -#define cmpxchg64_local(ptr, o, n) __generic_cmpxchg64_local((ptr), (o), (n)) +#define cmpxchg64_local		generic_cmpxchg64_local  #endif -#define cmpxchg(ptr, o, n)	cmpxchg_local((ptr), (o), (n)) -#define cmpxchg64(ptr, o, n)	cmpxchg64_local((ptr), (o), (n)) +#define cmpxchg			cmpxchg_local +#define cmpxchg64		cmpxchg64_local + +#endif /* CONFIG_ARCH_ATOMIC */  #endif /* __ASM_GENERIC_CMPXCHG_H */ | 
