Diffstat (limited to 'arch/alpha/include/asm/bitops.h')
-rw-r--r--  arch/alpha/include/asm/bitops.h | 73
1 file changed, 44 insertions(+), 29 deletions(-)
diff --git a/arch/alpha/include/asm/bitops.h b/arch/alpha/include/asm/bitops.h
index a19ba5efea4c..76e4343c090f 100644
--- a/arch/alpha/include/asm/bitops.h
+++ b/arch/alpha/include/asm/bitops.h
@@ -1,3 +1,4 @@
+/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_BITOPS_H
#define _ALPHA_BITOPS_H
@@ -45,17 +46,14 @@ set_bit(unsigned long nr, volatile void * addr)
/*
* WARNING: non atomic version.
*/
-static inline void
-__set_bit(unsigned long nr, volatile void * addr)
+static __always_inline void
+arch___set_bit(unsigned long nr, volatile unsigned long *addr)
{
int *m = ((int *) addr) + (nr >> 5);
*m |= 1 << (nr & 31);
}
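
Not part of the patch: a minimal userspace sketch of the word/mask arithmetic the non-atomic helpers above rely on. nr >> 5 selects the 32-bit word within the bitmap and nr & 31 the bit inside that word; the array and values below are made up purely for illustration.

#include <stdio.h>

int main(void)
{
	unsigned int bitmap[2] = { 0, 0 };
	unsigned long nr = 40;

	/* Same arithmetic as arch___set_bit(): word 40 >> 5 = 1, bit 40 & 31 = 8. */
	bitmap[nr >> 5] |= 1u << (nr & 31);

	printf("bitmap[1] = %#x\n", bitmap[1]);	/* prints 0x100 */
	return 0;
}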
-#define smp_mb__before_clear_bit() smp_mb()
-#define smp_mb__after_clear_bit() smp_mb()
-
static inline void
clear_bit(unsigned long nr, volatile void * addr)
{
@@ -84,8 +82,8 @@ clear_bit_unlock(unsigned long nr, volatile void * addr)
/*
* WARNING: non atomic version.
*/
-static __inline__ void
-__clear_bit(unsigned long nr, volatile void * addr)
+static __always_inline void
+arch___clear_bit(unsigned long nr, volatile unsigned long *addr)
{
int *m = ((int *) addr) + (nr >> 5);
@@ -96,7 +94,7 @@ static inline void
__clear_bit_unlock(unsigned long nr, volatile void * addr)
{
smp_mb();
- __clear_bit(nr, addr);
+ arch___clear_bit(nr, addr);
}
static inline void
@@ -120,8 +118,8 @@ change_bit(unsigned long nr, volatile void * addr)
/*
* WARNING: non atomic version.
*/
-static __inline__ void
-__change_bit(unsigned long nr, volatile void * addr)
+static __always_inline void
+arch___change_bit(unsigned long nr, volatile unsigned long *addr)
{
int *m = ((int *) addr) + (nr >> 5);
@@ -188,8 +186,8 @@ test_and_set_bit_lock(unsigned long nr, volatile void *addr)
/*
* WARNING: non atomic version.
*/
-static inline int
-__test_and_set_bit(unsigned long nr, volatile void * addr)
+static __always_inline bool
+arch___test_and_set_bit(unsigned long nr, volatile unsigned long *addr)
{
unsigned long mask = 1 << (nr & 0x1f);
int *m = ((int *) addr) + (nr >> 5);
@@ -232,8 +230,8 @@ test_and_clear_bit(unsigned long nr, volatile void * addr)
/*
* WARNING: non atomic version.
*/
-static inline int
-__test_and_clear_bit(unsigned long nr, volatile void * addr)
+static __always_inline bool
+arch___test_and_clear_bit(unsigned long nr, volatile unsigned long *addr)
{
unsigned long mask = 1 << (nr & 0x1f);
int *m = ((int *) addr) + (nr >> 5);
@@ -274,8 +272,8 @@ test_and_change_bit(unsigned long nr, volatile void * addr)
/*
* WARNING: non atomic version.
*/
-static __inline__ int
-__test_and_change_bit(unsigned long nr, volatile void * addr)
+static __always_inline bool
+arch___test_and_change_bit(unsigned long nr, volatile unsigned long *addr)
{
unsigned long mask = 1 << (nr & 0x1f);
int *m = ((int *) addr) + (nr >> 5);
@@ -285,10 +283,27 @@ __test_and_change_bit(unsigned long nr, volatile void * addr)
return (old & mask) != 0;
}
-static inline int
-test_bit(int nr, const volatile void * addr)
+#define arch_test_bit generic_test_bit
+#define arch_test_bit_acquire generic_test_bit_acquire
+
+static inline bool xor_unlock_is_negative_byte(unsigned long mask,
+ volatile unsigned long *p)
{
- return (1UL & (((const int *) addr)[nr >> 5] >> (nr & 31))) != 0UL;
+ unsigned long temp, old;
+
+ __asm__ __volatile__(
+ "1: ldl_l %0,%4\n"
+ " mov %0,%2\n"
+ " xor %0,%3,%0\n"
+ " stl_c %0,%1\n"
+ " beq %0,2f\n"
+ ".subsection 2\n"
+ "2: br 1b\n"
+ ".previous"
+ :"=&r" (temp), "=m" (*p), "=&r" (old)
+ :"Ir" (mask), "m" (*p));
+
+ return (old & BIT(7)) != 0;
}
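
A hedged userspace sketch of what xor_unlock_is_negative_byte() promises, not the ldl_l/stl_c implementation above: atomically XOR 'mask' into the word and report whether bit 7 was set in the value seen before the XOR (in the kernel this lets an unlock path clear a lock bit and learn about a waiter flag in one atomic step). The demo function name is hypothetical and the GCC/Clang __atomic builtin stands in for the assembly loop.

#include <stdbool.h>
#include <stdio.h>

static bool xor_unlock_is_negative_byte_demo(unsigned long mask,
					     unsigned long *p)
{
	/* Atomic XOR returning the old value, like the ldl_l/stl_c loop above. */
	unsigned long old = __atomic_fetch_xor(p, mask, __ATOMIC_RELEASE);

	return (old & (1UL << 7)) != 0;
}

int main(void)
{
	unsigned long word = (1UL << 0) | (1UL << 7);	/* "lock" + "waiter" bits */

	/* Clear bit 0 and learn, in the same atomic op, whether bit 7 was set. */
	bool waiter = xor_unlock_is_negative_byte_demo(1UL << 0, &word);

	printf("waiter=%d word=%#lx\n", waiter, word);	/* waiter=1 word=0x80 */
	return 0;
}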
/*
@@ -313,7 +328,7 @@ static inline unsigned long ffz_b(unsigned long x)
return sum;
}
-static inline unsigned long ffz(unsigned long word)
+static inline unsigned long __attribute_const__ ffz(unsigned long word)
{
#if defined(CONFIG_ALPHA_EV6) && defined(CONFIG_ALPHA_EV67)
/* Whee. EV67 can calculate it directly. */
@@ -333,7 +348,7 @@ static inline unsigned long ffz(unsigned long word)
/*
* __ffs = Find First set bit in word. Undefined if no set bit exists.
*/
-static inline unsigned long __ffs(unsigned long word)
+static inline __attribute_const__ unsigned long __ffs(unsigned long word)
{
#if defined(CONFIG_ALPHA_EV6) && defined(CONFIG_ALPHA_EV67)
/* Whee. EV67 can calculate it directly. */
@@ -358,7 +373,7 @@ static inline unsigned long __ffs(unsigned long word)
* differs in spirit from the above __ffs.
*/
-static inline int ffs(int word)
+static inline __attribute_const__ int ffs(int word)
{
int result = __ffs(word) + 1;
return word ? result : 0;
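
Illustration only (not from the patch): the two conventions the "differs in spirit" comment above alludes to. __ffs() is zero-based and undefined for an all-zero word, while ffs() follows the libc convention of being one-based and returning 0 when no bit is set; __builtin_ctzl() stands in for __ffs() here.

#include <stdio.h>
#include <strings.h>	/* libc ffs() */

int main(void)
{
	unsigned long word = 0x50;	/* bits 4 and 6 set */

	printf("__ffs-style index of 0x50: %d\n", (int)__builtin_ctzl(word));	/* 4 */
	printf("ffs(0x50): %d\n", ffs(0x50));					/* 5 */
	printf("ffs(0)   : %d\n", ffs(0));					/* 0 */
	return 0;
}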
@@ -368,14 +383,14 @@ static inline int ffs(int word)
* fls: find last bit set.
*/
#if defined(CONFIG_ALPHA_EV6) && defined(CONFIG_ALPHA_EV67)
-static inline int fls64(unsigned long word)
+static inline __attribute_const__ int fls64(unsigned long word)
{
return 64 - __kernel_ctlz(word);
}
#else
extern const unsigned char __flsm1_tab[256];
-static inline int fls64(unsigned long x)
+static inline __attribute_const__ int fls64(unsigned long x)
{
unsigned long t, a, r;
@@ -388,14 +403,14 @@ static inline int fls64(unsigned long x)
}
#endif
-static inline unsigned long __fls(unsigned long x)
+static inline __attribute_const__ unsigned long __fls(unsigned long x)
{
return fls64(x) - 1;
}
-static inline int fls(int x)
+static inline __attribute_const__ int fls(unsigned int x)
{
- return fls64((unsigned int) x);
+ return fls64(x);
}
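
Again an illustration rather than patch content: expected results for the find-last-set family. fls() is one-based and returns 0 for an input of 0; __fls() is the zero-based index of the highest set bit. __builtin_clz() stands in for the EV67 ctlz path used above, and fls_demo() is a hypothetical helper for this sketch.

#include <stdio.h>

static int fls_demo(unsigned int x)
{
	return x ? 32 - __builtin_clz(x) : 0;
}

int main(void)
{
	printf("fls(0)          = %d\n", fls_demo(0));			/* 0  */
	printf("fls(1)          = %d\n", fls_demo(1));			/* 1  */
	printf("fls(0x80000000) = %d\n", fls_demo(0x80000000u));	/* 32 */
	printf("__fls(0x80)     = %d\n", fls_demo(0x80) - 1);		/* 7  */
	return 0;
}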
/*
@@ -432,8 +447,6 @@ static inline unsigned int __arch_hweight8(unsigned int w)
#endif /* __KERNEL__ */
-#include <asm-generic/bitops/find.h>
-
#ifdef __KERNEL__
/*
@@ -454,6 +467,8 @@ sched_find_first_bit(const unsigned long b[2])
return __ffs(tmp) + ofs;
}
+#include <asm-generic/bitops/non-instrumented-non-atomic.h>
+
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>