Diffstat (limited to 'arch/mips/include/asm/bitops.h')
-rw-r--r--	arch/mips/include/asm/bitops.h	463
1 file changed, 156 insertions(+), 307 deletions(-)
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index c4675957b21b..42f88452c920 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -13,15 +13,52 @@
 #error only <linux/bitops.h> can be included directly
 #endif
 
+#include <linux/bits.h>
 #include <linux/compiler.h>
 #include <linux/types.h>
+#include <asm/asm.h>
 #include <asm/barrier.h>
 #include <asm/byteorder.h>		/* sigh ... */
 #include <asm/compiler.h>
 #include <asm/cpu-features.h>
-#include <asm/llsc.h>
 #include <asm/sgidefs.h>
-#include <asm/war.h>
+
+#define __bit_op(mem, insn, inputs...) do {			\
+	unsigned long __temp;					\
+								\
+	asm volatile(						\
+	"	.set		push			\n"	\
+	"	.set		" MIPS_ISA_LEVEL "	\n"	\
+	"	" __SYNC(full, loongson3_war) "		\n"	\
+	"1:	" __stringify(LONG_LL) "	%0, %1	\n"	\
+	"	" insn "				\n"	\
+	"	" __stringify(LONG_SC) "	%0, %1	\n"	\
+	"	" __stringify(SC_BEQZ) "	%0, 1b	\n"	\
+	"	.set		pop			\n"	\
+	: "=&r"(__temp), "+" GCC_OFF_SMALL_ASM()(mem)		\
+	: inputs						\
+	: __LLSC_CLOBBER);					\
+} while (0)
+
+#define __test_bit_op(mem, ll_dst, insn, inputs...) ({		\
+	unsigned long __orig, __temp;				\
+								\
+	asm volatile(						\
+	"	.set		push			\n"	\
+	"	.set		" MIPS_ISA_LEVEL "	\n"	\
+	"	" __SYNC(full, loongson3_war) "		\n"	\
+	"1:	" __stringify(LONG_LL) " " ll_dst ", %2	\n"	\
+	"	" insn "				\n"	\
+	"	" __stringify(LONG_SC) "	%1, %2	\n"	\
+	"	" __stringify(SC_BEQZ) "	%1, 1b	\n"	\
+	"	.set		pop			\n"	\
+	: "=&r"(__orig), "=&r"(__temp),				\
+	  "+" GCC_OFF_SMALL_ASM()(mem)				\
+	: inputs						\
+	: __LLSC_CLOBBER);					\
+								\
+	__orig;							\
+})
 
 /*
  * These are the "slower" versions of the functions and are in bitops.c.
@@ -30,15 +67,14 @@
 void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
 void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
 void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
-int __mips_test_and_set_bit(unsigned long nr,
-			    volatile unsigned long *addr);
 int __mips_test_and_set_bit_lock(unsigned long nr,
 				 volatile unsigned long *addr);
 int __mips_test_and_clear_bit(unsigned long nr,
 			      volatile unsigned long *addr);
 int __mips_test_and_change_bit(unsigned long nr,
 			       volatile unsigned long *addr);
-
+bool __mips_xor_is_negative_byte(unsigned long mask,
+		volatile unsigned long *addr);
 
 /*
  * set_bit - Atomically set a bit in memory
@@ -52,46 +88,20 @@ int __mips_test_and_change_bit(unsigned long nr,
  */
 static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 {
-	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-	int bit = nr & SZLONG_MASK;
-	unsigned long temp;
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		__asm__ __volatile__(
-		"	.set	push					\n"
-		"	.set	arch=r4000				\n"
-		"1:	" __LL "%0, %1			# set_bit	\n"
-		"	or	%0, %2					\n"
-		"	" __SC	"%0, %1					\n"
-		"	beqzl	%0, 1b					\n"
-		"	.set	pop					\n"
-		: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
-		: "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m));
-#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
-	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
-		do {
-			__asm__ __volatile__(
-			"	" __LL "%0, %1		# set_bit	\n"
-			"	" __INS "%0, %3, %2, 1			\n"
-			"	" __SC "%0, %1				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-			: "ir" (bit), "r" (~0));
-		} while (unlikely(!temp));
-#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
-	} else if (kernel_uses_llsc) {
-		do {
-			__asm__ __volatile__(
-			"	.set	push				\n"
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL "%0, %1		# set_bit	\n"
-			"	or	%0, %2				\n"
-			"	" __SC	"%0, %1				\n"
-			"	.set	pop				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-			: "ir" (1UL << bit));
-		} while (unlikely(!temp));
-	} else
+	if (!kernel_uses_llsc) {
 		__mips_set_bit(nr, addr);
+		return;
+	}
+
+	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit) && (bit >= 16)) {
+		__bit_op(*m, __stringify(LONG_INS) " %0, %3, %2, 1", "i"(bit), "r"(~0));
+		return;
+	}
+
+	__bit_op(*m, "or\t%0, %2", "ir"(BIT(bit)));
 }
 
 /*
@@ -106,46 +116,20 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
  */
 static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
-	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-	int bit = nr & SZLONG_MASK;
-	unsigned long temp;
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		__asm__ __volatile__(
-		"	.set	push					\n"
-		"	.set	arch=r4000				\n"
-		"1:	" __LL "%0, %1			# clear_bit	\n"
-		"	and	%0, %2					\n"
-		"	" __SC "%0, %1					\n"
-		"	beqzl	%0, 1b					\n"
-		"	.set	pop					\n"
-		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-		: "ir" (~(1UL << bit)));
-#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
-	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
-		do {
-			__asm__ __volatile__(
-			"	" __LL "%0, %1		# clear_bit	\n"
-			"	" __INS "%0, $0, %2, 1			\n"
-			"	" __SC "%0, %1				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-			: "ir" (bit));
-		} while (unlikely(!temp));
-#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
-	} else if (kernel_uses_llsc) {
-		do {
-			__asm__ __volatile__(
-			"	.set	push				\n"
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL "%0, %1		# clear_bit	\n"
-			"	and	%0, %2				\n"
-			"	" __SC "%0, %1				\n"
-			"	.set	pop				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-			: "ir" (~(1UL << bit)));
-		} while (unlikely(!temp));
-	} else
+	if (!kernel_uses_llsc) {
 		__mips_clear_bit(nr, addr);
+		return;
+	}
+
+	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit)) {
+		__bit_op(*m, __stringify(LONG_INS) " %0, $0, %2, 1", "i"(bit));
+		return;
+	}
+
+	__bit_op(*m, "and\t%0, %2", "ir"(~BIT(bit)));
 }
 
 /*
@@ -173,154 +157,61 @@ static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *ad
  */
 static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 {
-	int bit = nr & SZLONG_MASK;
-
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		__asm__ __volatile__(
-		"	.set	push				\n"
-		"	.set	arch=r4000			\n"
-		"1:	" __LL "%0, %1		# change_bit	\n"
-		"	xor	%0, %2				\n"
-		"	" __SC "%0, %1				\n"
-		"	beqzl	%0, 1b				\n"
-		"	.set	pop				\n"
-		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-		: "ir" (1UL << bit));
-	} else if (kernel_uses_llsc) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	.set	push			\n"
-			"	.set	"MIPS_ISA_ARCH_LEVEL"	\n"
-			"	" __LL "%0, %1	# change_bit	\n"
-			"	xor	%0, %2			\n"
-			"	" __SC "%0, %1			\n"
-			"	.set	pop			\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-			: "ir" (1UL << bit));
-		} while (unlikely(!temp));
-	} else
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
+
+	if (!kernel_uses_llsc) {
 		__mips_change_bit(nr, addr);
+		return;
+	}
+
+	__bit_op(*m, "xor\t%0, %2", "ir"(BIT(bit)));
 }
 
 /*
- * test_and_set_bit - Set a bit and return its old value
+ * test_and_set_bit_lock - Set a bit and return its old value
  * @nr: Bit to set
  * @addr: Address to count from
  *
- * This operation is atomic and cannot be reordered.
- * It also implies a memory barrier.
+ * This operation is atomic and implies acquire ordering semantics
+ * after the memory operation.
  */
-static inline int test_and_set_bit(unsigned long nr,
+static inline int test_and_set_bit_lock(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	int bit = nr & SZLONG_MASK;
-	unsigned long res;
-
-	smp_mb__before_llsc();
-
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
+	unsigned long res, orig;
 
-		__asm__ __volatile__(
-		"	.set	push					\n"
-		"	.set	arch=r4000				\n"
-		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
-		"	or	%2, %0, %3				\n"
-		"	" __SC	"%2, %1					\n"
-		"	beqzl	%2, 1b					\n"
-		"	and	%2, %0, %3				\n"
-		"	.set	pop					\n"
-		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-		: "r" (1UL << bit)
-		: "memory");
-	} else if (kernel_uses_llsc) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	.set	push				\n"
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL "%0, %1	# test_and_set_bit	\n"
-			"	or	%2, %0, %3			\n"
-			"	" __SC	"%2, %1				\n"
-			"	.set	pop				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-			: "r" (1UL << bit)
-			: "memory");
-		} while (unlikely(!res));
-
-		res = temp & (1UL << bit);
-	} else
-		res = __mips_test_and_set_bit(nr, addr);
+	if (!kernel_uses_llsc) {
+		res = __mips_test_and_set_bit_lock(nr, addr);
+	} else {
+		orig = __test_bit_op(*m, "%0",
+				     "or\t%1, %0, %3",
+				     "ir"(BIT(bit)));
+		res = (orig & BIT(bit)) != 0;
+	}
 
 	smp_llsc_mb();
 
-	return res != 0;
+	return res;
 }
 
 /*
- * test_and_set_bit_lock - Set a bit and return its old value
+ * test_and_set_bit - Set a bit and return its old value
  * @nr: Bit to set
  * @addr: Address to count from
  *
- * This operation is atomic and implies acquire ordering semantics
- * after the memory operation.
+ * This operation is atomic and cannot be reordered.
+ * It also implies a memory barrier.
  */
-static inline int test_and_set_bit_lock(unsigned long nr,
+static inline int test_and_set_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	int bit = nr & SZLONG_MASK;
-	unsigned long res;
-
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		__asm__ __volatile__(
-		"	.set	push					\n"
-		"	.set	arch=r4000				\n"
-		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
-		"	or	%2, %0, %3				\n"
-		"	" __SC	"%2, %1					\n"
-		"	beqzl	%2, 1b					\n"
-		"	and	%2, %0, %3				\n"
-		"	.set	pop					\n"
-		: "=&r" (temp), "+m" (*m), "=&r" (res)
-		: "r" (1UL << bit)
-		: "memory");
-	} else if (kernel_uses_llsc) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	.set	push				\n"
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL "%0, %1	# test_and_set_bit	\n"
-			"	or	%2, %0, %3			\n"
-			"	" __SC	"%2, %1				\n"
-			"	.set	pop				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-			: "r" (1UL << bit)
-			: "memory");
-		} while (unlikely(!res));
-
-		res = temp & (1UL << bit);
-	} else
-		res = __mips_test_and_set_bit_lock(nr, addr);
-
-	smp_llsc_mb();
-
-	return res != 0;
+	smp_mb__before_atomic();
+	return test_and_set_bit_lock(nr, addr);
 }
+
 /*
  * test_and_clear_bit - Clear a bit and return its old value
  * @nr: Bit to clear
@@ -332,69 +223,30 @@ static inline int test_and_set_bit_lock(unsigned long nr,
 static inline int test_and_clear_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	int bit = nr & SZLONG_MASK;
-	unsigned long res;
-
-	smp_mb__before_llsc();
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
+	unsigned long res, orig;
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
+	smp_mb__before_atomic();
 
-		__asm__ __volatile__(
-		"	.set	push					\n"
-		"	.set	arch=r4000				\n"
-		"1:	" __LL	"%0, %1		# test_and_clear_bit	\n"
-		"	or	%2, %0, %3				\n"
-		"	xor	%2, %3					\n"
-		"	" __SC	"%2, %1					\n"
-		"	beqzl	%2, 1b					\n"
-		"	and	%2, %0, %3				\n"
-		"	.set	pop					\n"
-		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-		: "r" (1UL << bit)
-		: "memory");
-#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
-	} else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	" __LL	"%0, %1 # test_and_clear_bit	\n"
-			"	" __EXT "%2, %0, %3, 1			\n"
-			"	" __INS	"%0, $0, %3, 1			\n"
-			"	" __SC	"%0, %1				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-			: "ir" (bit)
-			: "memory");
-		} while (unlikely(!temp));
-#endif
-	} else if (kernel_uses_llsc) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	.set	push				\n"
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL	"%0, %1 # test_and_clear_bit	\n"
-			"	or	%2, %0, %3			\n"
-			"	xor	%2, %3				\n"
-			"	" __SC	"%2, %1				\n"
-			"	.set	pop				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-			: "r" (1UL << bit)
-			: "memory");
-		} while (unlikely(!res));
-
-		res = temp & (1UL << bit);
-	} else
+	if (!kernel_uses_llsc) {
 		res = __mips_test_and_clear_bit(nr, addr);
+	} else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
+		res = __test_bit_op(*m, "%1",
+				    __stringify(LONG_EXT) " %0, %1, %3, 1;"
+				    __stringify(LONG_INS) " %1, $0, %3, 1",
+				    "i"(bit));
+	} else {
+		orig = __test_bit_op(*m, "%0",
+				     "or\t%1, %0, %3;"
+				     "xor\t%1, %1, %3",
+				     "ir"(BIT(bit)));
+		res = (orig & BIT(bit)) != 0;
+	}
 
 	smp_llsc_mb();
 
-	return res != 0;
+	return res;
 }
 
 /*
@@ -408,53 +260,51 @@
 static inline int test_and_change_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	int bit = nr & SZLONG_MASK;
-	unsigned long res;
-
-	smp_mb__before_llsc();
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
+	unsigned long res, orig;
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
+	smp_mb__before_atomic();
 
-		__asm__ __volatile__(
-		"	.set	push					\n"
-		"	.set	arch=r4000				\n"
-		"1:	" __LL	"%0, %1		# test_and_change_bit	\n"
-		"	xor	%2, %0, %3				\n"
-		"	" __SC	"%2, %1					\n"
-		"	beqzl	%2, 1b					\n"
-		"	and	%2, %0, %3				\n"
-		"	.set	pop					\n"
-		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-		: "r" (1UL << bit)
-		: "memory");
-	} else if (kernel_uses_llsc) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	.set	push				\n"
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL	"%0, %1 # test_and_change_bit	\n"
-			"	xor	%2, %0, %3			\n"
-			"	" __SC	"\t%2, %1			\n"
-			"	.set	pop				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-			: "r" (1UL << bit)
-			: "memory");
-		} while (unlikely(!res));
-
-		res = temp & (1UL << bit);
-	} else
+	if (!kernel_uses_llsc) {
 		res = __mips_test_and_change_bit(nr, addr);
+	} else {
+		orig = __test_bit_op(*m, "%0",
+				     "xor\t%1, %0, %3",
+				     "ir"(BIT(bit)));
+		res = (orig & BIT(bit)) != 0;
+	}
+
+	smp_llsc_mb();
+
+	return res;
+}
+
+static inline bool xor_unlock_is_negative_byte(unsigned long mask,
+		volatile unsigned long *p)
+{
+	unsigned long orig;
+	bool res;
+
+	smp_mb__before_atomic();
+
+	if (!kernel_uses_llsc) {
+		res = __mips_xor_is_negative_byte(mask, p);
+	} else {
+		orig = __test_bit_op(*p, "%0",
+				     "xor\t%1, %0, %3",
+				     "ir"(mask));
+		res = (orig & BIT(7)) != 0;
+	}
 
 	smp_llsc_mb();
 
-	return res != 0;
+	return res;
 }
 
+#undef __bit_op
+#undef __test_bit_op
+
 #include <asm-generic/bitops/non-atomic.h>
 
 /*
@@ -477,7 +327,7 @@ static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long
  *
  * Return the bit position (0..63) of the most significant 1 bit in a word
  * Returns -1 if no 1 bit exists
  */
-static inline unsigned long __fls(unsigned long word)
+static __always_inline __attribute_const__ unsigned long __fls(unsigned long word)
 {
 	int num;
@@ -543,7 +393,7 @@ static inline unsigned long __fls(unsigned long word)
  * Returns 0..SZLONG-1
  * Undefined if no bit exists, so code should check against 0 first.
  */
-static inline unsigned long __ffs(unsigned long word)
+static __always_inline __attribute_const__ unsigned long __ffs(unsigned long word)
 {
 	return __fls(word & -word);
 }
@@ -555,7 +405,7 @@
  * This is defined the same way as ffs.
  * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
  */
-static inline int fls(unsigned int x)
+static inline __attribute_const__ int fls(unsigned int x)
 {
 	int r;
 
@@ -606,9 +456,9 @@ static inline int fls(unsigned int x)
  *
  * This is defined the same way as
  * the libc and compiler builtin ffs routines, therefore
- * differs in spirit from the above ffz (man ffs).
+ * differs in spirit from the below ffz (man ffs).
  */
-static inline int ffs(int word)
+static inline __attribute_const__ int ffs(int word)
 {
 	if (!word)
 		return 0;
@@ -617,7 +467,6 @@ static inline int ffs(int word)
 }
 
 #include <asm-generic/bitops/ffz.h>
-#include <asm-generic/bitops/find.h>
 
 #ifdef __KERNEL__
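
Note on the pattern above (not part of the patch): every `__bit_op()` / `__test_bit_op()` expansion is a load-linked/store-conditional retry loop — `LONG_LL` loads the word, the caller-supplied `insn` modifies it, `LONG_SC` attempts the store and clears its register on contention, and `SC_BEQZ` branches back to retry. `__test_bit_op()` additionally keeps the pre-update word in `__orig` so the caller can test the affected bit. The sketch below models the same read-modify-write semantics with the portable GCC/Clang `__atomic` builtins, purely as a reading aid; the `sketch_*` names and the standalone macro definitions are illustrative assumptions, not kernel code.

	/* Reading aid -- hypothetical user-space model, not the kernel code. */
	#include <limits.h>

	#define BITS_PER_LONG	(CHAR_BIT * sizeof(unsigned long))	/* stand-in */
	#define BIT(nr)		(1UL << (nr))				/* stand-in */
	#define BIT_WORD(nr)	((nr) / BITS_PER_LONG)			/* stand-in */

	/* Models set_bit()/__bit_op(): an atomic OR with no ordering guarantee,
	 * matching the barrier-free LL/SC loop. */
	static inline void sketch_set_bit(unsigned long nr, volatile unsigned long *addr)
	{
		volatile unsigned long *m = &addr[BIT_WORD(nr)];

		/* The LL/SC loop retries until the store-conditional succeeds;
		 * a single atomic fetch-or has the same visible effect. */
		__atomic_fetch_or(m, BIT(nr % BITS_PER_LONG), __ATOMIC_RELAXED);
	}

	/* Models test_and_set_bit()/__test_bit_op(): the pre-update word
	 * ("__orig" in the macro) is what the caller tests the bit in. */
	static inline int sketch_test_and_set_bit(unsigned long nr,
						  volatile unsigned long *addr)
	{
		volatile unsigned long *m = &addr[BIT_WORD(nr)];
		unsigned long mask = BIT(nr % BITS_PER_LONG);

		/* SEQ_CST approximates the smp_mb__before_atomic()/smp_llsc_mb()
		 * pair that brackets the kernel's LL/SC sequence. */
		return (__atomic_fetch_or(m, mask, __ATOMIC_SEQ_CST) & mask) != 0;
	}

The same reading explains why the diff can collapse `test_and_set_bit()` into `smp_mb__before_atomic()` followed by `test_and_set_bit_lock()`: a full barrier in front of the acquire-ordered variant supplies the stronger ordering the plain test-and-set promises, so the duplicated asm block could simply be deleted.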
