author    Huang Pei <huangpei@loongson.cn>                 2021-12-15 16:45:00 +0800
committer Thomas Bogendoerfer <tsbogend@alpha.franken.de>  2022-01-05 11:16:35 +0100
commit    f0b7ddbd794bdffade370f22bb7a774002208ef4
tree      b704833d6caac2e9054d4b5be36d24a0e0c859a5
parent    10657660c16e689bfad204190e7031b9b1622a35
MIPS: retire "asm/llsc.h"
all that "asm/llsc.h" does is just to help inline asm, which can be stringifyed from "asm/asm.h" +. Since "asm/asm.h" has all we need, retire "asm/llsc.h" +. remove unused header file Inspired-by: Maciej W. Rozycki <macro@orcam.me.uk> Signed-off-by: Huang Pei <huangpei@loongson.cn> Signed-off-by: Thomas Bogendoerfer <tsbogend@alpha.franken.de>
-rw-r--r--  arch/mips/include/asm/asm.h        4
-rw-r--r--  arch/mips/include/asm/atomic.h    11
-rw-r--r--  arch/mips/include/asm/bitops.h    24
-rw-r--r--  arch/mips/include/asm/cmpxchg.h    9
-rw-r--r--  arch/mips/include/asm/kvm_host.h  13
-rw-r--r--  arch/mips/include/asm/llsc.h      39
6 files changed, 31 insertions, 69 deletions
diff --git a/arch/mips/include/asm/asm.h b/arch/mips/include/asm/asm.h
index f3302b13d3e0..6ffdd4b5e1d0 100644
--- a/arch/mips/include/asm/asm.h
+++ b/arch/mips/include/asm/asm.h
@@ -222,6 +222,8 @@ symbol = value
#define LONG_SRLV srlv
#define LONG_SRA sra
#define LONG_SRAV srav
+#define LONG_INS ins
+#define LONG_EXT ext
#ifdef __ASSEMBLY__
#define LONG .word
@@ -249,6 +251,8 @@ symbol = value
#define LONG_SRLV dsrlv
#define LONG_SRA dsra
#define LONG_SRAV dsrav
+#define LONG_INS dins
+#define LONG_EXT dext
#ifdef __ASSEMBLY__
#define LONG .dword
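As an illustration (not part of the patch) of how the two new mnemonics are consumed by the bitops.h hunks further down, assuming <linux/stringify.h>:

    #include <linux/stringify.h>
    #include <asm/asm.h>

    /*
     * On a CONFIG_64BIT build LONG_INS expands to dins, so the fragment
     * below becomes "dins %0, $0, %2, 1"; on 32-bit it is "ins %0, $0, %2, 1".
     */
    static const char example_clear_bit_insn[] =
	__stringify(LONG_INS) " %0, $0, %2, 1";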
diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index a0b9e7c1e4fc..712fb5a6a568 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
@@ -16,13 +16,12 @@
#include <linux/irqflags.h>
#include <linux/types.h>
+#include <asm/asm.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
-#include <asm/llsc.h>
#include <asm/sync.h>
-#include <asm/war.h>
#define ATOMIC_OPS(pfx, type) \
static __always_inline type arch_##pfx##_read(const pfx##_t *v) \
@@ -74,7 +73,7 @@ static __inline__ void arch_##pfx##_##op(type i, pfx##_t * v) \
"1: " #ll " %0, %1 # " #pfx "_" #op " \n" \
" " #asm_op " %0, %2 \n" \
" " #sc " %0, %1 \n" \
- "\t" __SC_BEQZ "%0, 1b \n" \
+ "\t" __stringify(SC_BEQZ) " %0, 1b \n" \
" .set pop \n" \
: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (v->counter) \
: "Ir" (i) : __LLSC_CLOBBER); \
@@ -104,7 +103,7 @@ arch_##pfx##_##op##_return_relaxed(type i, pfx##_t * v) \
"1: " #ll " %1, %2 # " #pfx "_" #op "_return\n" \
" " #asm_op " %0, %1, %3 \n" \
" " #sc " %0, %2 \n" \
- "\t" __SC_BEQZ "%0, 1b \n" \
+ "\t" __stringify(SC_BEQZ) " %0, 1b \n" \
" " #asm_op " %0, %1, %3 \n" \
" .set pop \n" \
: "=&r" (result), "=&r" (temp), \
@@ -137,7 +136,7 @@ arch_##pfx##_fetch_##op##_relaxed(type i, pfx##_t * v) \
"1: " #ll " %1, %2 # " #pfx "_fetch_" #op "\n" \
" " #asm_op " %0, %1, %3 \n" \
" " #sc " %0, %2 \n" \
- "\t" __SC_BEQZ "%0, 1b \n" \
+ "\t" __stringify(SC_BEQZ) " %0, 1b \n" \
" .set pop \n" \
" move %0, %1 \n" \
: "=&r" (result), "=&r" (temp), \
@@ -237,7 +236,7 @@ static __inline__ type arch_##pfx##_sub_if_positive(type i, pfx##_t * v) \
" .set push \n" \
" .set " MIPS_ISA_LEVEL " \n" \
" " #sc " %1, %2 \n" \
- " " __SC_BEQZ "%1, 1b \n" \
+ " " __stringify(SC_BEQZ) " %1, 1b \n" \
"2: " __SYNC(full, loongson3_war) " \n" \
" .set pop \n" \
: "=&r" (result), "=&r" (temp), \
diff --git a/arch/mips/include/asm/bitops.h b/arch/mips/include/asm/bitops.h
index dc2a6234dd3c..3812082b8295 100644
--- a/arch/mips/include/asm/bitops.h
+++ b/arch/mips/include/asm/bitops.h
@@ -16,14 +16,12 @@
#include <linux/bits.h>
#include <linux/compiler.h>
#include <linux/types.h>
+#include <asm/asm.h>
#include <asm/barrier.h>
#include <asm/byteorder.h> /* sigh ... */
#include <asm/compiler.h>
#include <asm/cpu-features.h>
-#include <asm/isa-rev.h>
-#include <asm/llsc.h>
#include <asm/sgidefs.h>
-#include <asm/war.h>
#define __bit_op(mem, insn, inputs...) do { \
unsigned long __temp; \
@@ -32,10 +30,10 @@
" .set push \n" \
" .set " MIPS_ISA_LEVEL " \n" \
" " __SYNC(full, loongson3_war) " \n" \
- "1: " __LL "%0, %1 \n" \
+ "1: " __stringify(LONG_LL) " %0, %1 \n" \
" " insn " \n" \
- " " __SC "%0, %1 \n" \
- " " __SC_BEQZ "%0, 1b \n" \
+ " " __stringify(LONG_SC) " %0, %1 \n" \
+ " " __stringify(SC_BEQZ) " %0, 1b \n" \
" .set pop \n" \
: "=&r"(__temp), "+" GCC_OFF_SMALL_ASM()(mem) \
: inputs \
@@ -49,10 +47,10 @@
" .set push \n" \
" .set " MIPS_ISA_LEVEL " \n" \
" " __SYNC(full, loongson3_war) " \n" \
- "1: " __LL ll_dst ", %2 \n" \
+ "1: " __stringify(LONG_LL) " " ll_dst ", %2\n" \
" " insn " \n" \
- " " __SC "%1, %2 \n" \
- " " __SC_BEQZ "%1, 1b \n" \
+ " " __stringify(LONG_SC) " %1, %2 \n" \
+ " " __stringify(SC_BEQZ) " %1, 1b \n" \
" .set pop \n" \
: "=&r"(__orig), "=&r"(__temp), \
"+" GCC_OFF_SMALL_ASM()(mem) \
@@ -98,7 +96,7 @@ static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
}
if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit) && (bit >= 16)) {
- __bit_op(*m, __INS "%0, %3, %2, 1", "i"(bit), "r"(~0));
+ __bit_op(*m, __stringify(LONG_INS) " %0, %3, %2, 1", "i"(bit), "r"(~0));
return;
}
@@ -126,7 +124,7 @@ static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
}
if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit)) {
- __bit_op(*m, __INS "%0, $0, %2, 1", "i"(bit));
+ __bit_op(*m, __stringify(LONG_INS) " %0, $0, %2, 1", "i"(bit));
return;
}
@@ -234,8 +232,8 @@ static inline int test_and_clear_bit(unsigned long nr,
res = __mips_test_and_clear_bit(nr, addr);
} else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
res = __test_bit_op(*m, "%1",
- __EXT "%0, %1, %3, 1;"
- __INS "%1, $0, %3, 1",
+ __stringify(LONG_EXT) " %0, %1, %3, 1;"
+ __stringify(LONG_INS) " %1, $0, %3, 1",
"i"(bit));
} else {
orig = __test_bit_op(*m, "%0",
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index 66a8b293fd80..7ec9493b2861 100644
--- a/arch/mips/include/asm/cmpxchg.h
+++ b/arch/mips/include/asm/cmpxchg.h
@@ -10,10 +10,9 @@
#include <linux/bug.h>
#include <linux/irqflags.h>
+#include <asm/asm.h>
#include <asm/compiler.h>
-#include <asm/llsc.h>
#include <asm/sync.h>
-#include <asm/war.h>
/*
* These functions doesn't exist, so if they are called you'll either:
@@ -48,7 +47,7 @@ extern unsigned long __xchg_called_with_bad_pointer(void)
" move $1, %z3 \n" \
" .set " MIPS_ISA_ARCH_LEVEL " \n" \
" " st " $1, %1 \n" \
- "\t" __SC_BEQZ "$1, 1b \n" \
+ "\t" __stringify(SC_BEQZ) " $1, 1b \n" \
" .set pop \n" \
: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \
: GCC_OFF_SMALL_ASM() (*m), "Jr" (val) \
@@ -127,7 +126,7 @@ unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
" move $1, %z4 \n" \
" .set "MIPS_ISA_ARCH_LEVEL" \n" \
" " st " $1, %1 \n" \
- "\t" __SC_BEQZ "$1, 1b \n" \
+ "\t" __stringify(SC_BEQZ) " $1, 1b \n" \
" .set pop \n" \
"2: " __SYNC(full, loongson3_war) " \n" \
: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \
@@ -282,7 +281,7 @@ static inline unsigned long __cmpxchg64(volatile void *ptr,
/* Attempt to store new at ptr */
" scd %L1, %2 \n"
/* If we failed, loop! */
- "\t" __SC_BEQZ "%L1, 1b \n"
+ "\t" __stringify(SC_BEQZ) " %L1, 1b \n"
"2: " __SYNC(full, loongson3_war) " \n"
" .set pop \n"
: "=&r"(ret),
diff --git a/arch/mips/include/asm/kvm_host.h b/arch/mips/include/asm/kvm_host.h
index 696f6b009377..999bdd4f25b4 100644
--- a/arch/mips/include/asm/kvm_host.h
+++ b/arch/mips/include/asm/kvm_host.h
@@ -20,6 +20,7 @@
#include <linux/threads.h>
#include <linux/spinlock.h>
+#include <asm/asm.h>
#include <asm/inst.h>
#include <asm/mipsregs.h>
@@ -379,9 +380,9 @@ static inline void _kvm_atomic_set_c0_guest_reg(unsigned long *reg,
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
- " " __LL "%0, %1 \n"
+ " "__stringify(LONG_LL) " %0, %1 \n"
" or %0, %2 \n"
- " " __SC "%0, %1 \n"
+ " "__stringify(LONG_SC) " %0, %1 \n"
" .set pop \n"
: "=&r" (temp), "+m" (*reg)
: "r" (val));
@@ -396,9 +397,9 @@ static inline void _kvm_atomic_clear_c0_guest_reg(unsigned long *reg,
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
- " " __LL "%0, %1 \n"
+ " "__stringify(LONG_LL) " %0, %1 \n"
" and %0, %2 \n"
- " " __SC "%0, %1 \n"
+ " "__stringify(LONG_SC) " %0, %1 \n"
" .set pop \n"
: "=&r" (temp), "+m" (*reg)
: "r" (~val));
@@ -414,10 +415,10 @@ static inline void _kvm_atomic_change_c0_guest_reg(unsigned long *reg,
__asm__ __volatile__(
" .set push \n"
" .set "MIPS_ISA_ARCH_LEVEL" \n"
- " " __LL "%0, %1 \n"
+ " "__stringify(LONG_LL) " %0, %1 \n"
" and %0, %2 \n"
" or %0, %3 \n"
- " " __SC "%0, %1 \n"
+ " "__stringify(LONG_SC) " %0, %1 \n"
" .set pop \n"
: "=&r" (temp), "+m" (*reg)
: "r" (~change), "r" (val & change));
diff --git a/arch/mips/include/asm/llsc.h b/arch/mips/include/asm/llsc.h
deleted file mode 100644
index ec09fe5d6d6c..000000000000
--- a/arch/mips/include/asm/llsc.h
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * This file is subject to the terms and conditions of the GNU General Public
- * License. See the file "COPYING" in the main directory of this archive
- * for more details.
- *
- * Macros for 32/64-bit neutral inline assembler
- */
-
-#ifndef __ASM_LLSC_H
-#define __ASM_LLSC_H
-
-#include <asm/isa-rev.h>
-
-#if _MIPS_SZLONG == 32
-#define __LL "ll "
-#define __SC "sc "
-#define __INS "ins "
-#define __EXT "ext "
-#elif _MIPS_SZLONG == 64
-#define __LL "lld "
-#define __SC "scd "
-#define __INS "dins "
-#define __EXT "dext "
-#endif
-
-/*
- * Using a branch-likely instruction to check the result of an sc instruction
- * works around a bug present in R10000 CPUs prior to revision 3.0 that could
- * cause ll-sc sequences to execute non-atomically.
- */
-#ifdef CONFIG_WAR_R10000_LLSC
-# define __SC_BEQZ "beqzl "
-#elif MIPS_ISA_REV >= 6
-# define __SC_BEQZ "beqzc "
-#else
-# define __SC_BEQZ "beqz "
-#endif
-
-#endif /* __ASM_LLSC_H */
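The branch selection documented in the removed header does not go away; it is assumed (a sketch, not shown in this diff) to carry on in "asm/asm.h" as a bare mnemonic added by an earlier patch in the series, so that callers quote it with __stringify() at the point of use:

    /*
     * Assumed equivalent in <asm/asm.h>: the same R10000 branch-likely
     * workaround and MIPS R6 beqzc choice, but as an unquoted mnemonic
     * that users stringify themselves.
     */
    #ifdef CONFIG_WAR_R10000_LLSC
    # define SC_BEQZ	beqzl
    #elif MIPS_ISA_REV >= 6
    # define SC_BEQZ	beqzc
    #else
    # define SC_BEQZ	beqz
    #endif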