author    Markos Chandras <markos.chandras@imgtec.com>  2015-01-26 12:44:11 +0000
committer Markos Chandras <markos.chandras@imgtec.com>  2015-02-17 15:37:21 +0000
commit    94bfb75ace81f7b09860400ba02ed1607a2e0e27 (patch)
tree      43c515ed18ed72bd10e6269be1545e41bd2a5335  /arch/mips/include/asm/cmpxchg.h
parent    a7e07b1ae550303c6611f4d3b054a4f9c2bc8a9e (diff)
MIPS: asm: Rename GCC_OFF12_ASM to GCC_OFF_SMALL_ASM
The GCC_OFF12_ASM macro is used for 12-bit immediate constraints, but we will
also use it for 9-bit constraints on MIPS R6, so rename it to something more
appropriate.

Cc: Maciej W. Rozycki <macro@linux-mips.org>
Signed-off-by: Markos Chandras <markos.chandras@imgtec.com>
Diffstat (limited to 'arch/mips/include/asm/cmpxchg.h')
-rw-r--r--  arch/mips/include/asm/cmpxchg.h  24
1 file changed, 12 insertions(+), 12 deletions(-)
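For context, GCC_OFF_SMALL_ASM() expands to a GCC inline-asm memory-operand
constraint string whose offset range matches what ll/sc can encode on the
target ISA, which is why the "12" in the old name no longer fits once MIPS R6
(9-bit offsets) is covered. Below is a minimal sketch of how such a macro
could be defined and spliced into an asm statement; the Kconfig tests, the
"ZC"/"R" constraint choices, and the example_load_linked() helper are
illustrative assumptions, not the exact definition from
arch/mips/include/asm/compiler.h.

/*
 * Illustrative sketch only: pick an inline-asm memory constraint whose
 * offset range matches the ll/sc addressing mode.  "ZC" restricts the
 * operand to the small-offset form (9-bit on MIPS R6), while "R" is the
 * traditional constraint used on the classic 16-bit-offset ISAs.
 */
#if defined(CONFIG_CPU_MIPSR6) || defined(CONFIG_CPU_MICROMIPS)
#define GCC_OFF_SMALL_ASM()	"ZC"
#else
#define GCC_OFF_SMALL_ASM()	"R"
#endif

/* Hypothetical helper showing how the constraint string is pasted in. */
static inline unsigned int example_load_linked(volatile unsigned int *p)
{
	unsigned int v;

	__asm__ __volatile__(
	"	ll	%0, %1		\n"
	: "=&r" (v)
	: GCC_OFF_SMALL_ASM() (*p)
	: "memory");
	return v;
}

Because the constraint is a plain string literal, string concatenation with a
"=" modifier (as in the hunks below, e.g. "=" GCC_OFF_SMALL_ASM() (*m)) turns
it into an output operand with the same small-offset addressing restriction.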
diff --git a/arch/mips/include/asm/cmpxchg.h b/arch/mips/include/asm/cmpxchg.h
index 28b1edf19501..68baa0cf521a 100644
--- a/arch/mips/include/asm/cmpxchg.h
+++ b/arch/mips/include/asm/cmpxchg.h
@@ -31,8 +31,8 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
" sc %2, %1 \n"
" beqzl %2, 1b \n"
" .set mips0 \n"
- : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy)
- : GCC_OFF12_ASM() (*m), "Jr" (val)
+ : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy)
+ : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
: "memory");
} else if (kernel_uses_llsc) {
unsigned long dummy;
@@ -46,9 +46,9 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
" .set arch=r4000 \n"
" sc %2, %1 \n"
" .set mips0 \n"
- : "=&r" (retval), "=" GCC_OFF12_ASM() (*m),
+ : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m),
"=&r" (dummy)
- : GCC_OFF12_ASM() (*m), "Jr" (val)
+ : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
: "memory");
} while (unlikely(!dummy));
} else {
@@ -82,8 +82,8 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
" scd %2, %1 \n"
" beqzl %2, 1b \n"
" .set mips0 \n"
- : "=&r" (retval), "=" GCC_OFF12_ASM() (*m), "=&r" (dummy)
- : GCC_OFF12_ASM() (*m), "Jr" (val)
+ : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m), "=&r" (dummy)
+ : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
: "memory");
} else if (kernel_uses_llsc) {
unsigned long dummy;
@@ -95,9 +95,9 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
" move %2, %z4 \n"
" scd %2, %1 \n"
" .set mips0 \n"
- : "=&r" (retval), "=" GCC_OFF12_ASM() (*m),
+ : "=&r" (retval), "=" GCC_OFF_SMALL_ASM() (*m),
"=&r" (dummy)
- : GCC_OFF12_ASM() (*m), "Jr" (val)
+ : GCC_OFF_SMALL_ASM() (*m), "Jr" (val)
: "memory");
} while (unlikely(!dummy));
} else {
@@ -158,8 +158,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
" beqzl $1, 1b \n" \
"2: \n" \
" .set pop \n" \
- : "=&r" (__ret), "=" GCC_OFF12_ASM() (*m) \
- : GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new) \
+ : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \
+ : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new) \
: "memory"); \
} else if (kernel_uses_llsc) { \
__asm__ __volatile__( \
@@ -175,8 +175,8 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
" beqz $1, 1b \n" \
" .set pop \n" \
"2: \n" \
- : "=&r" (__ret), "=" GCC_OFF12_ASM() (*m) \
- : GCC_OFF12_ASM() (*m), "Jr" (old), "Jr" (new) \
+ : "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m) \
+ : GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new) \
: "memory"); \
} else { \
unsigned long __flags; \