author     Jan Beulich <jbeulich@novell.com>        2008-09-05 13:26:39 +0100
committer  Ingo Molnar <mingo@elte.hu>              2008-09-05 17:04:08 +0200
commit     ef1f3413284b9270266cb04a944647e59735f0f1 (patch)
tree       0e3cb2780ec0cf79969fcd937d6938abe72a2447 /include/asm-x86/spinlock.h
parent     0a328ea43da9c3eefce7cb6c947e43e1a0fef810 (diff)
x86: ticket spin locks: fix asm constraints
In addition to these changes, I doubt the 'volatile' on all the ticket lock asm()-s is really necessary.

Signed-off-by: Jan Beulich <jbeulich@novell.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
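The constraint fix the subject line refers to is GCC's '&' (early-clobber) modifier: an output operand that the asm template writes before its last use of an input, or of a memory operand's address, has to be declared "=&..." so that GCC never places it in a register that still holds something the template needs. The sketch below (user-space, x86 only, with a made-up function and a toy operation rather than the kernel's ticket-lock code) shows the rule in its simplest form.

#include <stdio.h>

/*
 * Sketch only, not kernel code.  The asm copies the low byte of 'src'
 * into 'dst' and then adds 'src' to it, so 'dst' is written while 'src'
 * is still needed.  "=&r" (early-clobber) forbids GCC from giving 'dst'
 * and 'src' the same register; with plain "=r" it could do so, and the
 * first instruction would then destroy the input.
 */
static inline unsigned int demo(unsigned int src)
{
	unsigned int dst;

	asm("movzbl %b1, %0\n\t"	/* dst = low byte of src (written early)   */
	    "addl %1, %0"		/* dst += src; src must still be intact    */
	    : "=&r" (dst)		/* early-clobber output                    */
	    : "q" (src)			/* byte-addressable register, so %b1 works */
	    : "cc");

	return dst;
}

int main(void)
{
	printf("%#x\n", demo(0x1234));	/* prints 0x1268 (0x34 + 0x1234) */
	return 0;
}

In the patched functions the same situation arises with 'new' and 'tmp', which the templates write (movb, movzwl) before the lock word is accessed for the last time; the hunks below therefore mark them early-clobber.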
Diffstat (limited to 'include/asm-x86/spinlock.h')
-rw-r--r--  include/asm-x86/spinlock.h  |  6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/include/asm-x86/spinlock.h b/include/asm-x86/spinlock.h
index 93adae338ac6..acd9bdda55cf 100644
--- a/include/asm-x86/spinlock.h
+++ b/include/asm-x86/spinlock.h
@@ -101,7 +101,7 @@ static __always_inline int __ticket_spin_trylock(raw_spinlock_t *lock)
"1:"
"sete %b1\n\t"
"movzbl %b1,%0\n\t"
- : "=&a" (tmp), "=Q" (new), "+m" (lock->slock)
+ : "=&a" (tmp), "=&Q" (new), "+m" (lock->slock)
:
: "memory", "cc");
@@ -146,7 +146,7 @@ static __always_inline void __ticket_spin_lock(raw_spinlock_t *lock)
/* don't need lfence here, because loads are in-order */
"jmp 1b\n"
"2:"
- : "+Q" (inc), "+m" (lock->slock), "=r" (tmp)
+ : "+r" (inc), "+m" (lock->slock), "=&r" (tmp)
:
: "memory", "cc");
}
@@ -166,7 +166,7 @@ static __always_inline int __ticket_spin_trylock(raw_spinlock_t *lock)
"1:"
"sete %b1\n\t"
"movzbl %b1,%0\n\t"
- : "=&a" (tmp), "=r" (new), "+m" (lock->slock)
+ : "=&a" (tmp), "=&q" (new), "+m" (lock->slock)
:
: "memory", "cc");