Diffstat (limited to 'arch/m32r/include/asm/spinlock.h')
-rw-r--r--  arch/m32r/include/asm/spinlock.h  9
1 file changed, 7 insertions, 2 deletions
diff --git a/arch/m32r/include/asm/spinlock.h b/arch/m32r/include/asm/spinlock.h
index fa13694eaae3..323c7fc953cd 100644
--- a/arch/m32r/include/asm/spinlock.h
+++ b/arch/m32r/include/asm/spinlock.h
@@ -13,6 +13,8 @@
 #include <linux/atomic.h>
 #include <asm/dcache_clear.h>
 #include <asm/page.h>
+#include <asm/barrier.h>
+#include <asm/processor.h>
 
 /*
  * Your basic SMP spinlocks, allowing only a single CPU anywhere
@@ -27,8 +29,11 @@
 
 #define arch_spin_is_locked(x)	(*(volatile int *)(&(x)->slock) <= 0)
 #define arch_spin_lock_flags(lock, flags)	arch_spin_lock(lock)
-#define arch_spin_unlock_wait(x) \
-		do { cpu_relax(); } while (arch_spin_is_locked(x))
+
+static inline void arch_spin_unlock_wait(arch_spinlock_t *lock)
+{
+	smp_cond_load_acquire(&lock->slock, VAL > 0);
+}
 
 /**
  * arch_spin_trylock - Try spin lock and return a result
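The change above replaces the open-coded polling loop with smp_cond_load_acquire(), so a waiter not only spins until slock becomes positive (unlocked) but also gains ACQUIRE ordering once that condition is observed. As a rough sketch only (simplified from the generic fallback in include/asm-generic/barrier.h; the real macro's internals vary by kernel version), smp_cond_load_acquire(ptr, cond_expr) behaves approximately as below, which also shows where the VAL identifier used in the diff comes from:

/* Hedged sketch of the spin-until-condition-with-acquire pattern;
 * not the exact kernel macro definition. */
#define smp_cond_load_acquire_sketch(ptr, cond_expr) ({		\
	typeof(*(ptr)) VAL;					\
	for (;;) {						\
		VAL = READ_ONCE(*(ptr));  /* re-read the location each pass */ \
		if (cond_expr)            /* e.g. VAL > 0: lock is free */     \
			break;						\
		cpu_relax();              /* back off while spinning */        \
	}							\
	smp_acquire__after_ctrl_dep(); /* promote the control dependency */   \
	                               /* to ACQUIRE ordering */              \
	VAL;							\
})

Compared with the removed #define, which merely looped on arch_spin_is_locked() around cpu_relax() and provided no memory ordering, the acquire semantics keep accesses issued after arch_spin_unlock_wait() from being reordered before the observed release of the lock.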