Diffstat (limited to 'arch/csky/include/asm/spinlock.h')
-rw-r--r--  arch/csky/include/asm/spinlock.h | 167 ----------
 1 file changed, 0 insertions(+), 167 deletions(-)
diff --git a/arch/csky/include/asm/spinlock.h b/arch/csky/include/asm/spinlock.h
index 7cf3f2b34cea..69f5aa249c5f 100644
--- a/arch/csky/include/asm/spinlock.h
+++ b/arch/csky/include/asm/spinlock.h
@@ -6,8 +6,6 @@
#include <linux/spinlock_types.h>
#include <asm/barrier.h>
-#ifdef CONFIG_QUEUED_RWLOCKS
-
/*
* Ticket-based spin-locking.
*/
@@ -88,169 +86,4 @@ static inline int arch_spin_is_contended(arch_spinlock_t *lock)
#include <asm/qrwlock.h>
-/* See include/linux/spinlock.h */
-#define smp_mb__after_spinlock() smp_mb()
-
-#else /* CONFIG_QUEUED_RWLOCKS */
-
-/*
- * Test-and-set spin-locking.
- */
-static inline void arch_spin_lock(arch_spinlock_t *lock)
-{
- u32 *p = &lock->lock;
- u32 tmp;
-
- asm volatile (
- "1: ldex.w %0, (%1) \n"
- " bnez %0, 1b \n"
- " movi %0, 1 \n"
- " stex.w %0, (%1) \n"
- " bez %0, 1b \n"
- : "=&r" (tmp)
- : "r"(p)
- : "cc");
- smp_mb();
-}
-
-static inline void arch_spin_unlock(arch_spinlock_t *lock)
-{
- smp_mb();
- WRITE_ONCE(lock->lock, 0);
-}
-
-static inline int arch_spin_trylock(arch_spinlock_t *lock)
-{
- u32 *p = &lock->lock;
- u32 tmp;
-
- asm volatile (
- "1: ldex.w %0, (%1) \n"
- " bnez %0, 2f \n"
- " movi %0, 1 \n"
- " stex.w %0, (%1) \n"
- " bez %0, 1b \n"
- " movi %0, 0 \n"
- "2: \n"
- : "=&r" (tmp)
- : "r"(p)
- : "cc");
-
- if (!tmp)
- smp_mb();
-
- return !tmp;
-}
-
-#define arch_spin_is_locked(x) (READ_ONCE((x)->lock) != 0)
-
-/*
- * read lock/unlock/trylock
- */
-static inline void arch_read_lock(arch_rwlock_t *lock)
-{
- u32 *p = &lock->lock;
- u32 tmp;
-
- asm volatile (
- "1: ldex.w %0, (%1) \n"
- " blz %0, 1b \n"
- " addi %0, 1 \n"
- " stex.w %0, (%1) \n"
- " bez %0, 1b \n"
- : "=&r" (tmp)
- : "r"(p)
- : "cc");
- smp_mb();
-}
-
-static inline void arch_read_unlock(arch_rwlock_t *lock)
-{
- u32 *p = &lock->lock;
- u32 tmp;
-
- smp_mb();
- asm volatile (
- "1: ldex.w %0, (%1) \n"
- " subi %0, 1 \n"
- " stex.w %0, (%1) \n"
- " bez %0, 1b \n"
- : "=&r" (tmp)
- : "r"(p)
- : "cc");
-}
-
-static inline int arch_read_trylock(arch_rwlock_t *lock)
-{
- u32 *p = &lock->lock;
- u32 tmp;
-
- asm volatile (
- "1: ldex.w %0, (%1) \n"
- " blz %0, 2f \n"
- " addi %0, 1 \n"
- " stex.w %0, (%1) \n"
- " bez %0, 1b \n"
- " movi %0, 0 \n"
- "2: \n"
- : "=&r" (tmp)
- : "r"(p)
- : "cc");
-
- if (!tmp)
- smp_mb();
-
- return !tmp;
-}
-
-/*
- * write lock/unlock/trylock
- */
-static inline void arch_write_lock(arch_rwlock_t *lock)
-{
- u32 *p = &lock->lock;
- u32 tmp;
-
- asm volatile (
- "1: ldex.w %0, (%1) \n"
- " bnez %0, 1b \n"
- " subi %0, 1 \n"
- " stex.w %0, (%1) \n"
- " bez %0, 1b \n"
- : "=&r" (tmp)
- : "r"(p)
- : "cc");
- smp_mb();
-}
-
-static inline void arch_write_unlock(arch_rwlock_t *lock)
-{
- smp_mb();
- WRITE_ONCE(lock->lock, 0);
-}
-
-static inline int arch_write_trylock(arch_rwlock_t *lock)
-{
- u32 *p = &lock->lock;
- u32 tmp;
-
- asm volatile (
- "1: ldex.w %0, (%1) \n"
- " bnez %0, 2f \n"
- " subi %0, 1 \n"
- " stex.w %0, (%1) \n"
- " bez %0, 1b \n"
- " movi %0, 0 \n"
- "2: \n"
- : "=&r" (tmp)
- : "r"(p)
- : "cc");
-
- if (!tmp)
- smp_mb();
-
- return !tmp;
-}
-
-#endif /* CONFIG_QUEUED_RWLOCKS */
#endif /* __ASM_CSKY_SPINLOCK_H */
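
The comment surviving in the first hunk refers to the ticket-based spin-locking this file keeps. For readers unfamiliar with the technique, here is a minimal sketch in portable C11 atomics. The names (ticket_lock_t, ticket_lock, ticket_unlock) are illustrative only, and this two-word variant is simplified: the real csky code packs the next/owner halves into a single 32-bit lock word.

#include <stdatomic.h>

typedef struct {
        atomic_uint next;   /* next ticket to hand out */
        atomic_uint owner;  /* ticket currently being served */
} ticket_lock_t;

static inline void ticket_lock(ticket_lock_t *l)
{
        /* Take the next ticket; the fetch-add makes ticket grabs
         * atomic, so each locker gets a unique number. */
        unsigned int me = atomic_fetch_add_explicit(&l->next, 1,
                        memory_order_relaxed);

        /* Spin until our ticket comes up; acquire pairs with the
         * release in ticket_unlock(). */
        while (atomic_load_explicit(&l->owner, memory_order_acquire) != me)
                ;
}

static inline void ticket_unlock(ticket_lock_t *l)
{
        /* Serve the next waiter in FIFO order. */
        atomic_fetch_add_explicit(&l->owner, 1, memory_order_release);
}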
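The removed arch_spin_lock/arch_spin_trylock/arch_spin_unlock implement a classic test-and-set lock: spin until the lock word reads zero, then try to store 1 through a load-exclusive/store-exclusive pair (ldex.w/stex.w), branching back if the exclusive store fails. A rough C11 equivalent, with a compare-and-swap standing in for the exclusive pair and with hypothetical names, not the kernel's API:

#include <stdatomic.h>
#include <stdbool.h>

typedef struct { atomic_uint lock; } ts_spinlock_t;

static inline void ts_spin_lock(ts_spinlock_t *l)
{
        unsigned int expected;

        do {
                /* Spin on plain loads until the lock looks free,
                 * mirroring the "ldex.w; bnez 1b" loop. */
                while (atomic_load_explicit(&l->lock, memory_order_relaxed))
                        ;
                expected = 0;
                /* The CAS plays the role of the stex.w that can fail;
                 * acquire ordering stands in for the smp_mb() that
                 * followed the asm block. */
        } while (!atomic_compare_exchange_weak_explicit(&l->lock,
                        &expected, 1,
                        memory_order_acquire, memory_order_relaxed));
}

static inline bool ts_spin_trylock(ts_spinlock_t *l)
{
        unsigned int expected = 0;

        /* One shot: succeed only if the lock was free. */
        return atomic_compare_exchange_strong_explicit(&l->lock,
                        &expected, 1,
                        memory_order_acquire, memory_order_relaxed);
}

static inline void ts_spin_unlock(ts_spinlock_t *l)
{
        /* Release ordering corresponds to the smp_mb() before
         * WRITE_ONCE(lock->lock, 0) in the removed code. */
        atomic_store_explicit(&l->lock, 0, memory_order_release);
}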
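The removed read/write lock routines encode the entire lock state in one signed word: a non-negative value counts active readers, while a writer drives the word negative (the asm reaches -1 via "subi" from zero, which "blz" then sees as writer-held). A sketch of that encoding in C11 atomics, again with illustrative names and only the trylock/unlock paths:

#include <stdatomic.h>
#include <stdbool.h>

typedef struct { atomic_int cnts; } ts_rwlock_t;

static inline bool ts_read_trylock(ts_rwlock_t *l)
{
        int old = atomic_load_explicit(&l->cnts, memory_order_relaxed);

        /* A negative count means a writer holds the lock. */
        if (old < 0)
                return false;
        /* Bump the reader count, failing if the word changed
         * underneath us (the stex.w-retry case in the asm). */
        return atomic_compare_exchange_strong_explicit(&l->cnts,
                        &old, old + 1,
                        memory_order_acquire, memory_order_relaxed);
}

static inline void ts_read_unlock(ts_rwlock_t *l)
{
        atomic_fetch_sub_explicit(&l->cnts, 1, memory_order_release);
}

static inline bool ts_write_trylock(ts_rwlock_t *l)
{
        int expected = 0;

        /* A writer only gets in when there are no readers and no
         * writer, i.e. the count is exactly zero. */
        return atomic_compare_exchange_strong_explicit(&l->cnts,
                        &expected, -1,
                        memory_order_acquire, memory_order_relaxed);
}

static inline void ts_write_unlock(ts_rwlock_t *l)
{
        atomic_store_explicit(&l->cnts, 0, memory_order_release);
}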