Diffstat (limited to 'arch/parisc/kernel/syscall.S')
-rw-r--r--  arch/parisc/kernel/syscall.S  67
1 file changed, 49 insertions(+), 18 deletions(-)
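
The patch below adds a spinlock_check assembler macro and invokes it after every LDCW that takes an LWS lock. As a rough C model of what that macro does (illustrative only, not kernel code; the unlocked magic value is assumed from asm/spinlock_types.h), the check clears the __ARCH_SPIN_LOCK_UNLOCKED_VAL bits from the lock word and traps if anything is left over, so a locked word (0) and a properly unlocked word pass, while a clobbered word raises SPINLOCK_BREAK_INSN:

/*
 * Illustrative C model of the spinlock_check assembler macro below
 * (not kernel code).  An LWS lock word is only valid while it holds
 * 0 (locked) or __ARCH_SPIN_LOCK_UNLOCKED_VAL (free); anything else
 * means the word was overwritten, and the macro raises a break trap.
 */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Assumed to mirror asm/spinlock_types.h; treat the exact value as illustrative. */
#define ARCH_SPIN_LOCK_UNLOCKED_VAL 0x1a46u

static void spinlock_check(uint32_t spin_val)
{
	/* andcm,= \spin_val, \tmpreg, %r0 : trap unless (spin_val & ~VAL) == 0 */
	if (spin_val & ~ARCH_SPIN_LOCK_UNLOCKED_VAL) {
		fprintf(stderr, "corrupted LWS lock word: 0x%08x\n", spin_val);
		abort();	/* stands in for SPINLOCK_BREAK_INSN */
	}
}

int main(void)
{
	spinlock_check(0);				/* locked -> ok   */
	spinlock_check(ARCH_SPIN_LOCK_UNLOCKED_VAL);	/* free   -> ok   */
	spinlock_check(0xdeadbeef);			/* bogus  -> trap */
	return 0;
}

The check is only assembled when CONFIG_LIGHTWEIGHT_SPINLOCK_CHECK is enabled, so kernels built without that option pay no extra cost on the LWS fast path.
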
diff --git a/arch/parisc/kernel/syscall.S b/arch/parisc/kernel/syscall.S
index 1373e5129868..f58c4bccfbce 100644
--- a/arch/parisc/kernel/syscall.S
+++ b/arch/parisc/kernel/syscall.S
@@ -39,6 +39,7 @@ registers).
#include <asm/assembly.h>
#include <asm/processor.h>
#include <asm/cache.h>
+#include <asm/spinlock_types.h>
#include <linux/linkage.h>
@@ -66,6 +67,16 @@ registers).
stw \reg1, 0(%sr2,\reg2)
.endm
+ /* raise exception if spinlock content is not zero or
+ * __ARCH_SPIN_LOCK_UNLOCKED_VAL */
+ .macro spinlock_check spin_val,tmpreg
+#ifdef CONFIG_LIGHTWEIGHT_SPINLOCK_CHECK
+ ldi __ARCH_SPIN_LOCK_UNLOCKED_VAL, \tmpreg
+ andcm,= \spin_val, \tmpreg, %r0
+ .word SPINLOCK_BREAK_INSN
+#endif
+ .endm
+
.text
.import syscall_exit,code
@@ -232,10 +243,10 @@ linux_gateway_entry:
#ifdef CONFIG_64BIT
ldil L%sys_call_table, %r1
- or,= %r2,%r2,%r2
- addil L%(sys_call_table64-sys_call_table), %r1
+ or,ev %r2,%r2,%r2
+ ldil L%sys_call_table64, %r1
ldo R%sys_call_table(%r1), %r19
- or,= %r2,%r2,%r2
+ or,ev %r2,%r2,%r2
ldo R%sys_call_table64(%r1), %r19
#else
load32 sys_call_table, %r19
@@ -368,10 +379,10 @@ tracesys_next:
extrd,u %r19,63,1,%r2 /* W hidden in bottom bit */
ldil L%sys_call_table, %r1
- or,= %r2,%r2,%r2
- addil L%(sys_call_table64-sys_call_table), %r1
+ or,ev %r2,%r2,%r2
+ ldil L%sys_call_table64, %r1
ldo R%sys_call_table(%r1), %r19
- or,= %r2,%r2,%r2
+ or,ev %r2,%r2,%r2
ldo R%sys_call_table64(%r1), %r19
#else
load32 sys_call_table, %r19
@@ -508,7 +519,8 @@ lws_start:
lws_exit_noerror:
lws_pagefault_enable %r1,%r21
- stw,ma %r20, 0(%sr2,%r20)
+ ldi __ARCH_SPIN_LOCK_UNLOCKED_VAL, %r21
+ stw,ma %r21, 0(%sr2,%r20)
ssm PSW_SM_I, %r0
b lws_exit
copy %r0, %r21
@@ -521,7 +533,8 @@ lws_wouldblock:
lws_pagefault:
lws_pagefault_enable %r1,%r21
- stw,ma %r20, 0(%sr2,%r20)
+ ldi __ARCH_SPIN_LOCK_UNLOCKED_VAL, %r21
+ stw,ma %r21, 0(%sr2,%r20)
ssm PSW_SM_I, %r0
ldo 3(%r0),%r28
b lws_exit
@@ -600,6 +613,9 @@ lws_compare_and_swap32:
lws_compare_and_swap:
/* Trigger memory reference interruptions without writing to memory */
1: ldw 0(%r26), %r28
+ proberi (%r26), PRIV_USER, %r28
+ comb,=,n %r28, %r0, lws_fault /* backwards, likely not taken */
+ nop
2: stbys,e %r0, 0(%r26)
/* Calculate 8-bit hash index from virtual address */
@@ -619,6 +635,7 @@ lws_compare_and_swap:
/* Try to acquire the lock */
LDCW 0(%sr2,%r20), %r28
+ spinlock_check %r28, %r21
comclr,<> %r0, %r28, %r0
b,n lws_wouldblock
@@ -753,6 +770,9 @@ cas2_lock_start:
copy %r26, %r28
depi_safe 0, 31, 2, %r28
10: ldw 0(%r28), %r1
+ proberi (%r28), PRIV_USER, %r1
+ comb,=,n %r1, %r0, lws_fault /* backwards, likely not taken */
+ nop
11: stbys,e %r0, 0(%r28)
/* Calculate 8-bit hash index from virtual address */
@@ -772,6 +792,7 @@ cas2_lock_start:
/* Try to acquire the lock */
LDCW 0(%sr2,%r20), %r28
+ spinlock_check %r28, %r21
comclr,<> %r0, %r28, %r0
b,n lws_wouldblock
@@ -936,41 +957,47 @@ atomic_xchg_begin:
/* 8-bit exchange */
1: ldb 0(%r24), %r20
+ proberi (%r24), PRIV_USER, %r20
+ comb,=,n %r20, %r0, lws_fault /* backwards, likely not taken */
+ nop
copy %r23, %r20
depi_safe 0, 31, 2, %r20
b atomic_xchg_start
2: stbys,e %r0, 0(%r20)
- nop
- nop
- nop
/* 16-bit exchange */
3: ldh 0(%r24), %r20
+ proberi (%r24), PRIV_USER, %r20
+ comb,=,n %r20, %r0, lws_fault /* backwards, likely not taken */
+ nop
copy %r23, %r20
depi_safe 0, 31, 2, %r20
b atomic_xchg_start
4: stbys,e %r0, 0(%r20)
- nop
- nop
- nop
/* 32-bit exchange */
5: ldw 0(%r24), %r20
+ proberi (%r24), PRIV_USER, %r20
+ comb,=,n %r20, %r0, lws_fault /* backwards, likely not taken */
+ nop
b atomic_xchg_start
6: stbys,e %r0, 0(%r23)
nop
nop
- nop
- nop
- nop
/* 64-bit exchange */
#ifdef CONFIG_64BIT
7: ldd 0(%r24), %r20
+ proberi (%r24), PRIV_USER, %r20
+ comb,=,n %r20, %r0, lws_fault /* backwards, likely not taken */
+ nop
8: stdby,e %r0, 0(%r23)
#else
7: ldw 0(%r24), %r20
8: ldw 4(%r24), %r20
+ proberi (%r24), PRIV_USER, %r20
+ comb,=,n %r20, %r0, lws_fault /* backwards, likely not taken */
+ nop
copy %r23, %r20
depi_safe 0, 31, 2, %r20
9: stbys,e %r0, 0(%r20)
@@ -1001,6 +1028,7 @@ atomic_xchg_start:
/* Try to acquire the lock */
LDCW 0(%sr2,%r20), %r28
+ spinlock_check %r28, %r21
comclr,<> %r0, %r28, %r0
b,n lws_wouldblock
@@ -1199,6 +1227,7 @@ atomic_store_start:
/* Try to acquire the lock */
LDCW 0(%sr2,%r20), %r28
+ spinlock_check %r28, %r21
comclr,<> %r0, %r28, %r0
b,n lws_wouldblock
@@ -1310,6 +1339,8 @@ ENTRY(sys_call_table)
END(sys_call_table)
#ifdef CONFIG_64BIT
+#undef __SYSCALL_WITH_COMPAT
+#define __SYSCALL_WITH_COMPAT(nr, native, compat) __SYSCALL(nr, native)
.align 8
ENTRY(sys_call_table64)
#include <asm/syscall_table_64.h> /* 64-bit syscalls */
@@ -1330,7 +1361,7 @@ ENTRY(lws_lock_start)
/* lws locks */
.rept 256
/* Keep locks aligned at 16-bytes */
- .word 1
+ .word __ARCH_SPIN_LOCK_UNLOCKED_VAL
.word 0
.word 0
.word 0
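
For context on the last hunk: the unlock paths above now store __ARCH_SPIN_LOCK_UNLOCKED_VAL explicitly instead of whatever non-zero value happened to be in %r20, and the static lws_lock_start table is initialised with the same value, so LDCW, the unlock stores and spinlock_check all agree on what a free lock looks like. A rough C model of that table layout (illustrative only, not kernel code; the value is assumed from asm/spinlock_types.h):

/*
 * Illustrative model of the lws_lock_start table emitted at the end of
 * this file: 256 locks, each kept at 16 bytes as the comment in the
 * .rept block requires, with the lock word now initialised to
 * __ARCH_SPIN_LOCK_UNLOCKED_VAL instead of 1.
 */
#include <stdint.h>

#define ARCH_SPIN_LOCK_UNLOCKED_VAL 0x1a46u

struct lws_lock {
	uint32_t lock;		/* word targeted by LDCW */
	uint32_t pad[3];	/* keeps each entry 16 bytes, matching the .word padding */
};

static struct lws_lock lws_lock_start[256] __attribute__((aligned(16))) = {
	[0 ... 255] = { .lock = ARCH_SPIN_LOCK_UNLOCKED_VAL },
};
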