Diffstat (limited to 'arch/mips/lib/memset.S')
-rw-r--r--  arch/mips/lib/memset.S | 109
1 file changed, 66 insertions, 43 deletions
diff --git a/arch/mips/lib/memset.S b/arch/mips/lib/memset.S
index a1456664d6c2..79405c32cc85 100644
--- a/arch/mips/lib/memset.S
+++ b/arch/mips/lib/memset.S
@@ -8,9 +8,9 @@
* Copyright (C) 2007 by Maciej W. Rozycki
* Copyright (C) 2011, 2012 MIPS Technologies, Inc.
*/
+#include <linux/export.h>
#include <asm/asm.h>
#include <asm/asm-offsets.h>
-#include <asm/export.h>
#include <asm/regdef.h>
#if LONGSIZE == 4
@@ -52,7 +52,7 @@
9: ___BUILD_EVA_INSN(insn, reg, addr); \
.endif; \
.section __ex_table,"a"; \
- PTR 9b, handler; \
+ PTR_WD 9b, handler; \
.previous
.macro f_fill64 dst, offset, val, fixup, mode
@@ -78,7 +78,6 @@
#endif
.endm
- .set noreorder
.align 5
/*
@@ -94,25 +93,29 @@
.endif
sltiu t0, a2, STORSIZE /* very small region? */
+ .set noreorder
bnez t0, .Lsmall_memset\@
- andi t0, a0, STORMASK /* aligned? */
+ andi t0, a0, STORMASK /* aligned? */
+ .set reorder
#ifdef CONFIG_CPU_MICROMIPS
move t8, a1 /* used by 'swp' instruction */
move t9, a1
#endif
+ .set noreorder
#ifndef CONFIG_CPU_DADDI_WORKAROUNDS
beqz t0, 1f
- PTR_SUBU t0, STORSIZE /* alignment in bytes */
+ PTR_SUBU t0, STORSIZE /* alignment in bytes */
#else
.set noat
li AT, STORSIZE
beqz t0, 1f
- PTR_SUBU t0, AT /* alignment in bytes */
+ PTR_SUBU t0, AT /* alignment in bytes */
.set at
#endif
+ .set reorder
-#ifndef CONFIG_CPU_MIPSR6
+#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
@@ -122,11 +125,13 @@
PTR_SUBU a0, t0 /* long align ptr */
PTR_ADDU a2, t0 /* correct size */
-#else /* CONFIG_CPU_MIPSR6 */
+#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
#define STORE_BYTE(N) \
EX(sb, a1, N(a0), .Lbyte_fixup\@); \
+ .set noreorder; \
beqz t0, 0f; \
- PTR_ADDU t0, 1;
+ PTR_ADDU t0, 1; \
+ .set reorder;
PTR_ADDU a2, t0 /* correct size */
PTR_ADDU t0, 1
@@ -145,19 +150,17 @@
ori a0, STORMASK
xori a0, STORMASK
PTR_ADDIU a0, STORSIZE
-#endif /* CONFIG_CPU_MIPSR6 */
+#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
1: ori t1, a2, 0x3f /* # of full blocks */
xori t1, 0x3f
- beqz t1, .Lmemset_partial\@ /* no block to fill */
andi t0, a2, 0x40-STORSIZE
+ beqz t1, .Lmemset_partial\@ /* no block to fill */
PTR_ADDU t1, a0 /* end address */
- .set reorder
1: PTR_ADDIU a0, 64
R10KCBARRIER(0(ra))
f_fill64 a0, -64, FILL64RG, .Lfwd_fixup\@, \mode
bne t1, a0, 1b
- .set noreorder
.Lmemset_partial\@:
R10KCBARRIER(0(ra))
@@ -173,28 +176,28 @@
PTR_SUBU t1, AT
.set at
#endif
- jr t1
PTR_ADDU a0, t0 /* dest ptr */
+ jr t1
- .set push
- .set noreorder
- .set nomacro
/* ... but first do longs ... */
f_fill64 a0, -64, FILL64RG, .Lpartial_fixup\@, \mode
-2: .set pop
- andi a2, STORMASK /* At most one long to go */
+2: andi a2, STORMASK /* At most one long to go */
+ .set noreorder
beqz a2, 1f
-#ifndef CONFIG_CPU_MIPSR6
- PTR_ADDU a0, a2 /* What's left */
+#ifndef CONFIG_CPU_NO_LOAD_STORE_LR
+ PTR_ADDU a0, a2 /* What's left */
+ .set reorder
R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
EX(LONG_S_R, a1, -1(a0), .Llast_fixup\@)
#else
EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
-#else
- PTR_SUBU t0, $0, a2
+#else /* CONFIG_CPU_NO_LOAD_STORE_LR */
+ PTR_SUBU t0, $0, a2
+ .set reorder
+ move a2, zero /* No remaining longs */
PTR_ADDIU t0, 1
STORE_BYTE(0)
STORE_BYTE(1)
@@ -208,57 +211,80 @@
EX(sb, a1, 6(a0), .Lbyte_fixup\@)
#endif
0:
-#endif
-1: jr ra
- move a2, zero
+#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
+1: move a2, zero
+ jr ra
.Lsmall_memset\@:
- beqz a2, 2f
PTR_ADDU t1, a0, a2
+ beqz a2, 2f
1: PTR_ADDIU a0, 1 /* fill bytewise */
R10KCBARRIER(0(ra))
+ .set noreorder
bne t1, a0, 1b
- sb a1, -1(a0)
+ EX(sb, a1, -1(a0), .Lsmall_fixup\@)
+ .set reorder
-2: jr ra /* done */
- move a2, zero
+2: move a2, zero
+ jr ra /* done */
.if __memset == 1
END(memset)
.set __memset, 0
.hidden __memset
.endif
-#ifdef CONFIG_CPU_MIPSR6
+#ifdef CONFIG_CPU_NO_LOAD_STORE_LR
.Lbyte_fixup\@:
- PTR_SUBU a2, $0, t0
+ /*
+ * unset_bytes = (#bytes - (#unaligned bytes)) - (-#unaligned bytes remaining + 1) + 1
+ * a2 = a2 - t0 + 1
+ */
+ PTR_SUBU a2, t0
+ PTR_ADDIU a2, 1
jr ra
- PTR_ADDIU a2, 1
-#endif /* CONFIG_CPU_MIPSR6 */
+#endif /* CONFIG_CPU_NO_LOAD_STORE_LR */
.Lfirst_fixup\@:
+ /* unset_bytes already in a2 */
jr ra
- nop
.Lfwd_fixup\@:
+ /*
+ * unset_bytes = partial_start_addr + #bytes - fault_addr
+ * a2 = t1 + (a2 & 3f) - $28->task->BUADDR
+ */
PTR_L t0, TI_TASK($28)
andi a2, 0x3f
LONG_L t0, THREAD_BUADDR(t0)
LONG_ADDU a2, t1
- jr ra
LONG_SUBU a2, t0
+ jr ra
.Lpartial_fixup\@:
+ /*
+ * unset_bytes = partial_end_addr + #bytes - fault_addr
+ * a2 = a0 + (a2 & STORMASK) - $28->task->BUADDR
+ */
PTR_L t0, TI_TASK($28)
andi a2, STORMASK
LONG_L t0, THREAD_BUADDR(t0)
- LONG_ADDU a2, t1
- jr ra
+ LONG_ADDU a2, a0
LONG_SUBU a2, t0
+ jr ra
.Llast_fixup\@:
+ /* unset_bytes already in a2 */
+ jr ra
+
+.Lsmall_fixup\@:
+ /*
+ * unset_bytes = end_addr - current_addr + 1
+ * a2 = t1 - a0 + 1
+ */
+ PTR_SUBU a2, t1, a0
+ PTR_ADDIU a2, 1
jr ra
- andi v1, a2, STORMASK
.endm
@@ -272,8 +298,8 @@
LEAF(memset)
EXPORT_SYMBOL(memset)
- beqz a1, 1f
move v0, a0 /* result */
+ beqz a1, 1f
andi a1, 0xff /* spread fillword */
LONG_SLL t1, a1, 8
@@ -288,9 +314,6 @@ EXPORT_SYMBOL(memset)
#ifndef CONFIG_EVA
FEXPORT(__bzero)
EXPORT_SYMBOL(__bzero)
-#else
-FEXPORT(__bzero_kernel)
-EXPORT_SYMBOL(__bzero_kernel)
#endif
__BUILD_BZERO LEGACY_MODE