author     Leonid Yegoshin <Leonid.Yegoshin@imgtec.com>    2014-11-18 09:04:34 +0000
committer  Markos Chandras <markos.chandras@imgtec.com>    2015-02-17 15:37:30 +0000
commit     8c56208aff779a9c9086089b23e01b92b74a939a (patch)
tree       e62df25548b326075170677cd6ec530680cbadd8 /arch/mips/lib
parent     b0ce4bd535a68e5814b8470f1f8a49771f37b0a2 (diff)
MIPS: lib: memset: Add MIPS R6 support
MIPS R6 dropped the unaligned load and store instructions so we need to re-write this part of the code for R6 to store one byte at a time.

Signed-off-by: Leonid Yegoshin <Leonid.Yegoshin@imgtec.com>
Signed-off-by: Markos Chandras <markos.chandras@imgtec.com>
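For context, the pre-R6 path below relies on LONG_S_L/LONG_S_R, which expand to the swl/swr (sdl/sdr on 64-bit) partial-word stores that MIPS R6 removed; the new R6 path fills the unaligned head with single sb stores via the STORE_BYTE() macro. A minimal C model of that idea, under the assumption of a hypothetical helper name (fill_head_r6 is made up for this sketch and is not kernel code):

    /*
     * Illustrative model only, not the kernel's memset.S: store one
     * fill byte at a time until the pointer is long-aligned, the way
     * the R6 STORE_BYTE() sequence does, instead of using a single
     * swl/sdl partial-word store.
     */
    #include <stddef.h>
    #include <stdint.h>

    static unsigned char *fill_head_r6(unsigned char *p, unsigned char c,
                                       size_t *len)
    {
            /* Byte stores until p is long-aligned or len runs out. */
            while (*len && ((uintptr_t)p & (sizeof(long) - 1))) {
                    *p++ = c;
                    (*len)--;
            }
            return p;
    }

Once the head is long-aligned, the existing block/word store loop in memset.S proceeds unchanged.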
Diffstat (limited to 'arch/mips/lib')
-rw-r--r--  arch/mips/lib/memset.S  47
1 file changed, 47 insertions, 0 deletions
diff --git a/arch/mips/lib/memset.S b/arch/mips/lib/memset.S
index c8fe6b1968fb..b8e63fd00375 100644
--- a/arch/mips/lib/memset.S
+++ b/arch/mips/lib/memset.S
@@ -111,6 +111,7 @@
.set at
#endif
+#ifndef CONFIG_CPU_MIPSR6
R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
EX(LONG_S_L, a1, (a0), .Lfirst_fixup\@) /* make word/dword aligned */
@@ -120,6 +121,30 @@
PTR_SUBU a0, t0 /* long align ptr */
PTR_ADDU a2, t0 /* correct size */
+#else /* CONFIG_CPU_MIPSR6 */
+#define STORE_BYTE(N) \
+ EX(sb, a1, N(a0), .Lbyte_fixup\@); \
+ beqz t0, 0f; \
+ PTR_ADDU t0, 1;
+
+ PTR_ADDU a2, t0 /* correct size */
+ PTR_ADDU t0, 1
+ STORE_BYTE(0)
+ STORE_BYTE(1)
+#if LONGSIZE == 4
+ EX(sb, a1, 2(a0), .Lbyte_fixup\@)
+#else
+ STORE_BYTE(2)
+ STORE_BYTE(3)
+ STORE_BYTE(4)
+ STORE_BYTE(5)
+ EX(sb, a1, 6(a0), .Lbyte_fixup\@)
+#endif
+0:
+ ori a0, STORMASK
+ xori a0, STORMASK
+ PTR_ADDIU a0, STORSIZE
+#endif /* CONFIG_CPU_MIPSR6 */
1: ori t1, a2, 0x3f /* # of full blocks */
xori t1, 0x3f
beqz t1, .Lmemset_partial\@ /* no block to fill */
@@ -159,6 +184,7 @@
andi a2, STORMASK /* At most one long to go */
beqz a2, 1f
+#ifndef CONFIG_CPU_MIPSR6
PTR_ADDU a0, a2 /* What's left */
R10KCBARRIER(0(ra))
#ifdef __MIPSEB__
@@ -166,6 +192,22 @@
#else
EX(LONG_S_L, a1, -1(a0), .Llast_fixup\@)
#endif
+#else
+ PTR_SUBU t0, $0, a2
+ PTR_ADDIU t0, 1
+ STORE_BYTE(0)
+ STORE_BYTE(1)
+#if LONGSIZE == 4
+ EX(sb, a1, 2(a0), .Lbyte_fixup\@)
+#else
+ STORE_BYTE(2)
+ STORE_BYTE(3)
+ STORE_BYTE(4)
+ STORE_BYTE(5)
+ EX(sb, a1, 6(a0), .Lbyte_fixup\@)
+#endif
+0:
+#endif
1: jr ra
move a2, zero
@@ -186,6 +228,11 @@
.hidden __memset
.endif
+.Lbyte_fixup\@:
+ PTR_SUBU a2, $0, t0
+ jr ra
+ PTR_ADDIU a2, 1
+
.Lfirst_fixup\@:
jr ra
nop