Diffstat (limited to 'arch/powerpc/kernel/vector.S')
-rw-r--r-- arch/powerpc/kernel/vector.S | 55 +++++++++++++++++++++++++++++++++++++++----------------
1 file changed, 39 insertions(+), 16 deletions(-)
diff --git a/arch/powerpc/kernel/vector.S b/arch/powerpc/kernel/vector.S
index 8eb867dbad5f..80b3f6e476b6 100644
--- a/arch/powerpc/kernel/vector.S
+++ b/arch/powerpc/kernel/vector.S
@@ -1,4 +1,6 @@
/* SPDX-License-Identifier: GPL-2.0 */
+#include <linux/export.h>
+#include <linux/linkage.h>
#include <asm/processor.h>
#include <asm/ppc_asm.h>
#include <asm/reg.h>
@@ -7,7 +9,6 @@
#include <asm/thread_info.h>
#include <asm/page.h>
#include <asm/ptrace.h>
-#include <asm/export.h>
#include <asm/asm-compat.h>
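
These header changes track the kernel-wide retirement of <asm/export.h>: EXPORT_SYMBOL() in assembly files now comes from <linux/export.h>, while <linux/linkage.h> provides the SYM_FUNC_START_LOCAL()/SYM_FUNC_END() annotations used later in this file.
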
/*
@@ -32,6 +33,7 @@ _GLOBAL(store_vr_state)
mfvscr v0
li r4, VRSTATE_VSCR
stvx v0, r4, r3
+ lvx v0, 0, r3
blr
EXPORT_SYMBOL(store_vr_state)
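
The added lvx fixes a subtle clobber: mfvscr can only read VSCR through a vector register, and this routine uses v0 as the scratch, so without a reload the caller would see a corrupted v0 (and vs0). Since SAVE_32VRS has already written v0..v31 into the vrstate buffer, v0's saved image sits at offset 0 and can simply be pulled back. A hypothetical C rendering (layout as in the kernel's thread_vr_state; read_vscr_via_v0()/reload_v0() are made-up stand-ins for the instructions, not real kernel helpers):

    /* hypothetical helpers standing in for mfvscr/stvx and lvx */
    extern vector128 read_vscr_via_v0(void);
    extern void reload_v0(const vector128 *slot);

    struct vrstate_sketch {
            vector128 vr[32];   /* v0..v31; vr[0] is what lvx v0,0,r3 reloads */
            vector128 vscr;     /* at offset VRSTATE_VSCR */
    };

    void store_vr_state_sketch(struct vrstate_sketch *v)
    {
            v->vscr = read_vscr_via_v0();  /* mfvscr goes through v0: clobbered */
            reload_v0(&v->vr[0]);          /* lvx v0,0,r3 restores the caller's v0 */
    }
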
@@ -47,6 +49,10 @@ EXPORT_SYMBOL(store_vr_state)
*/
_GLOBAL(load_up_altivec)
mfmsr r5 /* grab the current MSR */
+#ifdef CONFIG_PPC_BOOK3S_64
+ /* interrupt doesn't set MSR[RI] and HPT can fault on current access */
+ ori r5,r5,MSR_RI
+#endif
oris r5,r5,MSR_VEC@h
MTMSRD(r5) /* enable use of AltiVec now */
isync
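
The new Book3S 64 lines do what the in-diff comment says: the interrupt that brought us here leaves MSR[RI] clear, yet the accesses to current below can fault on the hash page table, so RI is set (making any such fault recoverable) at the same time AltiVec is enabled for the kernel. In pseudo-C, using the kernel's mfmsr()/mtmsr() helpers (a sketch of the sequence, not the actual implementation):

    unsigned long msr = mfmsr();   /* grab the current MSR */
    #ifdef CONFIG_PPC_BOOK3S_64
    msr |= MSR_RI;                 /* HPT fault on current must be recoverable */
    #endif
    msr |= MSR_VEC;                /* enable AltiVec for the kernel itself */
    mtmsr(msr);                    /* MTMSRD(r5) + isync in the asm */
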
@@ -65,20 +71,21 @@ _GLOBAL(load_up_altivec)
1:
/* enable use of VMX after return */
#ifdef CONFIG_PPC32
- mfspr r5,SPRN_SPRG_THREAD /* current task's THREAD (phys) */
+ addi r5,r2,THREAD
oris r9,r9,MSR_VEC@h
#else
ld r4,PACACURRENT(r13)
addi r5,r4,THREAD /* Get THREAD */
oris r12,r12,MSR_VEC@h
std r12,_MSR(r1)
+#ifdef CONFIG_PPC_BOOK3S_64
+ li r4,0
+ stb r4,PACASRR_VALID(r13)
+#endif
#endif
- /* Don't care if r4 overflows, this is desired behaviour */
- lbz r4,THREAD_LOAD_VEC(r5)
- addi r4,r4,1
+ li r4,1
stb r4,THREAD_LOAD_VEC(r5)
addi r6,r5,THREAD_VRSTATE
- li r4,1
li r10,VRSTATE_VSCR
stw r4,THREAD_USED_VR(r5)
lvx v0,r10,r6
@@ -86,6 +93,7 @@ _GLOBAL(load_up_altivec)
REST_32VRS(0,r4,r6)
/* restore registers and return */
blr
+_ASM_NOKPROBE_SYMBOL(load_up_altivec)
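
Three things happen in this hunk. On PPC32, THREAD is now derived from r2 (which holds current) rather than from SPRN_SPRG_THREAD, which held a physical address per the deleted comment. THREAD_LOAD_VEC stops being a wrapping counter (note the deleted overflow comment) and becomes a plain flag. And on Book3S 64, because the saved MSR in the interrupt frame was just modified, the SRR0/SRR1 values cached per the paca are stale, so PACASRR_VALID is cleared to make the exit path rewrite them. A rough C rendering (field names as in thread_struct/paca_struct; a sketch, not the kernel's code):

    regs->msr |= MSR_VEC;           /* std r12,_MSR(r1): user regains VMX on return */
    #ifdef CONFIG_PPC_BOOK3S_64
    local_paca->srr_valid = 0;      /* frame MSR changed; SRR0/1 must be reloaded */
    #endif
    current->thread.load_vec = 1;   /* plain flag now, not a wrapping counter */
    current->thread.used_vr = 1;
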
/*
* save_altivec(tsk)
@@ -102,6 +110,7 @@ _GLOBAL(save_altivec)
mfvscr v0
li r4,VRSTATE_VSCR
stvx v0,r4,r7
+ lvx v0,0,r7
blr
#ifdef CONFIG_VSX
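
save_altivec gets the same one-instruction fix as store_vr_state above: v0 is reloaded from the saved register image after the VSCR store, so the caller's v0/vs0 is not clobbered.
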
@@ -124,6 +133,12 @@ _GLOBAL(load_up_vsx)
andis. r5,r12,MSR_VEC@h
beql+ load_up_altivec /* skip if already loaded */
+#ifdef CONFIG_PPC_BOOK3S_64
+ /* interrupt doesn't set MSR[RI] and HPT can fault on current access */
+ li r5,MSR_RI
+ mtmsrd r5,1
+#endif
+
ld r4,PACACURRENT(r13)
addi r4,r4,THREAD /* Get THREAD */
li r6,1
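
Unlike load_up_altivec, no read-modify-write of the whole MSR is needed here: the L=1 form of mtmsrd updates only MSR[EE] and MSR[RI], so loading r5 with just MSR_RI turns RI on and EE off while leaving every other MSR bit alone. Roughly, as pseudo-C:

    /* architectural effect of mtmsrd r5,1 (L=1 form) */
    msr = (msr & ~(MSR_EE | MSR_RI)) | (r5 & (MSR_EE | MSR_RI));
    /* with r5 == MSR_RI: EE cleared, RI set, rest of the MSR untouched */
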
@@ -131,7 +146,9 @@ _GLOBAL(load_up_vsx)
/* enable use of VSX after return */
oris r12,r12,MSR_VSX@h
std r12,_MSR(r1)
- b fast_exception_return
+ li r4,0
+ stb r4,PACASRR_VALID(r13)
+ b fast_interrupt_return_srr
#endif /* CONFIG_VSX */
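
fast_exception_return no longer exists as a common exit; with the interrupt-return rework the branch goes to the SRR flavour of the new path, and because _MSR(r1) was just updated with MSR_VSX, the cached SRR validity flag in the paca is cleared first, as in the load_up_altivec sketch above.
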
@@ -141,8 +158,8 @@ _GLOBAL(load_up_vsx)
* usage of floating-point registers. These routines must be called
* with preempt disabled.
*/
-#ifdef CONFIG_PPC32
.data
+#ifdef CONFIG_PPC32
fpzero:
.long 0
fpone:
@@ -155,24 +172,29 @@ fphalf:
lfs fr,name@l(r11)
#else
- .section ".toc","aw"
fpzero:
- .tc FD_0_0[TC],0
+ .quad 0
fpone:
- .tc FD_3ff00000_0[TC],0x3ff0000000000000 /* 1.0 */
+ .quad 0x3ff0000000000000 /* 1.0 */
fphalf:
- .tc FD_3fe00000_0[TC],0x3fe0000000000000 /* 0.5 */
+ .quad 0x3fe0000000000000 /* 0.5 */
-#define LDCONST(fr, name) \
- lfd fr,name@toc(r2)
+#ifdef CONFIG_PPC_KERNEL_PCREL
+#define LDCONST(fr, name) \
+ pla r11,name@pcrel; \
+ lfd fr,0(r11)
+#else
+#define LDCONST(fr, name) \
+ addis r11,r2,name@toc@ha; \
+ lfd fr,name@toc@l(r11)
+#endif
#endif
-
.text
/*
* Internal routine to enable floating point and set FPSCR to 0.
* Don't call it from C; it doesn't use the normal calling convention.
*/
-fpenable:
+SYM_FUNC_START_LOCAL(fpenable)
#ifdef CONFIG_PPC32
stwu r1,-64(r1)
#else
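
The FP constants also leave the TOC: .tc entries presuppose a TOC pointer in r2, which CONFIG_PPC_KERNEL_PCREL builds (ELFv2 with Power10 prefixed instructions) no longer maintain, so the values become ordinary .quad data reachable either way. The address arithmetic behind the two 64-bit LDCONST variants, sketched in pseudo-C (HA()/LO() and the *_off names are notation only, not real symbols):

    /* TOC pair:  addis r11,r2,name@toc@ha ; lfd fr,name@toc@l(r11) */
    addr = r2 + (HA(name_toc_off) << 16) + LO(name_toc_off);
    /* PCREL:     pla r11,name@pcrel ; lfd fr,0(r11)  (34-bit PC-relative) */
    addr = pc + name_pcrel_off;
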
@@ -189,6 +211,7 @@ fpenable:
mffs fr31
MTFSF_L(fr1)
blr
+SYM_FUNC_END(fpenable)
fpdisable:
mtlr r12
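
SYM_FUNC_START_LOCAL()/SYM_FUNC_END() from <linux/linkage.h> replace the bare fpenable: label, giving the symbol proper ELF function type and size annotations while keeping it file-local; the externally visible entry points above continue to use _GLOBAL().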