author	Mark Brown <broonie@kernel.org>	2021-10-19 18:22:13 +0100
committer	Will Deacon <will@kernel.org>	2021-10-21 10:18:17 +0100
commit	ddc806b5c4752d35bdaa4dfa2aaa72785711a3da (patch)
tree	7902423b7289171ea41c732e7077f5dd0d17a510 /arch/arm64/include/asm/fpsimdmacros.h
parent	b5bc00ffddc08c20a799514cbcfd2abaa6718014 (diff)
arm64/sve: Explicitly load vector length when restoring SVE state
Currently when restoring the SVE state we supply the SVE vector length as an argument to sve_load_state() and the underlying macros. This becomes inconvenient with the addition of SME since we may need to restore any combination of SVE and SME vector lengths, and we already separately restore the vector length in the KVM code. We don't need to know the vector length during the actual register load since the SME load instructions can index into the data array for us.

Refactor the interface so we explicitly set the vector length separately from restoring the SVE registers, in preparation for adding SME support; no functional change should be involved.

Signed-off-by: Mark Brown <broonie@kernel.org>
Link: https://lore.kernel.org/r/20211019172247.3045838-9-broonie@kernel.org
Signed-off-by: Will Deacon <will@kernel.org>
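For illustration, the now-removed wrapper at the bottom of this diff shows how the two steps used to be combined: sve_load set the vector length via sve_load_vq and then called __sve_load. A minimal caller-side sketch of the resulting change (the register assignments here are illustrative, not taken from this patch):

	// before: one call, vector length passed in and set inside the macro
	sve_load	0, x1, x2, x3, 4, x5

	// after: set the vector length explicitly, then restore the registers
	sve_load_vq	x3, x4, x5
	sve_load	0, x1, x2, 4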
Diffstat (limited to 'arch/arm64/include/asm/fpsimdmacros.h')
-rw-r--r--	arch/arm64/include/asm/fpsimdmacros.h	7
1 file changed, 1 insertion(+), 6 deletions(-)
diff --git a/arch/arm64/include/asm/fpsimdmacros.h b/arch/arm64/include/asm/fpsimdmacros.h
index e5ffd8b265b6..2509d7dde55a 100644
--- a/arch/arm64/include/asm/fpsimdmacros.h
+++ b/arch/arm64/include/asm/fpsimdmacros.h
@@ -241,7 +241,7 @@
 	str	w\nxtmp, [\xpfpsr, #4]
 .endm
 
-.macro __sve_load nxbase, xpfpsr, restore_ffr, nxtmp
+.macro sve_load nxbase, xpfpsr, restore_ffr, nxtmp
 	_for	n, 0, 31,	_sve_ldr_v	\n, \nxbase, \n - 34
 	cbz	\restore_ffr, 921f
 	_sve_ldr_p	0, \nxbase
@@ -254,8 +254,3 @@
 	ldr	w\nxtmp, [\xpfpsr, #4]
 	msr	fpcr, x\nxtmp
 .endm
-
-.macro sve_load nxbase, xpfpsr, restore_ffr, xvqminus1, nxtmp, xtmp2
-	sve_load_vq	\xvqminus1, x\nxtmp, \xtmp2
-	__sve_load	\nxbase, \xpfpsr, \restore_ffr, \nxtmp
-.endm