author      Marc Zyngier <maz@kernel.org>    2025-01-07 22:59:41 +0000
committer   Will Deacon <will@kernel.org>    2025-01-08 13:41:06 +0000
commit      064737920bdbca86df91b96aed256e88018fef3a (patch)
tree        4ea4ac2da4f355b1719007f7396f9075fca28921
parent      d3c7c48d004f6c8d892f39b5d69884fd0fe98c81 (diff)
arm64: Filter out SVE hwcaps when FEAT_SVE isn't implemented
The hwcaps code that exposes SVE features to userspace only
considers ID_AA64ZFR0_EL1, but that register is only valid when
ID_AA64PFR0_EL1.SVE advertises that SVE is actually supported.
The expectation is that when ID_AA64PFR0_EL1.SVE is 0, the
ID_AA64ZFR0_EL1 register is also 0. So far, so good.
Things become a bit more interesting if the HW implements SME.
In this case, a few ID_AA64ZFR0_EL1 fields indicate *SME*
features. And these fields overlap with their SVE interpretations.
But the architecture says that the SME and SVE feature sets must
match, so we're still hunky-dory.
This goes wrong if the HW implements SME, but not SVE. In this
case, we end up advertising some SVE features to userspace, even
though the HW has none. That's because we never consider whether
SVE is actually implemented. Oh well.
Fix it by restricting all SVE capabilities to ID_AA64PFR0_EL1.SVE
being non-zero. The HWCAPS documentation is amended to reflect the
actual checks performed by the kernel.
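
A minimal userspace sketch (not part of this patch) of what the fix
guarantees: a program can now rely on no HWCAP2_SVE* bit being set
unless HWCAP_SVE itself is reported. It assumes an arm64 toolchain
where <asm/hwcap.h> defines HWCAP_SVE and HWCAP2_SVE2.

#include <stdio.h>
#include <sys/auxv.h>
#include <asm/hwcap.h>

int main(void)
{
	unsigned long hwcap  = getauxval(AT_HWCAP);
	unsigned long hwcap2 = getauxval(AT_HWCAP2);

	if (!(hwcap & HWCAP_SVE)) {
		/* With this fix, no HWCAP2_SVE* bit can be set here. */
		puts("SVE not implemented");
		return 0;
	}

	if (hwcap2 & HWCAP2_SVE2)
		puts("SVE2 available");
	else
		puts("base SVE only");

	return 0;
}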
Fixes: 06a916feca2b ("arm64: Expose SVE2 features for userspace")
Reported-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Marc Zyngier <maz@kernel.org>
Signed-off-by: Mark Brown <broonie@kernel.org>
Cc: Will Deacon <will@kernel.org>
Cc: Mark Rutland <mark.rutland@arm.com>
Cc: stable@vger.kernel.org
Reviewed-by: Mark Brown <broonie@kernel.org>
Link: https://lore.kernel.org/r/20250107-arm64-2024-dpisa-v5-1-7578da51fc3d@kernel.org
Signed-off-by: Will Deacon <will@kernel.org>
-rw-r--r--   Documentation/arch/arm64/elf_hwcaps.rst   39
-rw-r--r--   arch/arm64/kernel/cpufeature.c            40
2 files changed, 53 insertions, 26 deletions
diff --git a/Documentation/arch/arm64/elf_hwcaps.rst b/Documentation/arch/arm64/elf_hwcaps.rst
index 2ff922a406ad..1a31723e79fd 100644
--- a/Documentation/arch/arm64/elf_hwcaps.rst
+++ b/Documentation/arch/arm64/elf_hwcaps.rst
@@ -178,22 +178,28 @@ HWCAP2_DCPODP
     Functionality implied by ID_AA64ISAR1_EL1.DPB == 0b0010.
 
 HWCAP2_SVE2
-    Functionality implied by ID_AA64ZFR0_EL1.SVEver == 0b0001.
+    Functionality implied by ID_AA64PFR0_EL1.SVE == 0b0001 and
+    ID_AA64ZFR0_EL1.SVEver == 0b0001.
 
 HWCAP2_SVEAES
-    Functionality implied by ID_AA64ZFR0_EL1.AES == 0b0001.
+    Functionality implied by ID_AA64PFR0_EL1.SVE == 0b0001 and
+    ID_AA64ZFR0_EL1.AES == 0b0001.
 
 HWCAP2_SVEPMULL
-    Functionality implied by ID_AA64ZFR0_EL1.AES == 0b0010.
+    Functionality implied by ID_AA64PFR0_EL1.SVE == 0b0001 and
+    ID_AA64ZFR0_EL1.AES == 0b0010.
 
 HWCAP2_SVEBITPERM
-    Functionality implied by ID_AA64ZFR0_EL1.BitPerm == 0b0001.
+    Functionality implied by ID_AA64PFR0_EL1.SVE == 0b0001 and
+    ID_AA64ZFR0_EL1.BitPerm == 0b0001.
 
 HWCAP2_SVESHA3
-    Functionality implied by ID_AA64ZFR0_EL1.SHA3 == 0b0001.
+    Functionality implied by ID_AA64PFR0_EL1.SVE == 0b0001 and
+    ID_AA64ZFR0_EL1.SHA3 == 0b0001.
 
 HWCAP2_SVESM4
-    Functionality implied by ID_AA64ZFR0_EL1.SM4 == 0b0001.
+    Functionality implied by ID_AA64PFR0_EL1.SVE == 0b0001 and
+    ID_AA64ZFR0_EL1.SM4 == 0b0001.
 
 HWCAP2_FLAGM2
     Functionality implied by ID_AA64ISAR0_EL1.TS == 0b0010.
@@ -202,16 +208,20 @@ HWCAP2_FRINT
     Functionality implied by ID_AA64ISAR1_EL1.FRINTTS == 0b0001.
 
 HWCAP2_SVEI8MM
-    Functionality implied by ID_AA64ZFR0_EL1.I8MM == 0b0001.
+    Functionality implied by ID_AA64PFR0_EL1.SVE == 0b0001 and
+    ID_AA64ZFR0_EL1.I8MM == 0b0001.
 
 HWCAP2_SVEF32MM
-    Functionality implied by ID_AA64ZFR0_EL1.F32MM == 0b0001.
+    Functionality implied by ID_AA64PFR0_EL1.SVE == 0b0001 and
+    ID_AA64ZFR0_EL1.F32MM == 0b0001.
 
 HWCAP2_SVEF64MM
-    Functionality implied by ID_AA64ZFR0_EL1.F64MM == 0b0001.
+    Functionality implied by ID_AA64PFR0_EL1.SVE == 0b0001 and
+    ID_AA64ZFR0_EL1.F64MM == 0b0001.
 
 HWCAP2_SVEBF16
-    Functionality implied by ID_AA64ZFR0_EL1.BF16 == 0b0001.
+    Functionality implied by ID_AA64PFR0_EL1.SVE == 0b0001 and
+    ID_AA64ZFR0_EL1.BF16 == 0b0001.
 
 HWCAP2_I8MM
     Functionality implied by ID_AA64ISAR1_EL1.I8MM == 0b0001.
@@ -277,7 +287,8 @@ HWCAP2_EBF16
     Functionality implied by ID_AA64ISAR1_EL1.BF16 == 0b0010.
 
 HWCAP2_SVE_EBF16
-    Functionality implied by ID_AA64ZFR0_EL1.BF16 == 0b0010.
+    Functionality implied by ID_AA64PFR0_EL1.SVE == 0b0001 and
+    ID_AA64ZFR0_EL1.BF16 == 0b0010.
 
 HWCAP2_CSSC
     Functionality implied by ID_AA64ISAR2_EL1.CSSC == 0b0001.
@@ -286,7 +297,8 @@ HWCAP2_RPRFM
     Functionality implied by ID_AA64ISAR2_EL1.RPRFM == 0b0001.
 
 HWCAP2_SVE2P1
-    Functionality implied by ID_AA64ZFR0_EL1.SVEver == 0b0010.
+    Functionality implied by ID_AA64PFR0_EL1.SVE == 0b0001 and
+    ID_AA64ZFR0_EL1.SVEver == 0b0010.
 
 HWCAP2_SME2
     Functionality implied by ID_AA64SMFR0_EL1.SMEver == 0b0001.
@@ -313,7 +325,8 @@ HWCAP2_HBC
     Functionality implied by ID_AA64ISAR2_EL1.BC == 0b0001.
 
 HWCAP2_SVE_B16B16
-    Functionality implied by ID_AA64ZFR0_EL1.B16B16 == 0b0001.
+    Functionality implied by ID_AA64PFR0_EL1.SVE == 0b0001 and
+    ID_AA64ZFR0_EL1.B16B16 == 0b0001.
 
 HWCAP2_LRCPC3
     Functionality implied by ID_AA64ISAR1_EL1.LRCPC == 0b0011.
diff --git a/arch/arm64/kernel/cpufeature.c b/arch/arm64/kernel/cpufeature.c
index b105e6683571..678da3ffaef9 100644
--- a/arch/arm64/kernel/cpufeature.c
+++ b/arch/arm64/kernel/cpufeature.c
@@ -3008,6 +3008,13 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
 		.matches = match,						\
 	}
 
+#define HWCAP_CAP_MATCH_ID(match, reg, field, min_value, cap_type, cap)	\
+	{									\
+		__HWCAP_CAP(#cap, cap_type, cap)				\
+		HWCAP_CPUID_MATCH(reg, field, min_value)			\
+		.matches = match,						\
+	}
+
 #ifdef CONFIG_ARM64_PTR_AUTH
 static const struct arm64_cpu_capabilities ptr_auth_hwcap_addr_matches[] = {
 	{
@@ -3036,6 +3043,13 @@ static const struct arm64_cpu_capabilities ptr_auth_hwcap_gen_matches[] = {
 };
 #endif
 
+#ifdef CONFIG_ARM64_SVE
+static bool has_sve_feature(const struct arm64_cpu_capabilities *cap, int scope)
+{
+	return system_supports_sve() && has_user_cpuid_feature(cap, scope);
+}
+#endif
+
 static const struct arm64_cpu_capabilities arm64_elf_hwcaps[] = {
 	HWCAP_CAP(ID_AA64ISAR0_EL1, AES, PMULL, CAP_HWCAP, KERNEL_HWCAP_PMULL),
 	HWCAP_CAP(ID_AA64ISAR0_EL1, AES, AES, CAP_HWCAP, KERNEL_HWCAP_AES),
@@ -3078,19 +3092,19 @@ static const struct arm64_cpu_capabilities arm64_elf_hwcaps[] = {
 	HWCAP_CAP(ID_AA64MMFR2_EL1, AT, IMP, CAP_HWCAP, KERNEL_HWCAP_USCAT),
 #ifdef CONFIG_ARM64_SVE
 	HWCAP_CAP(ID_AA64PFR0_EL1, SVE, IMP, CAP_HWCAP, KERNEL_HWCAP_SVE),
-	HWCAP_CAP(ID_AA64ZFR0_EL1, SVEver, SVE2p1, CAP_HWCAP, KERNEL_HWCAP_SVE2P1),
-	HWCAP_CAP(ID_AA64ZFR0_EL1, SVEver, SVE2, CAP_HWCAP, KERNEL_HWCAP_SVE2),
-	HWCAP_CAP(ID_AA64ZFR0_EL1, AES, IMP, CAP_HWCAP, KERNEL_HWCAP_SVEAES),
-	HWCAP_CAP(ID_AA64ZFR0_EL1, AES, PMULL128, CAP_HWCAP, KERNEL_HWCAP_SVEPMULL),
-	HWCAP_CAP(ID_AA64ZFR0_EL1, BitPerm, IMP, CAP_HWCAP, KERNEL_HWCAP_SVEBITPERM),
-	HWCAP_CAP(ID_AA64ZFR0_EL1, B16B16, IMP, CAP_HWCAP, KERNEL_HWCAP_SVE_B16B16),
-	HWCAP_CAP(ID_AA64ZFR0_EL1, BF16, IMP, CAP_HWCAP, KERNEL_HWCAP_SVEBF16),
-	HWCAP_CAP(ID_AA64ZFR0_EL1, BF16, EBF16, CAP_HWCAP, KERNEL_HWCAP_SVE_EBF16),
-	HWCAP_CAP(ID_AA64ZFR0_EL1, SHA3, IMP, CAP_HWCAP, KERNEL_HWCAP_SVESHA3),
-	HWCAP_CAP(ID_AA64ZFR0_EL1, SM4, IMP, CAP_HWCAP, KERNEL_HWCAP_SVESM4),
-	HWCAP_CAP(ID_AA64ZFR0_EL1, I8MM, IMP, CAP_HWCAP, KERNEL_HWCAP_SVEI8MM),
-	HWCAP_CAP(ID_AA64ZFR0_EL1, F32MM, IMP, CAP_HWCAP, KERNEL_HWCAP_SVEF32MM),
-	HWCAP_CAP(ID_AA64ZFR0_EL1, F64MM, IMP, CAP_HWCAP, KERNEL_HWCAP_SVEF64MM),
+	HWCAP_CAP_MATCH_ID(has_sve_feature, ID_AA64ZFR0_EL1, SVEver, SVE2p1, CAP_HWCAP, KERNEL_HWCAP_SVE2P1),
+	HWCAP_CAP_MATCH_ID(has_sve_feature, ID_AA64ZFR0_EL1, SVEver, SVE2, CAP_HWCAP, KERNEL_HWCAP_SVE2),
+	HWCAP_CAP_MATCH_ID(has_sve_feature, ID_AA64ZFR0_EL1, AES, IMP, CAP_HWCAP, KERNEL_HWCAP_SVEAES),
+	HWCAP_CAP_MATCH_ID(has_sve_feature, ID_AA64ZFR0_EL1, AES, PMULL128, CAP_HWCAP, KERNEL_HWCAP_SVEPMULL),
+	HWCAP_CAP_MATCH_ID(has_sve_feature, ID_AA64ZFR0_EL1, BitPerm, IMP, CAP_HWCAP, KERNEL_HWCAP_SVEBITPERM),
+	HWCAP_CAP_MATCH_ID(has_sve_feature, ID_AA64ZFR0_EL1, B16B16, IMP, CAP_HWCAP, KERNEL_HWCAP_SVE_B16B16),
+	HWCAP_CAP_MATCH_ID(has_sve_feature, ID_AA64ZFR0_EL1, BF16, IMP, CAP_HWCAP, KERNEL_HWCAP_SVEBF16),
+	HWCAP_CAP_MATCH_ID(has_sve_feature, ID_AA64ZFR0_EL1, BF16, EBF16, CAP_HWCAP, KERNEL_HWCAP_SVE_EBF16),
+	HWCAP_CAP_MATCH_ID(has_sve_feature, ID_AA64ZFR0_EL1, SHA3, IMP, CAP_HWCAP, KERNEL_HWCAP_SVESHA3),
+	HWCAP_CAP_MATCH_ID(has_sve_feature, ID_AA64ZFR0_EL1, SM4, IMP, CAP_HWCAP, KERNEL_HWCAP_SVESM4),
+	HWCAP_CAP_MATCH_ID(has_sve_feature, ID_AA64ZFR0_EL1, I8MM, IMP, CAP_HWCAP, KERNEL_HWCAP_SVEI8MM),
+	HWCAP_CAP_MATCH_ID(has_sve_feature, ID_AA64ZFR0_EL1, F32MM, IMP, CAP_HWCAP, KERNEL_HWCAP_SVEF32MM),
+	HWCAP_CAP_MATCH_ID(has_sve_feature, ID_AA64ZFR0_EL1, F64MM, IMP, CAP_HWCAP, KERNEL_HWCAP_SVEF64MM),
 #endif
 #ifdef CONFIG_ARM64_GCS
 	HWCAP_CAP(ID_AA64PFR1_EL1, GCS, IMP, CAP_HWCAP, KERNEL_HWCAP_GCS),
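
A side note on the new HWCAP_CAP_MATCH_ID macro: it lists HWCAP_CPUID_MATCH()
first and then sets .matches = match, relying on the C rule that the last
designated initializer for a member wins. The standalone sketch below is not
kernel code, and assumes HWCAP_CPUID_MATCH() fills in a default .matches of
its own; it only demonstrates that override behaviour.

#include <stdio.h>

struct cap {
	int sys_reg;
	int (*matches)(void);
};

static int default_match(void)   { return 1; }
static int sve_gated_match(void) { return 0; }

/*
 * The same member initialized twice: the later initializer overrides the
 * earlier one. Compilers may warn under -Wextra, but the behaviour is
 * well defined.
 */
static const struct cap example = {
	.sys_reg = 42,
	.matches = default_match,	/* stands in for the macro's default matcher */
	.matches = sve_gated_match,	/* the trailing .matches = match */
};

int main(void)
{
	printf("matches == sve_gated_match: %d\n",
	       example.matches == sve_gated_match);
	return 0;
}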