author    Linus Torvalds <torvalds@linux-foundation.org>  2025-05-26 13:32:06 -0700
committer Linus Torvalds <torvalds@linux-foundation.org>  2025-05-26 13:32:06 -0700
commit    15d90a5e5524532b7456a24f4626cf28c1629c4c (patch)
tree      9f819198d724af6fd0c46fda0013a29ede2c16a0 /arch/arm/lib/crc32.c
parent    14f19dc6440f23f417c83207c117b54698aa3934 (diff)
parent    289c99bec7eed918ab37c62cbb29a2e3f58fb1fb (diff)
Merge tag 'crc-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/ebiggers/linux
Pull CRC updates from Eric Biggers:
 "Cleanups for the kernel's CRC (cyclic redundancy check) code:

   - Use __ro_after_init where appropriate
   - Remove unnecessary static_key on s390
   - Rename some source code files
   - Rename the crc32 and crc32c crypto API modules
   - Use subsys_initcall instead of arch_initcall
   - Restore maintainers for crc_kunit.c
   - Fold crc16_byte() into crc16.c
   - Add some SPDX license identifiers"

* tag 'crc-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/ebiggers/linux:
  lib/crc32: add SPDX license identifier
  lib/crc16: unexport crc16_table and crc16_byte()
  w1: ds2406: use crc16() instead of crc16_byte() loop
  MAINTAINERS: add crc_kunit.c back to CRC LIBRARY
  lib/crc: make arch-optimized code use subsys_initcall
  crypto: crc32 - remove "generic" from file and module names
  x86/crc: drop "glue" from filenames
  sparc/crc: drop "glue" from filenames
  s390/crc: drop "glue" from filenames
  powerpc/crc: rename crc32-vpmsum_core.S to crc-vpmsum-template.S
  powerpc/crc: drop "glue" from filenames
  arm64/crc: drop "glue" from filenames
  arm/crc: drop "glue" from filenames
  s390/crc32: Remove no-op module init and exit functions
  s390/crc32: Remove have_vxrs static key
  lib/crc: make the CPU feature static keys __ro_after_init
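The "w1: ds2406: use crc16() instead of crc16_byte() loop" item above replaces a driver-local byte loop with one call into lib/crc16, which is what lets the series unexport crc16_table and crc16_byte(). A minimal before/after sketch of that pattern, not the actual ds2406 code; buf and len are placeholder names:

	#include <linux/crc16.h>

	/* Before: iterate with the exported per-byte helper. */
	u16 crc = 0;
	size_t i;

	for (i = 0; i < len; i++)
		crc = crc16_byte(crc, buf[i]);

	/* After: a single crc16() call; the lookup table stays private
	 * to lib/crc16.c. */
	u16 crc = crc16(0, buf, len);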
Diffstat (limited to 'arch/arm/lib/crc32.c')
-rw-r--r--  arch/arm/lib/crc32.c | 123
1 file changed, 123 insertions(+), 0 deletions(-)
diff --git a/arch/arm/lib/crc32.c b/arch/arm/lib/crc32.c
new file mode 100644
index 000000000000..f2bef8849c7c
--- /dev/null
+++ b/arch/arm/lib/crc32.c
@@ -0,0 +1,123 @@
+// SPDX-License-Identifier: GPL-2.0-only
+/*
+ * Accelerated CRC32(C) using ARM CRC, NEON and Crypto Extensions instructions
+ *
+ * Copyright (C) 2016 Linaro Ltd <ard.biesheuvel@linaro.org>
+ */
+
+#include <linux/cpufeature.h>
+#include <linux/crc32.h>
+#include <linux/init.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/string.h>
+
+#include <crypto/internal/simd.h>
+
+#include <asm/hwcap.h>
+#include <asm/neon.h>
+#include <asm/simd.h>
+
+static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_crc32);
+static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_pmull);
+
+#define PMULL_MIN_LEN 64 /* min size of buffer for pmull functions */
+
+asmlinkage u32 crc32_pmull_le(const u8 buf[], u32 len, u32 init_crc);
+asmlinkage u32 crc32_armv8_le(u32 init_crc, const u8 buf[], u32 len);
+
+asmlinkage u32 crc32c_pmull_le(const u8 buf[], u32 len, u32 init_crc);
+asmlinkage u32 crc32c_armv8_le(u32 init_crc, const u8 buf[], u32 len);
+
+static u32 crc32_le_scalar(u32 crc, const u8 *p, size_t len)
+{
+ if (static_branch_likely(&have_crc32))
+ return crc32_armv8_le(crc, p, len);
+ return crc32_le_base(crc, p, len);
+}
+
+u32 crc32_le_arch(u32 crc, const u8 *p, size_t len)
+{
+ if (len >= PMULL_MIN_LEN + 15 &&
+ static_branch_likely(&have_pmull) && crypto_simd_usable()) {
+ size_t n = -(uintptr_t)p & 15;
+
+ /* align p to 16-byte boundary */
+ if (n) {
+ crc = crc32_le_scalar(crc, p, n);
+ p += n;
+ len -= n;
+ }
+ n = round_down(len, 16);
+ kernel_neon_begin();
+ crc = crc32_pmull_le(p, n, crc);
+ kernel_neon_end();
+ p += n;
+ len -= n;
+ }
+ return crc32_le_scalar(crc, p, len);
+}
+EXPORT_SYMBOL(crc32_le_arch);
+
+static u32 crc32c_scalar(u32 crc, const u8 *p, size_t len)
+{
+ if (static_branch_likely(&have_crc32))
+ return crc32c_armv8_le(crc, p, len);
+ return crc32c_base(crc, p, len);
+}
+
+u32 crc32c_arch(u32 crc, const u8 *p, size_t len)
+{
+ if (len >= PMULL_MIN_LEN + 15 &&
+ static_branch_likely(&have_pmull) && crypto_simd_usable()) {
+ size_t n = -(uintptr_t)p & 15;
+
+ /* align p to 16-byte boundary */
+ if (n) {
+ crc = crc32c_scalar(crc, p, n);
+ p += n;
+ len -= n;
+ }
+ n = round_down(len, 16);
+ kernel_neon_begin();
+ crc = crc32c_pmull_le(p, n, crc);
+ kernel_neon_end();
+ p += n;
+ len -= n;
+ }
+ return crc32c_scalar(crc, p, len);
+}
+EXPORT_SYMBOL(crc32c_arch);
+
+u32 crc32_be_arch(u32 crc, const u8 *p, size_t len)
+{
+ return crc32_be_base(crc, p, len);
+}
+EXPORT_SYMBOL(crc32_be_arch);
+
+static int __init crc32_arm_init(void)
+{
+ if (elf_hwcap2 & HWCAP2_CRC32)
+ static_branch_enable(&have_crc32);
+ if (elf_hwcap2 & HWCAP2_PMULL)
+ static_branch_enable(&have_pmull);
+ return 0;
+}
+subsys_initcall(crc32_arm_init);
+
+static void __exit crc32_arm_exit(void)
+{
+}
+module_exit(crc32_arm_exit);
+
+u32 crc32_optimizations(void)
+{
+ if (elf_hwcap2 & (HWCAP2_CRC32 | HWCAP2_PMULL))
+ return CRC32_LE_OPTIMIZATION | CRC32C_OPTIMIZATION;
+ return 0;
+}
+EXPORT_SYMBOL(crc32_optimizations);
+
+MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
+MODULE_DESCRIPTION("Accelerated CRC32(C) using ARM CRC, NEON and Crypto Extensions");
+MODULE_LICENSE("GPL v2");
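For reference, nothing calls crc32_le_arch() or crc32c_arch() directly: consumers go through the generic helpers declared in <linux/crc32.h>, which dispatch to these arch hooks when the architecture provides them. A minimal sketch of a consumer module (hypothetical demo code, not part of this commit; seeding with ~0 and inverting the crc32c result follow common convention, adjust for the protocol at hand):

	#include <linux/crc32.h>
	#include <linux/init.h>
	#include <linux/module.h>

	static int __init crc_demo_init(void)
	{
		static const u8 data[] = "hello";
		u32 le, c;

		/* Both helpers resolve to the arch implementations above
		 * when the CPU advertises HWCAP2_CRC32/HWCAP2_PMULL. */
		le = crc32_le(~0, data, sizeof(data) - 1);
		c  = ~crc32c(~0, data, sizeof(data) - 1);
		pr_info("crc32_le=%08x crc32c=%08x\n", le, c);
		return 0;
	}
	module_init(crc_demo_init);

	MODULE_DESCRIPTION("crc32 library consumer sketch");
	MODULE_LICENSE("GPL");

Note that buffers shorter than PMULL_MIN_LEN + 15 bytes never enter the NEON path: for small inputs the alignment prologue plus kernel_neon_begin()/kernel_neon_end() overhead would cost more than the scalar CRC instructions save.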