author | Eric Biggers <ebiggers@google.com> | 2025-04-28 10:00:28 -0700
---|---|---
committer | Herbert Xu <herbert@gondor.apana.org.au> | 2025-05-05 18:20:43 +0800
commit | 642cfc0680ff9aae73cd87d6fffcc84d9434938b (patch) |
tree | 90380c305eff07383453bd8aa8926e1599a5a5d0 |
parent | ca4477e41c68b58043e67bc78074cd6fcc59ee5e (diff) |
crypto: arm64/sha256 - remove obsolete chunking logic
Since kernel-mode NEON sections are now preemptible on arm64, there is
no longer any need to limit their length.
Reviewed-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
-rw-r--r-- | arch/arm64/crypto/sha256-glue.c | 19 |
1 file changed, 2 insertions(+), 17 deletions(-)
```diff
diff --git a/arch/arm64/crypto/sha256-glue.c b/arch/arm64/crypto/sha256-glue.c
index 26f9fdfae87b..d63ea82e1374 100644
--- a/arch/arm64/crypto/sha256-glue.c
+++ b/arch/arm64/crypto/sha256-glue.c
@@ -86,23 +86,8 @@ static struct shash_alg algs[] = { {
 static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
 			      unsigned int len)
 {
-	do {
-		unsigned int chunk = len;
-
-		/*
-		 * Don't hog the CPU for the entire time it takes to process all
-		 * input when running on a preemptible kernel, but process the
-		 * data block by block instead.
-		 */
-		if (IS_ENABLED(CONFIG_PREEMPTION))
-			chunk = SHA256_BLOCK_SIZE;
-
-		chunk -= sha256_base_do_update_blocks(desc, data, chunk,
-						      sha256_neon_transform);
-		data += chunk;
-		len -= chunk;
-	} while (len >= SHA256_BLOCK_SIZE);
-	return len;
+	return sha256_base_do_update_blocks(desc, data, len,
+					    sha256_neon_transform);
 }
 
 static int sha256_finup_neon(struct shash_desc *desc, const u8 *data,
```
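For readers unfamiliar with the pattern being removed, below is a minimal, self-contained C sketch of the old chunked update loop versus the simplified call. This is not the kernel's actual glue code: `do_blocks()` and `BLOCK_SIZE` are placeholder stand-ins for `sha256_base_do_update_blocks()` and `SHA256_BLOCK_SIZE`, and the sketch assumes (as the shash layer guarantees for the real code) that callers pass at least one full block.

```c
#include <linux/kernel.h>
#include <linux/types.h>

#define BLOCK_SIZE 64	/* SHA-256 block size in bytes */

/*
 * Placeholder for sha256_base_do_update_blocks(): processes as many whole
 * blocks as possible inside a kernel-mode NEON section and returns the
 * number of leftover bytes that did not fill a block.
 */
static unsigned int do_blocks(const u8 *data, unsigned int len)
{
	return len % BLOCK_SIZE;
}

/*
 * Old pattern: on preemptible kernels, cap each NEON section at one block
 * so a long input does not monopolize the CPU, because kernel-mode NEON
 * sections could not themselves be preempted.
 */
static unsigned int update_chunked(const u8 *data, unsigned int len)
{
	do {
		unsigned int chunk = len;

		if (IS_ENABLED(CONFIG_PREEMPTION))
			chunk = BLOCK_SIZE;

		chunk -= do_blocks(data, chunk);
		data += chunk;
		len -= chunk;
	} while (len >= BLOCK_SIZE);
	return len;
}

/*
 * New pattern: arm64 kernel-mode NEON sections are preemptible, so the
 * whole input can be handed to the block-processing helper in one call.
 */
static unsigned int update_simple(const u8 *data, unsigned int len)
{
	return do_blocks(data, len);
}
```

The patch above amounts to replacing the `update_chunked()` shape with the `update_simple()` shape, using the real helper and block-size names.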