author Eric Biggers <ebiggers@google.com> 2023-10-09 23:41:26 -0700
committer Herbert Xu <herbert@gondor.apana.org.au> 2023-10-20 13:39:26 +0800
commit 455951b5e172bbb0eb1d54d4fd9c1cfef732e210 (patch)
tree 9717c619fd976e6f40da007b764a69a944004e1f /arch/arm64/crypto
parent 5f720a3df346548db69fe3bc0bef44a16eda4513 (diff)
crypto: arm64/sha256 - clean up backwards function names
In the Linux kernel, a function whose name has two leading underscores is conventionally called by the same-named function without leading underscores -- not the other way around. __sha256_block_data_order() and __sha256_block_neon() got this backwards. Fix this, albeit without changing the names in the perlasm since that is OpenSSL code. No change in behavior.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
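For readers less familiar with the convention being referenced: the double-underscore name marks the internal worker, and the plainly named function is the one that calls it. A minimal sketch of the pattern, using hypothetical names that are not taken from this driver:

#include <linux/types.h>

/* Internal worker: the double-underscore variant does the actual work. */
static void __frob_buffer(u8 *buf, unsigned int len)
{
	unsigned int i;

	for (i = 0; i < len; i++)
		buf[i] ^= 0xff;
}

/* Public wrapper: the same-named function without the underscores is the
 * caller of __frob_buffer(), not the other way around.
 */
static void frob_buffer(u8 *buf, unsigned int len)
{
	if (len)
		__frob_buffer(buf, len);
}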
Diffstat (limited to 'arch/arm64/crypto')
-rw-r--r--  arch/arm64/crypto/sha256-glue.c | 26
1 file changed, 12 insertions(+), 14 deletions(-)
diff --git a/arch/arm64/crypto/sha256-glue.c b/arch/arm64/crypto/sha256-glue.c
index 9b5c86e07a9a..35356987cc1e 100644
--- a/arch/arm64/crypto/sha256-glue.c
+++ b/arch/arm64/crypto/sha256-glue.c
@@ -27,8 +27,8 @@ asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
unsigned int num_blks);
EXPORT_SYMBOL(sha256_block_data_order);
-static void __sha256_block_data_order(struct sha256_state *sst, u8 const *src,
- int blocks)
+static void sha256_arm64_transform(struct sha256_state *sst, u8 const *src,
+ int blocks)
{
sha256_block_data_order(sst->state, src, blocks);
}
@@ -36,8 +36,8 @@ static void __sha256_block_data_order(struct sha256_state *sst, u8 const *src,
asmlinkage void sha256_block_neon(u32 *digest, const void *data,
unsigned int num_blks);
-static void __sha256_block_neon(struct sha256_state *sst, u8 const *src,
- int blocks)
+static void sha256_neon_transform(struct sha256_state *sst, u8 const *src,
+ int blocks)
{
sha256_block_neon(sst->state, src, blocks);
}
@@ -45,17 +45,15 @@ static void __sha256_block_neon(struct sha256_state *sst, u8 const *src,
static int crypto_sha256_arm64_update(struct shash_desc *desc, const u8 *data,
unsigned int len)
{
- return sha256_base_do_update(desc, data, len,
- __sha256_block_data_order);
+ return sha256_base_do_update(desc, data, len, sha256_arm64_transform);
}
static int crypto_sha256_arm64_finup(struct shash_desc *desc, const u8 *data,
unsigned int len, u8 *out)
{
if (len)
- sha256_base_do_update(desc, data, len,
- __sha256_block_data_order);
- sha256_base_do_finalize(desc, __sha256_block_data_order);
+ sha256_base_do_update(desc, data, len, sha256_arm64_transform);
+ sha256_base_do_finalize(desc, sha256_arm64_transform);
return sha256_base_finish(desc, out);
}
@@ -98,7 +96,7 @@ static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
if (!crypto_simd_usable())
return sha256_base_do_update(desc, data, len,
- __sha256_block_data_order);
+ sha256_arm64_transform);
while (len > 0) {
unsigned int chunk = len;
@@ -114,7 +112,7 @@ static int sha256_update_neon(struct shash_desc *desc, const u8 *data,
sctx->count % SHA256_BLOCK_SIZE;
kernel_neon_begin();
- sha256_base_do_update(desc, data, chunk, __sha256_block_neon);
+ sha256_base_do_update(desc, data, chunk, sha256_neon_transform);
kernel_neon_end();
data += chunk;
len -= chunk;
@@ -128,13 +126,13 @@ static int sha256_finup_neon(struct shash_desc *desc, const u8 *data,
if (!crypto_simd_usable()) {
if (len)
sha256_base_do_update(desc, data, len,
- __sha256_block_data_order);
- sha256_base_do_finalize(desc, __sha256_block_data_order);
+ sha256_arm64_transform);
+ sha256_base_do_finalize(desc, sha256_arm64_transform);
} else {
if (len)
sha256_update_neon(desc, data, len);
kernel_neon_begin();
- sha256_base_do_finalize(desc, __sha256_block_neon);
+ sha256_base_do_finalize(desc, sha256_neon_transform);
kernel_neon_end();
}
return sha256_base_finish(desc, out);
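Why thin C wrappers are needed at all: the sha256_base_do_update()/sha256_base_do_finalize() helpers take a block-transform callback operating on a struct sha256_state, whereas the perlasm routines take raw digest words. The callback type, paraphrased from include/crypto/sha256_base.h rather than quoted from this diff, is roughly:

typedef void (sha256_block_fn)(struct sha256_state *sst, u8 const *src,
			       int blocks);

So sha256_arm64_transform() and sha256_neon_transform() exist only to adapt sha256_block_data_order() and sha256_block_neon() to that callback signature.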