summaryrefslogtreecommitdiff
path: root/crypto/shash.c
diff options
context:
space:
mode:
author	Eric Biggers <ebiggers@google.com>	2023-10-09 00:32:13 -0700
committer	Herbert Xu <herbert@gondor.apana.org.au>	2023-10-20 13:39:25 +0800
commit	313a4074d78fc9b90c93c9298e9f90d86a144231 (patch)
tree	4438c17ec79e16795e20606409cd910cec915d67	/crypto/shash.c
parent	bb40d32689d73c46de39a0529d551f523f21dc9b (diff)
crypto: shash - optimize the default digest and finup
For an shash algorithm that doesn't implement ->digest, currently
crypto_shash_digest() with aligned input makes 5 indirect calls: 1 to
shash_digest_unaligned(), 1 to ->init, 2 to ->update ('alignmask + 1'
bytes, then the rest), then 1 to ->final.  This is true even if the
algorithm implements ->finup.  This is caused by an unnecessary fallback
to code meant to handle unaligned inputs.  In fact,
crypto_shash_digest() already does the needed alignment check earlier.
Therefore, optimize the number of indirect calls for aligned inputs to
3 when the algorithm implements ->finup.  It remains at 5 when the
algorithm implements neither ->finup nor ->digest.

Similarly, for an shash algorithm that doesn't implement ->finup,
currently crypto_shash_finup() with aligned input makes 4 indirect
calls: 1 to shash_finup_unaligned(), 2 to ->update, and 1 to ->final.
Optimize this to 3 calls.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'crypto/shash.c')
-rw-r--r--	crypto/shash.c	22
1 file changed, 20 insertions(+), 2 deletions(-)
diff --git a/crypto/shash.c b/crypto/shash.c
index 1fadb6b59bdc..d99dc2f94c65 100644
--- a/crypto/shash.c
+++ b/crypto/shash.c
@@ -191,6 +191,15 @@ static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
shash_final_unaligned(desc, out);
}
+/*
+ * Default ->finup for algorithms that provide only ->update and ->final:
+ * hash all of @data with a single ->update call, then finalize into @out.
+ * Installed by shash_prepare_alg() when ->finup is NULL; per the commit
+ * message, this cuts aligned-input crypto_shash_finup() from 4 indirect
+ * calls to 3 by skipping the unaligned-input fallback path.
+ * Returns 0 on success or the first failing callback's error code
+ * (the GNU "?:" short-circuits on a non-zero ->update result).
+ */
+static int shash_default_finup(struct shash_desc *desc, const u8 *data,
+ unsigned int len, u8 *out)
+{
+ struct shash_alg *shash = crypto_shash_alg(desc->tfm);
+
+ return shash->update(desc, data, len) ?:
+ shash->final(desc, out);
+}
+
int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
unsigned int len, u8 *out)
{
@@ -224,6 +233,15 @@ static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
shash_final_unaligned(desc, out);
}
+/*
+ * Default ->digest for algorithms that don't supply one: ->init followed
+ * by ->finup over all of @data, writing the digest to @out.  Installed by
+ * shash_prepare_alg() when ->digest is NULL; per the commit message this
+ * reduces aligned-input crypto_shash_digest() from 5 indirect calls to 3
+ * when the algorithm implements ->finup (5 remain if ->finup also falls
+ * back, since the ->finup default is then used underneath).
+ * Returns 0 on success or the first failing callback's error code.
+ */
+static int shash_default_digest(struct shash_desc *desc, const u8 *data,
+ unsigned int len, u8 *out)
+{
+ struct shash_alg *shash = crypto_shash_alg(desc->tfm);
+
+ return shash->init(desc) ?:
+ shash->finup(desc, data, len, out);
+}
+
int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
unsigned int len, u8 *out)
{
@@ -656,9 +674,9 @@ static int shash_prepare_alg(struct shash_alg *alg)
base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
if (!alg->finup)
- alg->finup = shash_finup_unaligned;
+ alg->finup = shash_default_finup;
if (!alg->digest)
- alg->digest = shash_digest_unaligned;
+ alg->digest = shash_default_digest;
if (!alg->export) {
alg->export = shash_default_export;
alg->import = shash_default_import;