author     Linus Torvalds <torvalds@linux-foundation.org>   2023-06-30 21:27:13 -0700
committer  Linus Torvalds <torvalds@linux-foundation.org>   2023-06-30 21:27:13 -0700
commit     5d95ff84e62be914b4a4dabfa814e4096b05b1b0 (patch)
tree       f2d79d562971025b29deab50bb06e3b865f185b3 /lib/crypto
parent     d85a143b69abb4d7544227e26d12c4c7735ab27d (diff)
parent     486bfb05913ac9969a3a71a4dc48f17f31cb162d (diff)
Merge tag 'v6.5-p1' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6
Pull crypto updates from Herbert Xu:
 "API:
   - Add linear akcipher/sig API
   - Add tfm cloning (hmac, cmac)
   - Add statesize to crypto_ahash

  Algorithms:
   - Allow only odd e and restrict value in FIPS mode for RSA
   - Replace LFSR with SHA3-256 in jitter
   - Add interface for gathering of raw entropy in jitter

  Drivers:
   - Fix race on data_avail and actual data in hwrng/virtio
   - Add hash and HMAC support in starfive
   - Add RSA algo support in starfive
   - Add support for PCI device 0x156E in ccp"

* tag 'v6.5-p1' of git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6: (85 commits)
  crypto: akcipher - Do not copy dst if it is NULL
  crypto: sig - Fix verify call
  crypto: akcipher - Set request tfm on sync path
  crypto: sm2 - Provide sm2_compute_z_digest when sm2 is disabled
  hwrng: imx-rngc - switch to DEFINE_SIMPLE_DEV_PM_OPS
  hwrng: st - keep clock enabled while hwrng is registered
  hwrng: st - support compile-testing
  hwrng: imx-rngc - fix the timeout for init and self check
  KEYS: asymmetric: Use new crypto interface without scatterlists
  KEYS: asymmetric: Move sm2 code into x509_public_key
  KEYS: Add forward declaration in asymmetric-parser.h
  crypto: sig - Add interface for sign/verify
  crypto: akcipher - Add sync interface without SG lists
  crypto: cipher - On clone do crypto_mod_get()
  crypto: api - Add __crypto_alloc_tfmgfp
  crypto: api - Remove crypto_init_ops()
  crypto: rsa - allow only odd e and restrict value in FIPS mode
  crypto: geniv - Split geniv out of AEAD Kconfig option
  crypto: algboss - Add missing dependency on RNG2
  crypto: starfive - Add RSA algo support
  ...
Diffstat (limited to 'lib/crypto')
-rw-r--r--  lib/crypto/sha256.c  77
1 file changed, 18 insertions(+), 59 deletions(-)
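For orientation, a minimal sketch of how a kernel caller uses the exported lib/crypto SHA-256 helpers whose internals the diff below reworks. sha256_init() and SHA256_DIGEST_SIZE come from <crypto/sha2.h>; the example_digest() wrapper and its argument names are invented for illustration and are not part of this commit.

#include <crypto/sha2.h>

/* Illustrative wrapper only; not from this patch series. */
static void example_digest(const u8 *data, unsigned int len,
			   u8 digest[SHA256_DIGEST_SIZE])
{
	struct sha256_state sctx;

	sha256_init(&sctx);              /* load initial H0..H7, count = 0 */
	sha256_update(&sctx, data, len); /* may be called any number of times */
	sha256_final(&sctx, digest);     /* pad, append bit length, write 32 bytes */
}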
diff --git a/lib/crypto/sha256.c b/lib/crypto/sha256.c
index 72a4b0b1df28..3ac1ef8677db 100644
--- a/lib/crypto/sha256.c
+++ b/lib/crypto/sha256.c
@@ -11,12 +11,11 @@
* Copyright (c) 2014 Red Hat Inc.
*/
-#include <linux/bitops.h>
-#include <linux/export.h>
+#include <asm/unaligned.h>
+#include <crypto/sha256_base.h>
+#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/string.h>
-#include <crypto/sha2.h>
-#include <asm/unaligned.h>
static const u32 SHA256_K[] = {
0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,
@@ -119,80 +118,40 @@ static void sha256_transform(u32 *state, const u8 *input, u32 *W)
state[4] += e; state[5] += f; state[6] += g; state[7] += h;
}
-void sha256_update(struct sha256_state *sctx, const u8 *data, unsigned int len)
+static void sha256_transform_blocks(struct sha256_state *sctx,
+ const u8 *input, int blocks)
{
- unsigned int partial, done;
- const u8 *src;
u32 W[64];
- partial = sctx->count & 0x3f;
- sctx->count += len;
- done = 0;
- src = data;
-
- if ((partial + len) > 63) {
- if (partial) {
- done = -partial;
- memcpy(sctx->buf + partial, data, done + 64);
- src = sctx->buf;
- }
+ do {
+ sha256_transform(sctx->state, input, W);
+ input += SHA256_BLOCK_SIZE;
+ } while (--blocks);
- do {
- sha256_transform(sctx->state, src, W);
- done += 64;
- src = data + done;
- } while (done + 63 < len);
-
- memzero_explicit(W, sizeof(W));
-
- partial = 0;
- }
- memcpy(sctx->buf + partial, src, len - done);
+ memzero_explicit(W, sizeof(W));
}
-EXPORT_SYMBOL(sha256_update);
-void sha224_update(struct sha256_state *sctx, const u8 *data, unsigned int len)
+void sha256_update(struct sha256_state *sctx, const u8 *data, unsigned int len)
{
- sha256_update(sctx, data, len);
+ lib_sha256_base_do_update(sctx, data, len, sha256_transform_blocks);
}
-EXPORT_SYMBOL(sha224_update);
+EXPORT_SYMBOL(sha256_update);
-static void __sha256_final(struct sha256_state *sctx, u8 *out, int digest_words)
+static void __sha256_final(struct sha256_state *sctx, u8 *out, int digest_size)
{
- __be32 *dst = (__be32 *)out;
- __be64 bits;
- unsigned int index, pad_len;
- int i;
- static const u8 padding[64] = { 0x80, };
-
- /* Save number of bits */
- bits = cpu_to_be64(sctx->count << 3);
-
- /* Pad out to 56 mod 64. */
- index = sctx->count & 0x3f;
- pad_len = (index < 56) ? (56 - index) : ((64+56) - index);
- sha256_update(sctx, padding, pad_len);
-
- /* Append length (before padding) */
- sha256_update(sctx, (const u8 *)&bits, sizeof(bits));
-
- /* Store state in digest */
- for (i = 0; i < digest_words; i++)
- put_unaligned_be32(sctx->state[i], &dst[i]);
-
- /* Zeroize sensitive information. */
- memzero_explicit(sctx, sizeof(*sctx));
+ lib_sha256_base_do_finalize(sctx, sha256_transform_blocks);
+ lib_sha256_base_finish(sctx, out, digest_size);
}
void sha256_final(struct sha256_state *sctx, u8 *out)
{
- __sha256_final(sctx, out, 8);
+ __sha256_final(sctx, out, 32);
}
EXPORT_SYMBOL(sha256_final);
void sha224_final(struct sha256_state *sctx, u8 *out)
{
- __sha256_final(sctx, out, 7);
+ __sha256_final(sctx, out, 28);
}
EXPORT_SYMBOL(sha224_final);
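The refactor above drops the open-coded partial-block bookkeeping from sha256_update() and delegates it to the shared lib_sha256_base_do_update()/do_finalize() helpers, which hand whole 64-byte blocks to a block function such as sha256_transform_blocks(). Below is a small standalone sketch of that buffering pattern; the sketch_* names and types are invented, and only the shape of the logic is modeled on the code replaced in this diff.

#include <stddef.h>
#include <string.h>

#define BLOCK_SIZE 64

struct sketch_state {
	unsigned long long count;        /* total bytes fed in so far */
	unsigned char buf[BLOCK_SIZE];   /* holds an incomplete trailing block */
};

typedef void (block_fn)(struct sketch_state *st,
			const unsigned char *src, int blocks);

static void sketch_do_update(struct sketch_state *st,
			     const unsigned char *data, size_t len,
			     block_fn *fn)
{
	size_t partial = st->count % BLOCK_SIZE;

	st->count += len;

	/* Top up a previously buffered partial block and hash it first. */
	if (partial && partial + len >= BLOCK_SIZE) {
		size_t fill = BLOCK_SIZE - partial;

		memcpy(st->buf + partial, data, fill);
		fn(st, st->buf, 1);
		data += fill;
		len -= fill;
		partial = 0;
	}

	/* Hand all remaining whole blocks to the block function in one call. */
	if (len >= BLOCK_SIZE) {
		size_t blocks = len / BLOCK_SIZE;

		fn(st, data, (int)blocks);
		data += blocks * BLOCK_SIZE;
		len -= blocks * BLOCK_SIZE;
	}

	/* Stash any tail for the next update or for finalization. */
	memcpy(st->buf + partial, data, len);
}

Splitting the work this way appears to be the point of the change: the buffering, padding and finalization logic lives in one shared place, and a SHA-256 implementation (generic C here, or an arch-optimized variant) only has to supply a block function.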