Diffstat (limited to 'crypto/shash.c')
| -rw-r--r-- | crypto/shash.c | 819 |
1 file changed, 351 insertions, 468 deletions
diff --git a/crypto/shash.c b/crypto/shash.c
index 929058a68561..4721f5f134f4 100644
--- a/crypto/shash.c
+++ b/crypto/shash.c
@@ -1,173 +1,182 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
 /*
  * Synchronous Cryptographic Hash operations.
  *
  * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
- *
- * This program is free software; you can redistribute it and/or modify it
- * under the terms of the GNU General Public License as published by the Free
- * Software Foundation; either version 2 of the License, or (at your option)
- * any later version.
- *
  */
 
 #include <crypto/scatterwalk.h>
-#include <crypto/internal/hash.h>
+#include <linux/cryptouser.h>
 #include <linux/err.h>
 #include <linux/kernel.h>
 #include <linux/module.h>
-#include <linux/slab.h>
 #include <linux/seq_file.h>
-#include <linux/cryptouser.h>
+#include <linux/string.h>
 #include <net/netlink.h>
 
-#include "internal.h"
+#include "hash.h"
 
-static const struct crypto_type crypto_shash_type;
+static inline bool crypto_shash_block_only(struct crypto_shash *tfm)
+{
+	return crypto_shash_alg(tfm)->base.cra_flags &
+	       CRYPTO_AHASH_ALG_BLOCK_ONLY;
+}
 
-static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
-			   unsigned int keylen)
+static inline bool crypto_shash_final_nonzero(struct crypto_shash *tfm)
 {
-	return -ENOSYS;
+	return crypto_shash_alg(tfm)->base.cra_flags &
+	       CRYPTO_AHASH_ALG_FINAL_NONZERO;
 }
 
-static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
-				  unsigned int keylen)
+static inline bool crypto_shash_finup_max(struct crypto_shash *tfm)
 {
-	struct shash_alg *shash = crypto_shash_alg(tfm);
-	unsigned long alignmask = crypto_shash_alignmask(tfm);
-	unsigned long absize;
-	u8 *buffer, *alignbuffer;
-	int err;
+	return crypto_shash_alg(tfm)->base.cra_flags &
+	       CRYPTO_AHASH_ALG_FINUP_MAX;
+}
 
-	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
-	buffer = kmalloc(absize, GFP_KERNEL);
-	if (!buffer)
-		return -ENOMEM;
+int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
+		    unsigned int keylen)
+{
+	return -ENOSYS;
+}
+EXPORT_SYMBOL_GPL(shash_no_setkey);
 
-	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
-	memcpy(alignbuffer, key, keylen);
-	err = shash->setkey(tfm, alignbuffer, keylen);
-	kzfree(buffer);
-	return err;
+static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
+{
+	if (crypto_shash_alg_needs_key(alg))
+		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
 }
 
 int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
 			unsigned int keylen)
 {
 	struct shash_alg *shash = crypto_shash_alg(tfm);
-	unsigned long alignmask = crypto_shash_alignmask(tfm);
+	int err;
 
-	if ((unsigned long)key & alignmask)
-		return shash_setkey_unaligned(tfm, key, keylen);
+	err = shash->setkey(tfm, key, keylen);
+	if (unlikely(err)) {
+		shash_set_needkey(tfm, shash);
+		return err;
+	}
 
-	return shash->setkey(tfm, key, keylen);
+	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
+	return 0;
 }
 EXPORT_SYMBOL_GPL(crypto_shash_setkey);
 
-static inline unsigned int shash_align_buffer_size(unsigned len,
-						   unsigned long mask)
-{
-	return len + (mask & ~(__alignof__(u8 __attribute__ ((aligned))) - 1));
-}
-
-static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
-				  unsigned int len)
+static int __crypto_shash_init(struct shash_desc *desc)
 {
 	struct crypto_shash *tfm = desc->tfm;
-	struct shash_alg *shash = crypto_shash_alg(tfm);
-	unsigned long alignmask = crypto_shash_alignmask(tfm);
-	unsigned int unaligned_len = alignmask + 1 -
-				     ((unsigned long)data & alignmask);
-	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
-		__attribute__ ((aligned));
-	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
-	int err;
 
-	if (unaligned_len > len)
-		unaligned_len = len;
+	if (crypto_shash_block_only(tfm)) {
+		u8 *buf = shash_desc_ctx(desc);
 
-	memcpy(buf, data, unaligned_len);
-	err = shash->update(desc, buf, unaligned_len);
-	memset(buf, 0, unaligned_len);
+		buf += crypto_shash_descsize(tfm) - 1;
+		*buf = 0;
+	}
 
-	return err ?:
-	       shash->update(desc, data + unaligned_len, len - unaligned_len);
+	return crypto_shash_alg(tfm)->init(desc);
 }
 
-int crypto_shash_update(struct shash_desc *desc, const u8 *data,
-			unsigned int len)
+int crypto_shash_init(struct shash_desc *desc)
 {
-	struct crypto_shash *tfm = desc->tfm;
-	struct shash_alg *shash = crypto_shash_alg(tfm);
-	unsigned long alignmask = crypto_shash_alignmask(tfm);
+	if (crypto_shash_get_flags(desc->tfm) & CRYPTO_TFM_NEED_KEY)
+		return -ENOKEY;
+	return __crypto_shash_init(desc);
+}
+EXPORT_SYMBOL_GPL(crypto_shash_init);
 
-	if ((unsigned long)data & alignmask)
-		return shash_update_unaligned(desc, data, len);
+static int shash_default_finup(struct shash_desc *desc, const u8 *data,
+			       unsigned int len, u8 *out)
+{
+	struct shash_alg *shash = crypto_shash_alg(desc->tfm);
 
-	return shash->update(desc, data, len);
+	return shash->update(desc, data, len) ?:
+	       shash->final(desc, out);
 }
-EXPORT_SYMBOL_GPL(crypto_shash_update);
 
-static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
+static int crypto_shash_op_and_zero(
+	int (*op)(struct shash_desc *desc, const u8 *data,
+		  unsigned int len, u8 *out),
+	struct shash_desc *desc, const u8 *data, unsigned int len, u8 *out)
 {
-	struct crypto_shash *tfm = desc->tfm;
-	unsigned long alignmask = crypto_shash_alignmask(tfm);
-	struct shash_alg *shash = crypto_shash_alg(tfm);
-	unsigned int ds = crypto_shash_digestsize(tfm);
-	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
-		__attribute__ ((aligned));
-	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
 	int err;
 
-	err = shash->final(desc, buf);
-	if (err)
-		goto out;
-
-	memcpy(out, buf, ds);
-
-out:
-	memset(buf, 0, ds);
+	err = op(desc, data, len, out);
+	memset(shash_desc_ctx(desc), 0, crypto_shash_descsize(desc->tfm));
 	return err;
 }
 
-int crypto_shash_final(struct shash_desc *desc, u8 *out)
+int crypto_shash_finup(struct shash_desc *restrict desc, const u8 *data,
+		       unsigned int len, u8 *restrict out)
 {
 	struct crypto_shash *tfm = desc->tfm;
-	struct shash_alg *shash = crypto_shash_alg(tfm);
-	unsigned long alignmask = crypto_shash_alignmask(tfm);
+	u8 *blenp = shash_desc_ctx(desc);
+	bool finup_max, nonzero;
+	unsigned int bs;
+	int err;
+	u8 *buf;
 
-	if ((unsigned long)out & alignmask)
-		return shash_final_unaligned(desc, out);
+	if (!crypto_shash_block_only(tfm)) {
+		if (out)
+			goto finup;
+		return crypto_shash_alg(tfm)->update(desc, data, len);
+	}
 
-	return shash->final(desc, out);
-}
-EXPORT_SYMBOL_GPL(crypto_shash_final);
+	finup_max = out && crypto_shash_finup_max(tfm);
 
-static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
-				 unsigned int len, u8 *out)
-{
-	return crypto_shash_update(desc, data, len) ?:
-	       crypto_shash_final(desc, out);
-}
+	/* Retain extra block for final nonzero algorithms. */
+	nonzero = crypto_shash_final_nonzero(tfm);
 
-int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
-		       unsigned int len, u8 *out)
-{
-	struct crypto_shash *tfm = desc->tfm;
-	struct shash_alg *shash = crypto_shash_alg(tfm);
-	unsigned long alignmask = crypto_shash_alignmask(tfm);
+	/*
+	 * The partial block buffer follows the algorithm desc context.
+	 * The byte following that contains the length.
+	 */
+	blenp += crypto_shash_descsize(tfm) - 1;
+	bs = crypto_shash_blocksize(tfm);
+	buf = blenp - bs;
+
+	if (likely(!*blenp && finup_max))
+		goto finup;
+
+	while ((*blenp + len) >= bs + nonzero) {
+		unsigned int nbytes = len - nonzero;
+		const u8 *src = data;
+
+		if (*blenp) {
+			memcpy(buf + *blenp, data, bs - *blenp);
+			nbytes = bs;
+			src = buf;
+		}
+
+		err = crypto_shash_alg(tfm)->update(desc, src, nbytes);
+		if (err < 0)
+			return err;
+
+		data += nbytes - err - *blenp;
+		len -= nbytes - err - *blenp;
+		*blenp = 0;
+	}
 
-	if (((unsigned long)data | (unsigned long)out) & alignmask)
-		return shash_finup_unaligned(desc, data, len, out);
+	if (*blenp || !out) {
+		memcpy(buf + *blenp, data, len);
+		*blenp += len;
+		if (!out)
+			return 0;
+		data = buf;
+		len = *blenp;
+	}
 
-	return shash->finup(desc, data, len, out);
+finup:
+	return crypto_shash_op_and_zero(crypto_shash_alg(tfm)->finup, desc,
+					data, len, out);
 }
 EXPORT_SYMBOL_GPL(crypto_shash_finup);
 
-static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
-				  unsigned int len, u8 *out)
+static int shash_default_digest(struct shash_desc *desc, const u8 *data,
+				unsigned int len, u8 *out)
 {
-	return crypto_shash_init(desc) ?:
+	return __crypto_shash_init(desc) ?:
 	       crypto_shash_finup(desc, data, len, out);
 }
 
@@ -175,439 +184,336 @@ int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
 			unsigned int len, u8 *out)
 {
 	struct crypto_shash *tfm = desc->tfm;
-	struct shash_alg *shash = crypto_shash_alg(tfm);
-	unsigned long alignmask = crypto_shash_alignmask(tfm);
 
-	if (((unsigned long)data | (unsigned long)out) & alignmask)
-		return shash_digest_unaligned(desc, data, len, out);
+	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
+		return -ENOKEY;
 
-	return shash->digest(desc, data, len, out);
+	return crypto_shash_op_and_zero(crypto_shash_alg(tfm)->digest, desc,
+					data, len, out);
 }
 EXPORT_SYMBOL_GPL(crypto_shash_digest);
 
-static int shash_default_export(struct shash_desc *desc, void *out)
+int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
+			    unsigned int len, u8 *out)
 {
-	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
-	return 0;
-}
+	SHASH_DESC_ON_STACK(desc, tfm);
 
-static int shash_default_import(struct shash_desc *desc, const void *in)
-{
-	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
-	return 0;
+	desc->tfm = tfm;
+	return crypto_shash_digest(desc, data, len, out);
 }
+EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
 
-static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
-			      unsigned int keylen)
+static int __crypto_shash_export(struct shash_desc *desc, void *out,
+				 int (*export)(struct shash_desc *desc,
+					       void *out))
 {
-	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);
+	struct crypto_shash *tfm = desc->tfm;
+	u8 *buf = shash_desc_ctx(desc);
+	unsigned int plen, ss;
+
+	plen = crypto_shash_blocksize(tfm) + 1;
+	ss = crypto_shash_statesize(tfm);
+	if (crypto_shash_block_only(tfm))
+		ss -= plen;
+	if (!export) {
+		memcpy(out, buf, ss);
+		return 0;
+	}
 
-	return crypto_shash_setkey(*ctx, key, keylen);
+	return export(desc, out);
 }
 
-static int shash_async_init(struct ahash_request *req)
+int crypto_shash_export_core(struct shash_desc *desc, void *out)
 {
-	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
-	struct shash_desc *desc = ahash_request_ctx(req);
-
-	desc->tfm = *ctx;
-	desc->flags = req->base.flags;
-
-	return crypto_shash_init(desc);
+	return __crypto_shash_export(desc, out,
+				     crypto_shash_alg(desc->tfm)->export_core);
 }
+EXPORT_SYMBOL_GPL(crypto_shash_export_core);
 
-int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
+int crypto_shash_export(struct shash_desc *desc, void *out)
 {
-	struct crypto_hash_walk walk;
-	int nbytes;
-
-	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
-	     nbytes = crypto_hash_walk_done(&walk, nbytes))
-		nbytes = crypto_shash_update(desc, walk.data, nbytes);
-
-	return nbytes;
-}
-EXPORT_SYMBOL_GPL(shash_ahash_update);
+	struct crypto_shash *tfm = desc->tfm;
 
-static int shash_async_update(struct ahash_request *req)
-{
-	return shash_ahash_update(req, ahash_request_ctx(req));
-}
+	if (crypto_shash_block_only(tfm)) {
+		unsigned int plen = crypto_shash_blocksize(tfm) + 1;
+		unsigned int descsize = crypto_shash_descsize(tfm);
+		unsigned int ss = crypto_shash_statesize(tfm);
+		u8 *buf = shash_desc_ctx(desc);
 
-static int shash_async_final(struct ahash_request *req)
-{
-	return crypto_shash_final(ahash_request_ctx(req), req->result);
+		memcpy(out + ss - plen, buf + descsize - plen, plen);
+	}
+	return __crypto_shash_export(desc, out, crypto_shash_alg(tfm)->export);
 }
+EXPORT_SYMBOL_GPL(crypto_shash_export);
 
-int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
+static int __crypto_shash_import(struct shash_desc *desc, const void *in,
+				 int (*import)(struct shash_desc *desc,
+					       const void *in))
 {
-	struct crypto_hash_walk walk;
-	int nbytes;
-
-	nbytes = crypto_hash_walk_first(req, &walk);
-	if (!nbytes)
-		return crypto_shash_final(desc, req->result);
-
-	do {
-		nbytes = crypto_hash_walk_last(&walk) ?
-			 crypto_shash_finup(desc, walk.data, nbytes,
-					    req->result) :
-			 crypto_shash_update(desc, walk.data, nbytes);
-		nbytes = crypto_hash_walk_done(&walk, nbytes);
-	} while (nbytes > 0);
+	struct crypto_shash *tfm = desc->tfm;
+	unsigned int descsize, plen, ss;
+	u8 *buf = shash_desc_ctx(desc);
+
+	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
+		return -ENOKEY;
+
+	ss = crypto_shash_statesize(tfm);
+	if (crypto_shash_block_only(tfm)) {
+		plen = crypto_shash_blocksize(tfm) + 1;
+		ss -= plen;
+		descsize = crypto_shash_descsize(tfm);
+		buf[descsize - 1] = 0;
+	}
+	if (!import) {
+		memcpy(buf, in, ss);
+		return 0;
+	}
 
-	return nbytes;
+	return import(desc, in);
 }
-EXPORT_SYMBOL_GPL(shash_ahash_finup);
 
-static int shash_async_finup(struct ahash_request *req)
+int crypto_shash_import_core(struct shash_desc *desc, const void *in)
 {
-	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
-	struct shash_desc *desc = ahash_request_ctx(req);
-
-	desc->tfm = *ctx;
-	desc->flags = req->base.flags;
-
-	return shash_ahash_finup(req, desc);
+	return __crypto_shash_import(desc, in,
+				     crypto_shash_alg(desc->tfm)->import_core);
 }
+EXPORT_SYMBOL_GPL(crypto_shash_import_core);
 
-int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
+int crypto_shash_import(struct shash_desc *desc, const void *in)
 {
-	struct scatterlist *sg = req->src;
-	unsigned int offset = sg->offset;
-	unsigned int nbytes = req->nbytes;
+	struct crypto_shash *tfm = desc->tfm;
 	int err;
 
-	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
-		void *data;
-
-		data = kmap_atomic(sg_page(sg));
-		err = crypto_shash_digest(desc, data + offset, nbytes,
-					  req->result);
-		kunmap_atomic(data);
-		crypto_yield(desc->flags);
-	} else
-		err = crypto_shash_init(desc) ?:
-		      shash_ahash_finup(req, desc);
+	err = __crypto_shash_import(desc, in, crypto_shash_alg(tfm)->import);
+	if (crypto_shash_block_only(tfm)) {
+		unsigned int plen = crypto_shash_blocksize(tfm) + 1;
+		unsigned int descsize = crypto_shash_descsize(tfm);
+		unsigned int ss = crypto_shash_statesize(tfm);
+		u8 *buf = shash_desc_ctx(desc);
 
+		memcpy(buf + descsize - plen, in + ss - plen, plen);
+		if (buf[descsize - 1] >= plen)
+			err = -EOVERFLOW;
+	}
 	return err;
 }
-EXPORT_SYMBOL_GPL(shash_ahash_digest);
+EXPORT_SYMBOL_GPL(crypto_shash_import);
 
-static int shash_async_digest(struct ahash_request *req)
+static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
 {
-	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
-	struct shash_desc *desc = ahash_request_ctx(req);
-
-	desc->tfm = *ctx;
-	desc->flags = req->base.flags;
+	struct crypto_shash *hash = __crypto_shash_cast(tfm);
+	struct shash_alg *alg = crypto_shash_alg(hash);
 
-	return shash_ahash_digest(req, desc);
+	alg->exit_tfm(hash);
 }
 
-static int shash_async_export(struct ahash_request *req, void *out)
+static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
 {
-	return crypto_shash_export(ahash_request_ctx(req), out);
-}
+	struct crypto_shash *hash = __crypto_shash_cast(tfm);
+	struct shash_alg *alg = crypto_shash_alg(hash);
 
-static int shash_async_import(struct ahash_request *req, const void *in)
-{
-	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
-	struct shash_desc *desc = ahash_request_ctx(req);
+	shash_set_needkey(hash, alg);
+
+	if (alg->exit_tfm)
+		tfm->exit = crypto_shash_exit_tfm;
 
-	desc->tfm = *ctx;
-	desc->flags = req->base.flags;
+	if (!alg->init_tfm)
+		return 0;
 
-	return crypto_shash_import(desc, in);
+	return alg->init_tfm(hash);
 }
 
-static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
+static void crypto_shash_free_instance(struct crypto_instance *inst)
 {
-	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
+	struct shash_instance *shash = shash_instance(inst);
 
-	crypto_free_shash(*ctx);
+	shash->free(shash);
 }
 
-int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
+static int __maybe_unused crypto_shash_report(
+	struct sk_buff *skb, struct crypto_alg *alg)
 {
-	struct crypto_alg *calg = tfm->__crt_alg;
-	struct shash_alg *alg = __crypto_shash_alg(calg);
-	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
-	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
-	struct crypto_shash *shash;
-
-	if (!crypto_mod_get(calg))
-		return -EAGAIN;
-
-	shash = crypto_create_tfm(calg, &crypto_shash_type);
-	if (IS_ERR(shash)) {
-		crypto_mod_put(calg);
-		return PTR_ERR(shash);
-	}
-
-	*ctx = shash;
-	tfm->exit = crypto_exit_shash_ops_async;
+	struct crypto_report_hash rhash;
+	struct shash_alg *salg = __crypto_shash_alg(alg);
 
-	crt->init = shash_async_init;
-	crt->update = shash_async_update;
-	crt->final = shash_async_final;
-	crt->finup = shash_async_finup;
-	crt->digest = shash_async_digest;
+	memset(&rhash, 0, sizeof(rhash));
 
-	if (alg->setkey)
-		crt->setkey = shash_async_setkey;
-	if (alg->export)
-		crt->export = shash_async_export;
-	if (alg->import)
-		crt->import = shash_async_import;
+	strscpy(rhash.type, "shash", sizeof(rhash.type));
 
-	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);
+	rhash.blocksize = alg->cra_blocksize;
+	rhash.digestsize = salg->digestsize;
 
-	return 0;
+	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
 }
 
-static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
-			       unsigned int keylen)
+static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
+	__maybe_unused;
+static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
 {
-	struct shash_desc **descp = crypto_hash_ctx(tfm);
-	struct shash_desc *desc = *descp;
+	struct shash_alg *salg = __crypto_shash_alg(alg);
 
-	return crypto_shash_setkey(desc->tfm, key, keylen);
+	seq_printf(m, "type         : shash\n");
+	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
+	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
 }
 
-static int shash_compat_init(struct hash_desc *hdesc)
-{
-	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
-	struct shash_desc *desc = *descp;
-
-	desc->flags = hdesc->flags;
+const struct crypto_type crypto_shash_type = {
+	.extsize = crypto_alg_extsize,
+	.init_tfm = crypto_shash_init_tfm,
+	.free = crypto_shash_free_instance,
+#ifdef CONFIG_PROC_FS
+	.show = crypto_shash_show,
+#endif
+#if IS_ENABLED(CONFIG_CRYPTO_USER)
+	.report = crypto_shash_report,
+#endif
+	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
+	.maskset = CRYPTO_ALG_TYPE_MASK,
+	.type = CRYPTO_ALG_TYPE_SHASH,
+	.tfmsize = offsetof(struct crypto_shash, base),
+	.algsize = offsetof(struct shash_alg, base),
+};
 
-	return crypto_shash_init(desc);
+int crypto_grab_shash(struct crypto_shash_spawn *spawn,
+		      struct crypto_instance *inst,
+		      const char *name, u32 type, u32 mask)
+{
+	spawn->base.frontend = &crypto_shash_type;
+	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
 }
+EXPORT_SYMBOL_GPL(crypto_grab_shash);
 
-static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
-			       unsigned int len)
+struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
+					u32 mask)
 {
-	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
-	struct shash_desc *desc = *descp;
-	struct crypto_hash_walk walk;
-	int nbytes;
-
-	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
-	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
-		nbytes = crypto_shash_update(desc, walk.data, nbytes);
-
-	return nbytes;
+	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
 }
+EXPORT_SYMBOL_GPL(crypto_alloc_shash);
 
-static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
+int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
 {
-	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
-
-	return crypto_shash_final(*descp, out);
+	return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
 }
+EXPORT_SYMBOL_GPL(crypto_has_shash);
 
-static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
-			       unsigned int nbytes, u8 *out)
+struct crypto_shash *crypto_clone_shash(struct crypto_shash *hash)
 {
-	unsigned int offset = sg->offset;
+	struct crypto_tfm *tfm = crypto_shash_tfm(hash);
+	struct shash_alg *alg = crypto_shash_alg(hash);
+	struct crypto_shash *nhash;
 	int err;
 
-	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
-		struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
-		struct shash_desc *desc = *descp;
-		void *data;
-
-		desc->flags = hdesc->flags;
+	if (!crypto_shash_alg_has_setkey(alg)) {
+		tfm = crypto_tfm_get(tfm);
+		if (IS_ERR(tfm))
+			return ERR_CAST(tfm);
 
-		data = kmap_atomic(sg_page(sg));
-		err = crypto_shash_digest(desc, data + offset, nbytes, out);
-		kunmap_atomic(data);
-		crypto_yield(desc->flags);
-		goto out;
+		return hash;
 	}
 
-	err = shash_compat_init(hdesc);
-	if (err)
-		goto out;
-
-	err = shash_compat_update(hdesc, sg, nbytes);
-	if (err)
-		goto out;
-
-	err = shash_compat_final(hdesc, out);
-
-out:
-	return err;
-}
-
-static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
-{
-	struct shash_desc **descp = crypto_tfm_ctx(tfm);
-	struct shash_desc *desc = *descp;
-
-	crypto_free_shash(desc->tfm);
-	kzfree(desc);
-}
-
-static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
-{
-	struct hash_tfm *crt = &tfm->crt_hash;
-	struct crypto_alg *calg = tfm->__crt_alg;
-	struct shash_alg *alg = __crypto_shash_alg(calg);
-	struct shash_desc **descp = crypto_tfm_ctx(tfm);
-	struct crypto_shash *shash;
-	struct shash_desc *desc;
-
-	if (!crypto_mod_get(calg))
-		return -EAGAIN;
+	if (!alg->clone_tfm && (alg->init_tfm || alg->base.cra_init))
+		return ERR_PTR(-ENOSYS);
 
-	shash = crypto_create_tfm(calg, &crypto_shash_type);
-	if (IS_ERR(shash)) {
-		crypto_mod_put(calg);
-		return PTR_ERR(shash);
-	}
+	nhash = crypto_clone_tfm(&crypto_shash_type, tfm);
+	if (IS_ERR(nhash))
+		return nhash;
 
-	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
-		       GFP_KERNEL);
-	if (!desc) {
-		crypto_free_shash(shash);
-		return -ENOMEM;
+	if (alg->clone_tfm) {
+		err = alg->clone_tfm(nhash, hash);
+		if (err) {
+			crypto_free_shash(nhash);
+			return ERR_PTR(err);
+		}
 	}
 
-	*descp = desc;
-	desc->tfm = shash;
-	tfm->exit = crypto_exit_shash_ops_compat;
-
-	crt->init = shash_compat_init;
-	crt->update = shash_compat_update;
-	crt->final = shash_compat_final;
-	crt->digest = shash_compat_digest;
-	crt->setkey = shash_compat_setkey;
+	if (alg->exit_tfm)
+		crypto_shash_tfm(nhash)->exit = crypto_shash_exit_tfm;
 
-	crt->digestsize = alg->digestsize;
-
-	return 0;
+	return nhash;
 }
+EXPORT_SYMBOL_GPL(crypto_clone_shash);
 
-static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
+int hash_prepare_alg(struct hash_alg_common *alg)
 {
-	switch (mask & CRYPTO_ALG_TYPE_MASK) {
-	case CRYPTO_ALG_TYPE_HASH_MASK:
-		return crypto_init_shash_ops_compat(tfm);
-	}
-
-	return -EINVAL;
-}
+	struct crypto_alg *base = &alg->base;
 
-static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
-					 u32 mask)
-{
-	switch (mask & CRYPTO_ALG_TYPE_MASK) {
-	case CRYPTO_ALG_TYPE_HASH_MASK:
-		return sizeof(struct shash_desc *);
-	}
+	if (alg->digestsize > HASH_MAX_DIGESTSIZE)
+		return -EINVAL;
 
-	return 0;
-}
+	/* alignmask is not useful for hashes, so it is not supported. */
+	if (base->cra_alignmask)
+		return -EINVAL;
 
-static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
-{
-	struct crypto_shash *hash = __crypto_shash_cast(tfm);
+	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
 
-	hash->descsize = crypto_shash_alg(hash)->descsize;
 	return 0;
 }
 
-static unsigned int crypto_shash_extsize(struct crypto_alg *alg)
-{
-	return alg->cra_ctxsize;
-}
-
-#ifdef CONFIG_NET
-static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
-{
-	struct crypto_report_hash rhash;
-	struct shash_alg *salg = __crypto_shash_alg(alg);
-
-	strncpy(rhash.type, "shash", sizeof(rhash.type));
-
-	rhash.blocksize = alg->cra_blocksize;
-	rhash.digestsize = salg->digestsize;
-
-	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
-		    sizeof(struct crypto_report_hash), &rhash))
-		goto nla_put_failure;
-	return 0;
-
-nla_put_failure:
-	return -EMSGSIZE;
-}
-#else
-static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
+static int shash_default_export_core(struct shash_desc *desc, void *out)
 {
 	return -ENOSYS;
 }
-#endif
 
-static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
-	__attribute__ ((unused));
-static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
-{
-	struct shash_alg *salg = __crypto_shash_alg(alg);
-
-	seq_printf(m, "type         : shash\n");
-	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
-	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
-}
-
-static const struct crypto_type crypto_shash_type = {
-	.ctxsize = crypto_shash_ctxsize,
-	.extsize = crypto_shash_extsize,
-	.init = crypto_init_shash_ops,
-	.init_tfm = crypto_shash_init_tfm,
-#ifdef CONFIG_PROC_FS
-	.show = crypto_shash_show,
-#endif
-	.report = crypto_shash_report,
-	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
-	.maskset = CRYPTO_ALG_TYPE_MASK,
-	.type = CRYPTO_ALG_TYPE_SHASH,
-	.tfmsize = offsetof(struct crypto_shash, base),
-};
-
-struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
-					u32 mask)
+static int shash_default_import_core(struct shash_desc *desc, const void *in)
 {
-	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
+	return -ENOSYS;
 }
-EXPORT_SYMBOL_GPL(crypto_alloc_shash);
 
 static int shash_prepare_alg(struct shash_alg *alg)
 {
-	struct crypto_alg *base = &alg->base;
+	struct crypto_alg *base = &alg->halg.base;
+	int err;
 
-	if (alg->digestsize > PAGE_SIZE / 8 ||
-	    alg->descsize > PAGE_SIZE / 8 ||
-	    alg->statesize > PAGE_SIZE / 8)
+	if ((alg->export && !alg->import) || (alg->import && !alg->export))
 		return -EINVAL;
 
+	err = hash_prepare_alg(&alg->halg);
+	if (err)
+		return err;
+
 	base->cra_type = &crypto_shash_type;
-	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
 	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
-
+	base->cra_flags |= CRYPTO_ALG_REQ_VIRT;
+
+	/*
+	 * Handle missing optional functions.  For each one we can either
+	 * install a default here, or we can leave the pointer as NULL and check
+	 * the pointer for NULL in crypto_shash_*(), avoiding an indirect call
+	 * when the default behavior is desired.  For ->finup and ->digest we
+	 * install defaults, since for optimal performance algorithms should
+	 * implement these anyway.  On the other hand, for ->import and
+	 * ->export the common case and best performance comes from the simple
+	 * memcpy of the shash_desc_ctx, so when those pointers are NULL we
+	 * leave them NULL and provide the memcpy with no indirect call.
+	 */
 	if (!alg->finup)
-		alg->finup = shash_finup_unaligned;
+		alg->finup = shash_default_finup;
 	if (!alg->digest)
-		alg->digest = shash_digest_unaligned;
-	if (!alg->export) {
-		alg->export = shash_default_export;
-		alg->import = shash_default_import;
-		alg->statesize = alg->descsize;
-	}
+		alg->digest = shash_default_digest;
+	if (!alg->export && !alg->halg.statesize)
+		alg->halg.statesize = alg->descsize;
 	if (!alg->setkey)
 		alg->setkey = shash_no_setkey;
 
+	if (base->cra_flags & CRYPTO_AHASH_ALG_BLOCK_ONLY) {
+		BUILD_BUG_ON(MAX_ALGAPI_BLOCKSIZE >= 256);
+		alg->descsize += base->cra_blocksize + 1;
+		alg->statesize += base->cra_blocksize + 1;
+		alg->export_core = alg->export;
+		alg->import_core = alg->import;
+	} else if (!alg->export_core || !alg->import_core) {
+		alg->export_core = shash_default_export_core;
+		alg->import_core = shash_default_import_core;
+		base->cra_flags |= CRYPTO_AHASH_ALG_NO_EXPORT_CORE;
+	}
+
+	if (alg->descsize > HASH_MAX_DESCSIZE)
+		return -EINVAL;
+	if (alg->statesize > HASH_MAX_STATESIZE)
+		return -EINVAL;
+
+	base->cra_reqsize = sizeof(struct shash_desc) + alg->descsize;
+
 	return 0;
 }
 
@@ -624,9 +530,9 @@ int crypto_register_shash(struct shash_alg *alg)
 }
 EXPORT_SYMBOL_GPL(crypto_register_shash);
 
-int crypto_unregister_shash(struct shash_alg *alg)
+void crypto_unregister_shash(struct shash_alg *alg)
 {
-	return crypto_unregister_alg(&alg->base);
+	crypto_unregister_alg(&alg->base);
 }
 EXPORT_SYMBOL_GPL(crypto_unregister_shash);
 
@@ -650,19 +556,12 @@ err:
 }
 EXPORT_SYMBOL_GPL(crypto_register_shashes);
 
-int crypto_unregister_shashes(struct shash_alg *algs, int count)
+void crypto_unregister_shashes(struct shash_alg *algs, int count)
 {
-	int i, ret;
+	int i;
 
-	for (i = count - 1; i >= 0; --i) {
-		ret = crypto_unregister_shash(&algs[i]);
-		if (ret)
-			pr_err("Failed to unregister %s %s: %d\n",
-			       algs[i].base.cra_driver_name,
-			       algs[i].base.cra_name, ret);
-	}
-
-	return 0;
+	for (i = count - 1; i >= 0; --i)
+		crypto_unregister_shash(&algs[i]);
 }
 EXPORT_SYMBOL_GPL(crypto_unregister_shashes);
 
@@ -671,6 +570,9 @@ int shash_register_instance(struct crypto_template *tmpl,
 {
 	int err;
 
+	if (WARN_ON(!inst->free))
+		return -EINVAL;
+
 	err = shash_prepare_alg(&inst->alg);
 	if (err)
 		return err;
@@ -679,31 +581,12 @@ int shash_register_instance(struct crypto_template *tmpl,
 }
 EXPORT_SYMBOL_GPL(shash_register_instance);
 
-void shash_free_instance(struct crypto_instance *inst)
+void shash_free_singlespawn_instance(struct shash_instance *inst)
 {
-	crypto_drop_spawn(crypto_instance_ctx(inst));
-	kfree(shash_instance(inst));
-}
-EXPORT_SYMBOL_GPL(shash_free_instance);
-
-int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
-			    struct shash_alg *alg,
-			    struct crypto_instance *inst)
-{
-	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
-				  &crypto_shash_type);
-}
-EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);
-
-struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
-{
-	struct crypto_alg *alg;
-
-	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
-	return IS_ERR(alg) ? ERR_CAST(alg) :
-	       container_of(alg, struct shash_alg, base);
+	crypto_drop_spawn(shash_instance_ctx(inst));
+	kfree(inst);
 }
-EXPORT_SYMBOL_GPL(shash_attr_alg);
+EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);
 
 MODULE_LICENSE("GPL");
 MODULE_DESCRIPTION("Synchronous cryptographic hash type");
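For orientation, below is a minimal sketch (not part of the commit) of how a kernel-side caller would drive the shash API this file implements, using the one-shot crypto_shash_tfm_digest() helper added above. The function name demo_sha256 and the choice of "sha256" are illustrative assumptions.

/* Illustrative only: demo_sha256() is a hypothetical caller, not kernel code. */
#include <crypto/hash.h>
#include <linux/err.h>

static int demo_sha256(const u8 *data, unsigned int len, u8 *digest)
{
	struct crypto_shash *tfm;
	int err;

	/* Look up any registered "sha256" shash implementation. */
	tfm = crypto_alloc_shash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/*
	 * One-shot digest: crypto_shash_tfm_digest() places a shash_desc on
	 * the stack via SHASH_DESC_ON_STACK() and runs the whole
	 * init/update/final sequence in a single call.
	 */
	err = crypto_shash_tfm_digest(tfm, data, len, digest);

	crypto_free_shash(tfm);
	return err;
}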
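The CRYPTO_AHASH_ALG_BLOCK_ONLY bookkeeping in crypto_shash_finup() is easiest to see as a memory layout. The sketch below restates the pointer arithmetic under the layout that shash_prepare_alg() sets up when it grows descsize by cra_blocksize + 1; demo_partial_block() is a hypothetical helper, not part of the commit.

/*
 * Illustrative only: the desc context of a block-only algorithm ends with
 * a partial-block buffer and a length byte:
 *
 *   shash_desc_ctx(desc):
 *   [ algorithm state ... | partial block (blocksize bytes) | length byte ]
 *
 * The trailing byte counts how much of the partial block is buffered.
 */
#include <crypto/hash.h>

static u8 *demo_partial_block(struct shash_desc *desc, unsigned int *buffered)
{
	struct crypto_shash *tfm = desc->tfm;
	u8 *blenp = shash_desc_ctx(desc);
	u8 *buf;

	blenp += crypto_shash_descsize(tfm) - 1;	/* length byte */
	buf = blenp - crypto_shash_blocksize(tfm);	/* partial block */

	*buffered = *blenp;	/* bytes of the block already stored */
	return buf;
}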
