Diffstat (limited to 'crypto/skcipher.c')
 crypto/skcipher.c | 158 ++++++++++++++++++++++++++++++++++++++++++++++++++---
 1 file changed, 149 insertions(+), 9 deletions(-)
diff --git a/crypto/skcipher.c b/crypto/skcipher.c
index 2a969296bc24..bcf13d95f54a 100644
--- a/crypto/skcipher.c
+++ b/crypto/skcipher.c
@@ -585,6 +585,12 @@ static unsigned int crypto_skcipher_extsize(struct crypto_alg *alg)
return crypto_alg_extsize(alg);
}
+static void skcipher_set_needkey(struct crypto_skcipher *tfm)
+{
+ if (tfm->keysize)
+ crypto_skcipher_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
+}
+
static int skcipher_setkey_blkcipher(struct crypto_skcipher *tfm,
const u8 *key, unsigned int keylen)
{
@@ -598,8 +604,10 @@ static int skcipher_setkey_blkcipher(struct crypto_skcipher *tfm,
err = crypto_blkcipher_setkey(blkcipher, key, keylen);
crypto_skcipher_set_flags(tfm, crypto_blkcipher_get_flags(blkcipher) &
CRYPTO_TFM_RES_MASK);
- if (err)
+ if (unlikely(err)) {
+ skcipher_set_needkey(tfm);
return err;
+ }
crypto_skcipher_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
return 0;
@@ -677,8 +685,7 @@ static int crypto_init_skcipher_ops_blkcipher(struct crypto_tfm *tfm)
skcipher->ivsize = crypto_blkcipher_ivsize(blkcipher);
skcipher->keysize = calg->cra_blkcipher.max_keysize;
- if (skcipher->keysize)
- crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_NEED_KEY);
+ skcipher_set_needkey(skcipher);
return 0;
}
@@ -698,8 +705,10 @@ static int skcipher_setkey_ablkcipher(struct crypto_skcipher *tfm,
crypto_skcipher_set_flags(tfm,
crypto_ablkcipher_get_flags(ablkcipher) &
CRYPTO_TFM_RES_MASK);
- if (err)
+ if (unlikely(err)) {
+ skcipher_set_needkey(tfm);
return err;
+ }
crypto_skcipher_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
return 0;
@@ -776,8 +785,7 @@ static int crypto_init_skcipher_ops_ablkcipher(struct crypto_tfm *tfm)
sizeof(struct ablkcipher_request);
skcipher->keysize = calg->cra_ablkcipher.max_keysize;
- if (skcipher->keysize)
- crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_NEED_KEY);
+ skcipher_set_needkey(skcipher);
return 0;
}
@@ -820,8 +828,10 @@ static int skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
else
err = cipher->setkey(tfm, key, keylen);
- if (err)
+ if (unlikely(err)) {
+ skcipher_set_needkey(tfm);
return err;
+ }
crypto_skcipher_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
return 0;
@@ -852,8 +862,7 @@ static int crypto_skcipher_init_tfm(struct crypto_tfm *tfm)
skcipher->ivsize = alg->ivsize;
skcipher->keysize = alg->max_keysize;
- if (skcipher->keysize)
- crypto_skcipher_set_flags(skcipher, CRYPTO_TFM_NEED_KEY);
+ skcipher_set_needkey(skcipher);
if (alg->exit)
skcipher->base.exit = crypto_skcipher_exit_tfm;
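
Aside, not part of the patch: the setkey/init hunks above all serve one goal. If ->setkey() fails, the transform is put back into the CRYPTO_TFM_NEED_KEY state so that a half-installed key can never be used. That flag is what the request entry points test before dispatching; simplified, and assuming the include/crypto/skcipher.h of this era, the gate looks roughly like the sketch below (statistics and other bookkeeping omitted).

/*
 * Simplified sketch, not part of this diff: encrypt refuses to run while
 * CRYPTO_TFM_NEED_KEY is set and returns -ENOKEY instead of dispatching.
 */
static inline int crypto_skcipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);

	if (crypto_skcipher_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;		/* no valid key has been set */

	return tfm->encrypt(req);
}
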
@@ -1058,5 +1067,136 @@ int skcipher_register_instance(struct crypto_template *tmpl,
}
EXPORT_SYMBOL_GPL(skcipher_register_instance);
+static int skcipher_setkey_simple(struct crypto_skcipher *tfm, const u8 *key,
+ unsigned int keylen)
+{
+ struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
+ int err;
+
+ crypto_cipher_clear_flags(cipher, CRYPTO_TFM_REQ_MASK);
+ crypto_cipher_set_flags(cipher, crypto_skcipher_get_flags(tfm) &
+ CRYPTO_TFM_REQ_MASK);
+ err = crypto_cipher_setkey(cipher, key, keylen);
+ crypto_skcipher_set_flags(tfm, crypto_cipher_get_flags(cipher) &
+ CRYPTO_TFM_RES_MASK);
+ return err;
+}
+
+static int skcipher_init_tfm_simple(struct crypto_skcipher *tfm)
+{
+ struct skcipher_instance *inst = skcipher_alg_instance(tfm);
+ struct crypto_spawn *spawn = skcipher_instance_ctx(inst);
+ struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);
+ struct crypto_cipher *cipher;
+
+ cipher = crypto_spawn_cipher(spawn);
+ if (IS_ERR(cipher))
+ return PTR_ERR(cipher);
+
+ ctx->cipher = cipher;
+ return 0;
+}
+
+static void skcipher_exit_tfm_simple(struct crypto_skcipher *tfm)
+{
+ struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);
+
+ crypto_free_cipher(ctx->cipher);
+}
+
+static void skcipher_free_instance_simple(struct skcipher_instance *inst)
+{
+ crypto_drop_spawn(skcipher_instance_ctx(inst));
+ kfree(inst);
+}
+
+/**
+ * skcipher_alloc_instance_simple - allocate instance of simple block cipher mode
+ *
+ * Allocate an skcipher_instance for a simple block cipher mode of operation,
+ * e.g. cbc or ecb. The instance context will have just a single crypto_spawn,
+ * that for the underlying cipher. The {min,max}_keysize, ivsize, blocksize,
+ * alignmask, and priority are set from the underlying cipher but can be
+ * overridden if needed. The tfm context defaults to skcipher_ctx_simple, and
+ * default ->setkey(), ->init(), and ->exit() methods are installed.
+ *
+ * @tmpl: the template being instantiated
+ * @tb: the template parameters
+ * @cipher_alg_ret: on success, a pointer to the underlying cipher algorithm is
+ * returned here. It must be dropped with crypto_mod_put().
+ *
+ * Return: a pointer to the new instance, or an ERR_PTR(). The caller still
+ * needs to register the instance.
+ */
+struct skcipher_instance *
+skcipher_alloc_instance_simple(struct crypto_template *tmpl, struct rtattr **tb,
+ struct crypto_alg **cipher_alg_ret)
+{
+ struct crypto_attr_type *algt;
+ struct crypto_alg *cipher_alg;
+ struct skcipher_instance *inst;
+ struct crypto_spawn *spawn;
+ u32 mask;
+ int err;
+
+ algt = crypto_get_attr_type(tb);
+ if (IS_ERR(algt))
+ return ERR_CAST(algt);
+
+ if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask)
+ return ERR_PTR(-EINVAL);
+
+ mask = CRYPTO_ALG_TYPE_MASK |
+ crypto_requires_off(algt->type, algt->mask,
+ CRYPTO_ALG_NEED_FALLBACK);
+
+ cipher_alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, mask);
+ if (IS_ERR(cipher_alg))
+ return ERR_CAST(cipher_alg);
+
+ inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
+ if (!inst) {
+ err = -ENOMEM;
+ goto err_put_cipher_alg;
+ }
+ spawn = skcipher_instance_ctx(inst);
+
+ err = crypto_inst_setname(skcipher_crypto_instance(inst), tmpl->name,
+ cipher_alg);
+ if (err)
+ goto err_free_inst;
+
+ err = crypto_init_spawn(spawn, cipher_alg,
+ skcipher_crypto_instance(inst),
+ CRYPTO_ALG_TYPE_MASK);
+ if (err)
+ goto err_free_inst;
+ inst->free = skcipher_free_instance_simple;
+
+ /* Default algorithm properties, can be overridden */
+ inst->alg.base.cra_blocksize = cipher_alg->cra_blocksize;
+ inst->alg.base.cra_alignmask = cipher_alg->cra_alignmask;
+ inst->alg.base.cra_priority = cipher_alg->cra_priority;
+ inst->alg.min_keysize = cipher_alg->cra_cipher.cia_min_keysize;
+ inst->alg.max_keysize = cipher_alg->cra_cipher.cia_max_keysize;
+ inst->alg.ivsize = cipher_alg->cra_blocksize;
+
+ /* Use skcipher_ctx_simple by default, can be overridden */
+ inst->alg.base.cra_ctxsize = sizeof(struct skcipher_ctx_simple);
+ inst->alg.setkey = skcipher_setkey_simple;
+ inst->alg.init = skcipher_init_tfm_simple;
+ inst->alg.exit = skcipher_exit_tfm_simple;
+
+ *cipher_alg_ret = cipher_alg;
+ return inst;
+
+err_free_inst:
+ kfree(inst);
+err_put_cipher_alg:
+ crypto_mod_put(cipher_alg);
+ return ERR_PTR(err);
+}
+EXPORT_SYMBOL_GPL(skcipher_alloc_instance_simple);
+
MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Symmetric key cipher type");
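
Aside, not part of the patch: a conversion of a mode template to the new helper is expected to look roughly like the sketch below, modeled on how the simple templates (e.g. ecb) call it. my_mode_create(), my_mode_encrypt() and my_mode_decrypt() are hypothetical placeholders, not identifiers from this diff.

/*
 * Illustrative sketch of a template ->create() built on
 * skcipher_alloc_instance_simple(); encrypt/decrypt are placeholders.
 */
static int my_mode_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb, &alg);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	/* only the mode-specific callbacks still need to be filled in */
	inst->alg.encrypt = my_mode_encrypt;
	inst->alg.decrypt = my_mode_decrypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		inst->free(inst);

	crypto_mod_put(alg);	/* drop the reference returned via the third argument */
	return err;
}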