Diffstat (limited to 'drivers/crypto/inside-secure/safexcel_cipher.c')
 drivers/crypto/inside-secure/safexcel_cipher.c | 370 +++++++++----------------
 1 file changed, 128 insertions(+), 242 deletions(-)
diff --git a/drivers/crypto/inside-secure/safexcel_cipher.c b/drivers/crypto/inside-secure/safexcel_cipher.c
index 0c5e80c3f6e3..919e5a2cab95 100644
--- a/drivers/crypto/inside-secure/safexcel_cipher.c
+++ b/drivers/crypto/inside-secure/safexcel_cipher.c
@@ -5,7 +5,7 @@
* Antoine Tenart <antoine.tenart@free-electrons.com>
*/
-#include <asm/unaligned.h>
+#include <linux/unaligned.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
@@ -18,7 +18,8 @@
#include <crypto/gcm.h>
#include <crypto/ghash.h>
#include <crypto/poly1305.h>
-#include <crypto/sha.h>
+#include <crypto/sha1.h>
+#include <crypto/sha2.h>
#include <crypto/sm3.h>
#include <crypto/sm4.h>
#include <crypto/xts.h>
@@ -61,10 +62,7 @@ struct safexcel_cipher_ctx {
/* All the below is AEAD specific */
u32 hash_alg;
u32 state_sz;
- __be32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
- __be32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
- struct crypto_cipher *hkaes;
struct crypto_aead *fback;
};
@@ -375,7 +373,7 @@ static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
{
struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
struct crypto_aes_ctx aes;
int ret, i;
@@ -406,11 +404,11 @@ static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
{
struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
- struct safexcel_ahash_export_state istate, ostate;
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
struct crypto_authenc_keys keys;
struct crypto_aes_ctx aes;
int err = -EINVAL, i;
+ const char *alg;
if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
goto badkey;
@@ -465,53 +463,37 @@ static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
/* Auth key */
switch (ctx->hash_alg) {
case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
- if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
- keys.authkeylen, &istate, &ostate))
- goto badkey;
+ alg = "safexcel-sha1";
break;
case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
- if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
- keys.authkeylen, &istate, &ostate))
- goto badkey;
+ alg = "safexcel-sha224";
break;
case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
- if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
- keys.authkeylen, &istate, &ostate))
- goto badkey;
+ alg = "safexcel-sha256";
break;
case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
- if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
- keys.authkeylen, &istate, &ostate))
- goto badkey;
+ alg = "safexcel-sha384";
break;
case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
- if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
- keys.authkeylen, &istate, &ostate))
- goto badkey;
+ alg = "safexcel-sha512";
break;
case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
- if (safexcel_hmac_setkey("safexcel-sm3", keys.authkey,
- keys.authkeylen, &istate, &ostate))
- goto badkey;
+ alg = "safexcel-sm3";
break;
default:
dev_err(priv->dev, "aead: unsupported hash algorithm\n");
goto badkey;
}
- if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
- (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
- memcmp(ctx->opad, ostate.state, ctx->state_sz)))
- ctx->base.needs_inv = true;
+ if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
+ alg, ctx->state_sz))
+ goto badkey;
/* Now copy the keys into the context */
for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
ctx->key_len = keys.enckeylen;
- memcpy(ctx->ipad, &istate.state, ctx->state_sz);
- memcpy(ctx->opad, &ostate.state, ctx->state_sz);
-
memzero_explicit(&keys, sizeof(keys));
return 0;
@@ -525,7 +507,7 @@ static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
struct safexcel_cipher_req *sreq,
struct safexcel_command_desc *cdesc)
{
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
int ctrl_size = ctx->key_len / sizeof(u32);
cdesc->control_data.control1 = ctx->mode;
@@ -659,10 +641,16 @@ static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int rin
safexcel_complete(priv, ring);
if (src == dst) {
- dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
+ if (sreq->nr_src > 0)
+ dma_unmap_sg(priv->dev, src, sreq->nr_src,
+ DMA_BIDIRECTIONAL);
} else {
- dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
- dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
+ if (sreq->nr_src > 0)
+ dma_unmap_sg(priv->dev, src, sreq->nr_src,
+ DMA_TO_DEVICE);
+ if (sreq->nr_dst > 0)
+ dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
+ DMA_FROM_DEVICE);
}
/*
@@ -692,7 +680,7 @@ static int safexcel_send_req(struct crypto_async_request *base, int ring,
struct skcipher_request *areq = skcipher_request_cast(base);
struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
struct safexcel_command_desc *cdesc;
struct safexcel_command_desc *first_cdesc = NULL;
struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
@@ -718,10 +706,10 @@ static int safexcel_send_req(struct crypto_async_request *base, int ring,
totlen_dst += digestsize;
memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
- ctx->ipad, ctx->state_sz);
+ &ctx->base.ipad, ctx->state_sz);
if (!ctx->xcm)
memcpy(ctx->base.ctxr->data + (ctx->key_len +
- ctx->state_sz) / sizeof(u32), ctx->opad,
+ ctx->state_sz) / sizeof(u32), &ctx->base.opad,
ctx->state_sz);
} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
(sreq->direction == SAFEXCEL_DECRYPT)) {
@@ -754,23 +742,32 @@ static int safexcel_send_req(struct crypto_async_request *base, int ring,
max(totlen_src, totlen_dst));
return -EINVAL;
}
- dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
+ if (sreq->nr_src > 0 &&
+ !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL))
+ return -EIO;
} else {
if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
totlen_src);
return -EINVAL;
}
- dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
+
+ if (sreq->nr_src > 0 &&
+ !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE))
+ return -EIO;
if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
totlen_dst);
- dma_unmap_sg(priv->dev, src, sreq->nr_src,
- DMA_TO_DEVICE);
- return -EINVAL;
+ ret = -EINVAL;
+ goto unmap;
+ }
+
+ if (sreq->nr_dst > 0 &&
+ !dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE)) {
+ ret = -EIO;
+ goto unmap;
}
- dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
}
memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
@@ -900,12 +897,18 @@ rdesc_rollback:
cdesc_rollback:
for (i = 0; i < n_cdesc; i++)
safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
-
+unmap:
if (src == dst) {
- dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
+ if (sreq->nr_src > 0)
+ dma_unmap_sg(priv->dev, src, sreq->nr_src,
+ DMA_BIDIRECTIONAL);
} else {
- dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
- dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
+ if (sreq->nr_src > 0)
+ dma_unmap_sg(priv->dev, src, sreq->nr_src,
+ DMA_TO_DEVICE);
+ if (sreq->nr_dst > 0)
+ dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
+ DMA_FROM_DEVICE);
}
return ret;
@@ -1020,7 +1023,7 @@ static int safexcel_cipher_send_inv(struct crypto_async_request *base,
int ring, int *commands, int *results)
{
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
int ret;
ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
@@ -1039,7 +1042,7 @@ static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
struct skcipher_request *req = skcipher_request_cast(async);
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
int ret;
BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
@@ -1072,7 +1075,7 @@ static int safexcel_aead_send(struct crypto_async_request *async, int ring,
struct crypto_aead *tfm = crypto_aead_reqtfm(req);
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
struct safexcel_cipher_req *sreq = aead_request_ctx(req);
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
int ret;
BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
@@ -1091,13 +1094,12 @@ static int safexcel_aead_send(struct crypto_async_request *async, int ring,
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
struct crypto_async_request *base,
struct safexcel_cipher_req *sreq,
- struct safexcel_inv_result *result)
+ struct crypto_wait *result)
{
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
int ring = ctx->base.ring;
-
- init_completion(&result->completion);
+ int err;
ctx = crypto_tfm_ctx(base->tfm);
ctx->base.exit_inv = true;
@@ -1110,13 +1112,13 @@ static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
queue_work(priv->ring[ring].workqueue,
&priv->ring[ring].work_data.work);
- wait_for_completion(&result->completion);
+ err = crypto_wait_req(-EINPROGRESS, result);
- if (result->error) {
+ if (err) {
dev_warn(priv->dev,
"cipher: sync: invalidate: completion error %d\n",
- result->error);
- return result->error;
+ err);
+ return err;
}
return 0;
@@ -1126,12 +1128,12 @@ static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
- struct safexcel_inv_result result = {};
+ DECLARE_CRYPTO_WAIT(result);
memset(req, 0, sizeof(struct skcipher_request));
skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
- safexcel_inv_complete, &result);
+ crypto_req_done, &result);
skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
@@ -1141,12 +1143,12 @@ static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
struct safexcel_cipher_req *sreq = aead_request_ctx(req);
- struct safexcel_inv_result result = {};
+ DECLARE_CRYPTO_WAIT(result);
memset(req, 0, sizeof(struct aead_request));
aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
- safexcel_inv_complete, &result);
+ crypto_req_done, &result);
aead_request_set_tfm(req, __crypto_aead_cast(tfm));
return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
@@ -1157,7 +1159,7 @@ static int safexcel_queue_req(struct crypto_async_request *base,
enum safexcel_cipher_direction dir)
{
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
int ret, ring;
sreq->needs_inv = false;
@@ -1211,7 +1213,7 @@ static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
sizeof(struct safexcel_cipher_req));
- ctx->priv = tmpl->priv;
+ ctx->base.priv = tmpl->priv;
ctx->base.send = safexcel_skcipher_send;
ctx->base.handle_result = safexcel_skcipher_handle_result;
@@ -1237,7 +1239,7 @@ static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
int ret;
if (safexcel_cipher_cra_exit(tfm))
@@ -1257,7 +1259,7 @@ static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
int ret;
if (safexcel_cipher_cra_exit(tfm))
@@ -1300,6 +1302,7 @@ struct safexcel_alg_template safexcel_alg_ecb_aes = {
.cra_driver_name = "safexcel-ecb-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -1337,6 +1340,7 @@ struct safexcel_alg_template safexcel_alg_cbc_aes = {
.cra_driver_name = "safexcel-cbc-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -1348,86 +1352,12 @@ struct safexcel_alg_template safexcel_alg_cbc_aes = {
},
};
-static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
-{
- struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
-
- safexcel_skcipher_cra_init(tfm);
- ctx->alg = SAFEXCEL_AES;
- ctx->blocksz = AES_BLOCK_SIZE;
- ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
- return 0;
-}
-
-struct safexcel_alg_template safexcel_alg_cfb_aes = {
- .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
- .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
- .alg.skcipher = {
- .setkey = safexcel_skcipher_aes_setkey,
- .encrypt = safexcel_encrypt,
- .decrypt = safexcel_decrypt,
- .min_keysize = AES_MIN_KEY_SIZE,
- .max_keysize = AES_MAX_KEY_SIZE,
- .ivsize = AES_BLOCK_SIZE,
- .base = {
- .cra_name = "cfb(aes)",
- .cra_driver_name = "safexcel-cfb-aes",
- .cra_priority = SAFEXCEL_CRA_PRIORITY,
- .cra_flags = CRYPTO_ALG_ASYNC |
- CRYPTO_ALG_KERN_DRIVER_ONLY,
- .cra_blocksize = 1,
- .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
- .cra_alignmask = 0,
- .cra_init = safexcel_skcipher_aes_cfb_cra_init,
- .cra_exit = safexcel_skcipher_cra_exit,
- .cra_module = THIS_MODULE,
- },
- },
-};
-
-static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
-{
- struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
-
- safexcel_skcipher_cra_init(tfm);
- ctx->alg = SAFEXCEL_AES;
- ctx->blocksz = AES_BLOCK_SIZE;
- ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
- return 0;
-}
-
-struct safexcel_alg_template safexcel_alg_ofb_aes = {
- .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
- .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
- .alg.skcipher = {
- .setkey = safexcel_skcipher_aes_setkey,
- .encrypt = safexcel_encrypt,
- .decrypt = safexcel_decrypt,
- .min_keysize = AES_MIN_KEY_SIZE,
- .max_keysize = AES_MAX_KEY_SIZE,
- .ivsize = AES_BLOCK_SIZE,
- .base = {
- .cra_name = "ofb(aes)",
- .cra_driver_name = "safexcel-ofb-aes",
- .cra_priority = SAFEXCEL_CRA_PRIORITY,
- .cra_flags = CRYPTO_ALG_ASYNC |
- CRYPTO_ALG_KERN_DRIVER_ONLY,
- .cra_blocksize = 1,
- .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
- .cra_alignmask = 0,
- .cra_init = safexcel_skcipher_aes_ofb_cra_init,
- .cra_exit = safexcel_skcipher_cra_exit,
- .cra_module = THIS_MODULE,
- },
- },
-};
-
static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
const u8 *key, unsigned int len)
{
struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
struct crypto_aes_ctx aes;
int ret, i;
unsigned int keylen;
@@ -1485,6 +1415,7 @@ struct safexcel_alg_template safexcel_alg_ctr_aes = {
.cra_driver_name = "safexcel-ctr-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -1500,7 +1431,7 @@ static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
unsigned int len)
{
struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
int ret;
ret = verify_skcipher_des_key(ctfm, key);
@@ -1545,6 +1476,7 @@ struct safexcel_alg_template safexcel_alg_cbc_des = {
.cra_driver_name = "safexcel-cbc-des",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -1582,6 +1514,7 @@ struct safexcel_alg_template safexcel_alg_ecb_des = {
.cra_driver_name = "safexcel-ecb-des",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -1597,7 +1530,7 @@ static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
const u8 *key, unsigned int len)
{
struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
int err;
err = verify_skcipher_des3_key(ctfm, key);
@@ -1642,6 +1575,7 @@ struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
.cra_driver_name = "safexcel-cbc-des3_ede",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -1679,6 +1613,7 @@ struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
.cra_driver_name = "safexcel-ecb-des3_ede",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -1714,7 +1649,7 @@ static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
sizeof(struct safexcel_cipher_req));
- ctx->priv = tmpl->priv;
+ ctx->base.priv = tmpl->priv;
ctx->alg = SAFEXCEL_AES; /* default */
ctx->blocksz = AES_BLOCK_SIZE;
@@ -1751,6 +1686,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -1786,6 +1722,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -1821,6 +1758,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -1856,6 +1794,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -1891,6 +1830,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = AES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -1927,6 +1867,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -1963,6 +1904,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -1999,6 +1941,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2035,6 +1978,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2071,6 +2015,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES3_EDE_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2107,6 +2052,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2143,6 +2089,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2179,6 +2126,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2215,6 +2163,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2251,6 +2200,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = DES_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2285,6 +2235,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2319,6 +2270,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2353,6 +2305,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2387,6 +2340,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2421,6 +2375,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2437,7 +2392,7 @@ static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
{
struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
struct crypto_aes_ctx aes;
int ret, i;
unsigned int keylen;
@@ -2534,6 +2489,7 @@ struct safexcel_alg_template safexcel_alg_xts_aes = {
.cra_driver_name = "safexcel-xts-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = XTS_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2550,7 +2506,7 @@ static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
{
struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
struct crypto_aes_ctx aes;
u32 hashkey[AES_BLOCK_SIZE >> 2];
int ret, i;
@@ -2576,19 +2532,12 @@ static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
ctx->key_len = len;
/* Compute hash key by encrypting zeroes with cipher key */
- crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
- crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
- CRYPTO_TFM_REQ_MASK);
- ret = crypto_cipher_setkey(ctx->hkaes, key, len);
- if (ret)
- return ret;
-
memset(hashkey, 0, AES_BLOCK_SIZE);
- crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
+ aes_encrypt(&aes, (u8 *)hashkey, (u8 *)hashkey);
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
- if (be32_to_cpu(ctx->ipad[i]) != hashkey[i]) {
+ if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
ctx->base.needs_inv = true;
break;
}
@@ -2596,7 +2545,7 @@ static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
}
for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
- ctx->ipad[i] = cpu_to_be32(hashkey[i]);
+ ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);
memzero_explicit(hashkey, AES_BLOCK_SIZE);
memzero_explicit(&aes, sizeof(aes));
@@ -2613,15 +2562,11 @@ static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
ctx->xcm = EIP197_XCM_MODE_GCM;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
- ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
- return PTR_ERR_OR_ZERO(ctx->hkaes);
+ return 0;
}
static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
{
- struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
-
- crypto_free_cipher(ctx->hkaes);
safexcel_aead_cra_exit(tfm);
}
@@ -2646,6 +2591,7 @@ struct safexcel_alg_template safexcel_alg_gcm = {
.cra_driver_name = "safexcel-gcm-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2662,7 +2608,7 @@ static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
{
struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
struct crypto_aes_ctx aes;
int ret, i;
@@ -2683,7 +2629,7 @@ static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
for (i = 0; i < len / sizeof(u32); i++) {
ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
- ctx->ipad[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
+ ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
cpu_to_be32(aes.key_enc[i]);
}
@@ -2769,6 +2715,7 @@ struct safexcel_alg_template safexcel_alg_ccm = {
.cra_driver_name = "safexcel-ccm-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2783,7 +2730,7 @@ struct safexcel_alg_template safexcel_alg_ccm = {
static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
const u8 *key)
{
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
@@ -2832,6 +2779,7 @@ struct safexcel_alg_template safexcel_alg_chacha20 = {
.cra_driver_name = "safexcel-chacha20",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -2993,6 +2941,7 @@ struct safexcel_alg_template safexcel_alg_chachapoly = {
/* +1 to put it above HW chacha + SW poly */
.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = 1,
@@ -3032,6 +2981,7 @@ struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
/* +1 to put it above HW chacha + SW poly */
.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = 1,
@@ -3049,7 +2999,7 @@ static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
{
struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
- struct safexcel_crypto_priv *priv = ctx->priv;
+ struct safexcel_crypto_priv *priv = ctx->base.priv;
if (len != SM4_KEY_SIZE)
return -EINVAL;
@@ -3110,6 +3060,7 @@ struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
.cra_driver_name = "safexcel-ecb-sm4",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = SM4_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -3147,6 +3098,7 @@ struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
.cra_driver_name = "safexcel-cbc-sm4",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = SM4_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -3158,80 +3110,6 @@ struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
},
};
-static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
-{
- struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
-
- safexcel_skcipher_cra_init(tfm);
- ctx->alg = SAFEXCEL_SM4;
- ctx->blocksz = SM4_BLOCK_SIZE;
- ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
- return 0;
-}
-
-struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
- .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
- .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
- .alg.skcipher = {
- .setkey = safexcel_skcipher_sm4_setkey,
- .encrypt = safexcel_encrypt,
- .decrypt = safexcel_decrypt,
- .min_keysize = SM4_KEY_SIZE,
- .max_keysize = SM4_KEY_SIZE,
- .ivsize = SM4_BLOCK_SIZE,
- .base = {
- .cra_name = "ofb(sm4)",
- .cra_driver_name = "safexcel-ofb-sm4",
- .cra_priority = SAFEXCEL_CRA_PRIORITY,
- .cra_flags = CRYPTO_ALG_ASYNC |
- CRYPTO_ALG_KERN_DRIVER_ONLY,
- .cra_blocksize = 1,
- .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
- .cra_alignmask = 0,
- .cra_init = safexcel_skcipher_sm4_ofb_cra_init,
- .cra_exit = safexcel_skcipher_cra_exit,
- .cra_module = THIS_MODULE,
- },
- },
-};
-
-static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
-{
- struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
-
- safexcel_skcipher_cra_init(tfm);
- ctx->alg = SAFEXCEL_SM4;
- ctx->blocksz = SM4_BLOCK_SIZE;
- ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
- return 0;
-}
-
-struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
- .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
- .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
- .alg.skcipher = {
- .setkey = safexcel_skcipher_sm4_setkey,
- .encrypt = safexcel_encrypt,
- .decrypt = safexcel_decrypt,
- .min_keysize = SM4_KEY_SIZE,
- .max_keysize = SM4_KEY_SIZE,
- .ivsize = SM4_BLOCK_SIZE,
- .base = {
- .cra_name = "cfb(sm4)",
- .cra_driver_name = "safexcel-cfb-sm4",
- .cra_priority = SAFEXCEL_CRA_PRIORITY,
- .cra_flags = CRYPTO_ALG_ASYNC |
- CRYPTO_ALG_KERN_DRIVER_ONLY,
- .cra_blocksize = 1,
- .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
- .cra_alignmask = 0,
- .cra_init = safexcel_skcipher_sm4_cfb_cra_init,
- .cra_exit = safexcel_skcipher_cra_exit,
- .cra_module = THIS_MODULE,
- },
- },
-};
-
static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
const u8 *key, unsigned int len)
{
@@ -3273,6 +3151,7 @@ struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
.cra_driver_name = "safexcel-ctr-sm4",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -3332,6 +3211,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = SM4_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -3441,6 +3321,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY |
CRYPTO_ALG_NEED_FALLBACK,
.cra_blocksize = SM4_BLOCK_SIZE,
@@ -3476,6 +3357,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -3510,6 +3392,7 @@ struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -3578,6 +3461,7 @@ struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
.cra_driver_name = "safexcel-rfc4106-gcm-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -3622,6 +3506,7 @@ struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
.cra_driver_name = "safexcel-rfc4543-gcm-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
@@ -3713,6 +3598,7 @@ struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
.cra_driver_name = "safexcel-rfc4309-ccm-aes",
.cra_priority = SAFEXCEL_CRA_PRIORITY,
.cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_ALLOCATES_MEMORY |
CRYPTO_ALG_KERN_DRIVER_ONLY,
.cra_blocksize = 1,
.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),