Diffstat (limited to 'drivers/crypto/inside-secure/safexcel_cipher.c')
-rw-r--r--	drivers/crypto/inside-secure/safexcel_cipher.c	238
1 file changed, 47 insertions(+), 191 deletions(-)
diff --git a/drivers/crypto/inside-secure/safexcel_cipher.c b/drivers/crypto/inside-secure/safexcel_cipher.c
index d68ef16650d4..919e5a2cab95 100644
--- a/drivers/crypto/inside-secure/safexcel_cipher.c
+++ b/drivers/crypto/inside-secure/safexcel_cipher.c
@@ -5,7 +5,7 @@
  * Antoine Tenart <antoine.tenart@free-electrons.com>
  */
 
-#include <asm/unaligned.h>
+#include <linux/unaligned.h>
 #include <linux/device.h>
 #include <linux/dma-mapping.h>
 #include <linux/dmapool.h>
@@ -63,7 +63,6 @@ struct safexcel_cipher_ctx {
 	u32 hash_alg;
 	u32 state_sz;
 
-	struct crypto_cipher *hkaes;
 	struct crypto_aead *fback;
 };
 
@@ -642,10 +641,16 @@ static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int rin
 	safexcel_complete(priv, ring);
 
 	if (src == dst) {
-		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
+		if (sreq->nr_src > 0)
+			dma_unmap_sg(priv->dev, src, sreq->nr_src,
+				     DMA_BIDIRECTIONAL);
 	} else {
-		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
-		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
+		if (sreq->nr_src > 0)
+			dma_unmap_sg(priv->dev, src, sreq->nr_src,
+				     DMA_TO_DEVICE);
+		if (sreq->nr_dst > 0)
+			dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
+				     DMA_FROM_DEVICE);
 	}
 
 	/*
@@ -737,23 +742,32 @@ static int safexcel_send_req(struct crypto_async_request *base, int ring,
 				max(totlen_src, totlen_dst));
 			return -EINVAL;
 		}
-		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
+		if (sreq->nr_src > 0 &&
+		    !dma_map_sg(priv->dev, src, sreq->nr_src,
+				DMA_BIDIRECTIONAL))
+			return -EIO;
 	} else {
 		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
 			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
 				totlen_src);
 			return -EINVAL;
 		}
-		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
+
+		if (sreq->nr_src > 0 &&
+		    !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE))
+			return -EIO;
 
 		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
 			dev_err(priv->dev,
 				"Dest buffer not large enough (need %d bytes)!",
 				totlen_dst);
-			dma_unmap_sg(priv->dev, src, sreq->nr_src,
-				     DMA_TO_DEVICE);
-			return -EINVAL;
+			ret = -EINVAL;
+			goto unmap;
+		}
+
+		if (sreq->nr_dst > 0 &&
+		    !dma_map_sg(priv->dev, dst, sreq->nr_dst,
+				DMA_FROM_DEVICE)) {
+			ret = -EIO;
+			goto unmap;
 		}
-		dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
 	}
 
 	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
@@ -883,12 +897,18 @@ rdesc_rollback:
 cdesc_rollback:
 	for (i = 0; i < n_cdesc; i++)
 		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
-
+unmap:
 	if (src == dst) {
-		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
+		if (sreq->nr_src > 0)
+			dma_unmap_sg(priv->dev, src, sreq->nr_src,
+				     DMA_BIDIRECTIONAL);
 	} else {
-		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
-		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
+		if (sreq->nr_src > 0)
+			dma_unmap_sg(priv->dev, src, sreq->nr_src,
+				     DMA_TO_DEVICE);
+		if (sreq->nr_dst > 0)
+			dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
+				     DMA_FROM_DEVICE);
 	}
 
 	return ret;
@@ -1074,13 +1094,12 @@ static int safexcel_aead_send(struct crypto_async_request *async, int ring,
 static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
 				    struct crypto_async_request *base,
 				    struct safexcel_cipher_req *sreq,
-				    struct safexcel_inv_result *result)
+				    struct crypto_wait *result)
 {
 	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 	struct safexcel_crypto_priv *priv = ctx->base.priv;
 	int ring = ctx->base.ring;
-
-	init_completion(&result->completion);
+	int err;
 
 	ctx = crypto_tfm_ctx(base->tfm);
 	ctx->base.exit_inv = true;
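The scatterlist hunks above all apply one rule: dma_map_sg() returns 0 on failure, and a zero-entry list must be neither mapped nor later unmapped. A minimal sketch of that pattern, outside the driver; the helper name and parameters are hypothetical:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/* Hypothetical helper illustrating the guarded-mapping pattern. */
static int map_src_sg(struct device *dev, struct scatterlist *sg, int nents)
{
	/* Skip empty lists entirely; dma_map_sg() returns 0 on failure. */
	if (nents > 0 && !dma_map_sg(dev, sg, nents, DMA_TO_DEVICE))
		return -EIO;
	return 0;	/* callers must likewise skip dma_unmap_sg() when nents == 0 */
}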
@@ -1093,13 +1112,13 @@ static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
 	queue_work(priv->ring[ring].workqueue,
 		   &priv->ring[ring].work_data.work);
 
-	wait_for_completion(&result->completion);
+	err = crypto_wait_req(-EINPROGRESS, result);
 
-	if (result->error) {
+	if (err) {
 		dev_warn(priv->dev,
 			 "cipher: sync: invalidate: completion error %d\n",
-			 result->error);
-		return result->error;
+			 err);
+		return err;
 	}
 
 	return 0;
@@ -1109,12 +1128,12 @@ static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
 {
 	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
 	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
-	struct safexcel_inv_result result = {};
+	DECLARE_CRYPTO_WAIT(result);
 
 	memset(req, 0, sizeof(struct skcipher_request));
 	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
-				      safexcel_inv_complete, &result);
+				      crypto_req_done, &result);
 	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
 
 	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
@@ -1124,12 +1143,12 @@ static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
 {
 	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
 	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
-	struct safexcel_inv_result result = {};
+	DECLARE_CRYPTO_WAIT(result);
 
 	memset(req, 0, sizeof(struct aead_request));
 	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
-				  safexcel_inv_complete, &result);
+				  crypto_req_done, &result);
 	aead_request_set_tfm(req, __crypto_aead_cast(tfm));
 
 	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
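The invalidation paths above drop the driver-private safexcel_inv_result completion in favor of the generic crypto_wait API. A sketch of that idiom, assuming a plain synchronous skcipher call (the patch itself queues the request directly and waits with crypto_wait_req(-EINPROGRESS, ...)); wait_sync_encrypt is a hypothetical name:

#include <linux/crypto.h>
#include <crypto/skcipher.h>

static int wait_sync_encrypt(struct skcipher_request *req)
{
	DECLARE_CRYPTO_WAIT(wait);	/* bundles a completion and an error slot */

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
	/* Sleeps on -EINPROGRESS/-EBUSY, then returns the final status
	 * that crypto_req_done() recorded. */
	return crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
}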
@@ -1333,82 +1352,6 @@ struct safexcel_alg_template safexcel_alg_cbc_aes = {
 	},
 };
 
-static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
-{
-	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
-
-	safexcel_skcipher_cra_init(tfm);
-	ctx->alg  = SAFEXCEL_AES;
-	ctx->blocksz = AES_BLOCK_SIZE;
-	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
-	return 0;
-}
-
-struct safexcel_alg_template safexcel_alg_cfb_aes = {
-	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
-	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
-	.alg.skcipher = {
-		.setkey = safexcel_skcipher_aes_setkey,
-		.encrypt = safexcel_encrypt,
-		.decrypt = safexcel_decrypt,
-		.min_keysize = AES_MIN_KEY_SIZE,
-		.max_keysize = AES_MAX_KEY_SIZE,
-		.ivsize = AES_BLOCK_SIZE,
-		.base = {
-			.cra_name = "cfb(aes)",
-			.cra_driver_name = "safexcel-cfb-aes",
-			.cra_priority = SAFEXCEL_CRA_PRIORITY,
-			.cra_flags = CRYPTO_ALG_ASYNC |
-				     CRYPTO_ALG_ALLOCATES_MEMORY |
-				     CRYPTO_ALG_KERN_DRIVER_ONLY,
-			.cra_blocksize = 1,
-			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
-			.cra_alignmask = 0,
-			.cra_init = safexcel_skcipher_aes_cfb_cra_init,
-			.cra_exit = safexcel_skcipher_cra_exit,
-			.cra_module = THIS_MODULE,
-		},
-	},
-};
-
-static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
-{
-	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
-
-	safexcel_skcipher_cra_init(tfm);
-	ctx->alg  = SAFEXCEL_AES;
-	ctx->blocksz = AES_BLOCK_SIZE;
-	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
-	return 0;
-}
-
-struct safexcel_alg_template safexcel_alg_ofb_aes = {
-	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
-	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
-	.alg.skcipher = {
-		.setkey = safexcel_skcipher_aes_setkey,
-		.encrypt = safexcel_encrypt,
-		.decrypt = safexcel_decrypt,
-		.min_keysize = AES_MIN_KEY_SIZE,
-		.max_keysize = AES_MAX_KEY_SIZE,
-		.ivsize = AES_BLOCK_SIZE,
-		.base = {
-			.cra_name = "ofb(aes)",
-			.cra_driver_name = "safexcel-ofb-aes",
-			.cra_priority = SAFEXCEL_CRA_PRIORITY,
-			.cra_flags = CRYPTO_ALG_ASYNC |
-				     CRYPTO_ALG_ALLOCATES_MEMORY |
-				     CRYPTO_ALG_KERN_DRIVER_ONLY,
-			.cra_blocksize = 1,
-			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
-			.cra_alignmask = 0,
-			.cra_init = safexcel_skcipher_aes_ofb_cra_init,
-			.cra_exit = safexcel_skcipher_cra_exit,
-			.cra_module = THIS_MODULE,
-		},
-	},
-};
-
 static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
 					   const u8 *key, unsigned int len)
 {
@@ -2589,15 +2532,8 @@ static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
 	ctx->key_len = len;
 
 	/* Compute hash key by encrypting zeroes with cipher key */
-	crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
-	crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
-				CRYPTO_TFM_REQ_MASK);
-	ret = crypto_cipher_setkey(ctx->hkaes, key, len);
-	if (ret)
-		return ret;
-
 	memset(hashkey, 0, AES_BLOCK_SIZE);
-	crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
+	aes_encrypt(&aes, (u8 *)hashkey, (u8 *)hashkey);
 
 	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
@@ -2626,15 +2562,11 @@ static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
 	ctx->xcm = EIP197_XCM_MODE_GCM;
 	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
 
-	ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
-	return PTR_ERR_OR_ZERO(ctx->hkaes);
+	return 0;
 }
 
 static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
 {
-	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
-
-	crypto_free_cipher(ctx->hkaes);
 	safexcel_aead_cra_exit(tfm);
 }
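The GCM setkey hunk above replaces a per-tfm "aes" crypto_cipher with the AES library: the GHASH key H is just an all-zero block encrypted under the cipher key, so no allocated tfm is needed. A standalone sketch of that computation (compute_ghash_key is a hypothetical name; the patch reuses a crypto_aes_ctx named aes that the function already expanded for key validation):

#include <crypto/aes.h>
#include <linux/string.h>

static int compute_ghash_key(const u8 *key, unsigned int keylen,
			     u8 hashkey[AES_BLOCK_SIZE])
{
	struct crypto_aes_ctx aes;
	int ret;

	ret = aes_expandkey(&aes, key, keylen);	/* also validates keylen */
	if (ret)
		return ret;

	memset(hashkey, 0, AES_BLOCK_SIZE);
	aes_encrypt(&aes, hashkey, hashkey);	/* H = E_K(0^128) */

	memzero_explicit(&aes, sizeof(aes));	/* scrub expanded round keys */
	return 0;
}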
@@ -3178,82 +3110,6 @@ struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
 	},
 };
 
-static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
-{
-	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
-
-	safexcel_skcipher_cra_init(tfm);
-	ctx->alg  = SAFEXCEL_SM4;
-	ctx->blocksz = SM4_BLOCK_SIZE;
-	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
-	return 0;
-}
-
-struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
-	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
-	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
-	.alg.skcipher = {
-		.setkey = safexcel_skcipher_sm4_setkey,
-		.encrypt = safexcel_encrypt,
-		.decrypt = safexcel_decrypt,
-		.min_keysize = SM4_KEY_SIZE,
-		.max_keysize = SM4_KEY_SIZE,
-		.ivsize = SM4_BLOCK_SIZE,
-		.base = {
-			.cra_name = "ofb(sm4)",
-			.cra_driver_name = "safexcel-ofb-sm4",
-			.cra_priority = SAFEXCEL_CRA_PRIORITY,
-			.cra_flags = CRYPTO_ALG_ASYNC |
-				     CRYPTO_ALG_ALLOCATES_MEMORY |
-				     CRYPTO_ALG_KERN_DRIVER_ONLY,
-			.cra_blocksize = 1,
-			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
-			.cra_alignmask = 0,
-			.cra_init = safexcel_skcipher_sm4_ofb_cra_init,
-			.cra_exit = safexcel_skcipher_cra_exit,
-			.cra_module = THIS_MODULE,
-		},
-	},
-};
-
-static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
-{
-	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
-
-	safexcel_skcipher_cra_init(tfm);
-	ctx->alg  = SAFEXCEL_SM4;
-	ctx->blocksz = SM4_BLOCK_SIZE;
-	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
-	return 0;
-}
-
-struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
-	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
-	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
-	.alg.skcipher = {
-		.setkey = safexcel_skcipher_sm4_setkey,
-		.encrypt = safexcel_encrypt,
-		.decrypt = safexcel_decrypt,
-		.min_keysize = SM4_KEY_SIZE,
-		.max_keysize = SM4_KEY_SIZE,
-		.ivsize = SM4_BLOCK_SIZE,
-		.base = {
-			.cra_name = "cfb(sm4)",
-			.cra_driver_name = "safexcel-cfb-sm4",
-			.cra_priority = SAFEXCEL_CRA_PRIORITY,
-			.cra_flags = CRYPTO_ALG_ASYNC |
-				     CRYPTO_ALG_ALLOCATES_MEMORY |
-				     CRYPTO_ALG_KERN_DRIVER_ONLY,
-			.cra_blocksize = 1,
-			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
-			.cra_alignmask = 0,
-			.cra_init = safexcel_skcipher_sm4_cfb_cra_init,
-			.cra_exit = safexcel_skcipher_cra_exit,
-			.cra_module = THIS_MODULE,
-		},
-	},
-};
-
 static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
 					   const u8 *key, unsigned int len)
 {
