Diffstat (limited to 'drivers/crypto/amcc/crypto4xx_alg.c')
-rw-r--r--  drivers/crypto/amcc/crypto4xx_alg.c | 258
1 file changed, 60 insertions, 198 deletions
diff --git a/drivers/crypto/amcc/crypto4xx_alg.c b/drivers/crypto/amcc/crypto4xx_alg.c
index 4092c2aad8e2..38e8a61e9166 100644
--- a/drivers/crypto/amcc/crypto4xx_alg.c
+++ b/drivers/crypto/amcc/crypto4xx_alg.c
@@ -1,19 +1,10 @@
-/**
+// SPDX-License-Identifier: GPL-2.0-or-later
+/*
* AMCC SoC PPC4xx Crypto Driver
*
* Copyright (c) 2008 Applied Micro Circuits Corporation.
* All rights reserved. James Hsiao <jhsiao@amcc.com>
*
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
* This file implements the Linux crypto algorithms.
*/
@@ -21,15 +12,12 @@
#include <linux/interrupt.h>
#include <linux/spinlock_types.h>
#include <linux/scatterlist.h>
-#include <linux/crypto.h>
-#include <linux/hash.h>
-#include <crypto/internal/hash.h>
#include <linux/dma-mapping.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
-#include <crypto/sha.h>
+#include <crypto/sha1.h>
#include <crypto/ctr.h>
#include <crypto/skcipher.h>
#include "crypto4xx_reg_def.h"
@@ -64,7 +52,7 @@ static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
sa->sa_command_1.w = 0;
sa->sa_command_1.bf.crypto_mode31 = (cm & 4) >> 2;
sa->sa_command_1.bf.crypto_mode9_8 = cm & 3;
- sa->sa_command_1.bf.feedback_mode = cfb,
+ sa->sa_command_1.bf.feedback_mode = cfb;
sa->sa_command_1.bf.sa_rev = 1;
sa->sa_command_1.bf.hmac_muting = hmac_mc;
sa->sa_command_1.bf.extended_seq_num = esn;
@@ -76,11 +64,15 @@ static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
}
static inline int crypto4xx_crypt(struct skcipher_request *req,
- const unsigned int ivlen, bool decrypt)
+ const unsigned int ivlen, bool decrypt,
+ bool check_blocksize)
{
struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
- __le32 iv[AES_IV_SIZE];
+ __le32 iv[AES_IV_SIZE / 4];
+
+ if (check_blocksize && !IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
+ return -EINVAL;
if (ivlen)
crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);
@@ -90,27 +82,37 @@ static inline int crypto4xx_crypt(struct skcipher_request *req,
ctx->sa_len, 0, NULL);
}
-int crypto4xx_encrypt_noiv(struct skcipher_request *req)
+int crypto4xx_encrypt_noiv_block(struct skcipher_request *req)
+{
+ return crypto4xx_crypt(req, 0, false, true);
+}
+
+int crypto4xx_encrypt_iv_stream(struct skcipher_request *req)
+{
+ return crypto4xx_crypt(req, AES_IV_SIZE, false, false);
+}
+
+int crypto4xx_decrypt_noiv_block(struct skcipher_request *req)
{
- return crypto4xx_crypt(req, 0, false);
+ return crypto4xx_crypt(req, 0, true, true);
}
-int crypto4xx_encrypt_iv(struct skcipher_request *req)
+int crypto4xx_decrypt_iv_stream(struct skcipher_request *req)
{
- return crypto4xx_crypt(req, AES_IV_SIZE, false);
+ return crypto4xx_crypt(req, AES_IV_SIZE, true, false);
}
-int crypto4xx_decrypt_noiv(struct skcipher_request *req)
+int crypto4xx_encrypt_iv_block(struct skcipher_request *req)
{
- return crypto4xx_crypt(req, 0, true);
+ return crypto4xx_crypt(req, AES_IV_SIZE, false, true);
}
-int crypto4xx_decrypt_iv(struct skcipher_request *req)
+int crypto4xx_decrypt_iv_block(struct skcipher_request *req)
{
- return crypto4xx_crypt(req, AES_IV_SIZE, true);
+ return crypto4xx_crypt(req, AES_IV_SIZE, true, true);
}
-/**
+/*
* AES Functions
*/
static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
@@ -123,12 +125,9 @@ static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
struct dynamic_sa_ctl *sa;
int rc;
- if (keylen != AES_KEYSIZE_256 &&
- keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_128) {
- crypto_skcipher_set_flags(cipher,
- CRYPTO_TFM_RES_BAD_KEY_LEN);
+ if (keylen != AES_KEYSIZE_256 && keylen != AES_KEYSIZE_192 &&
+ keylen != AES_KEYSIZE_128)
return -EINVAL;
- }
/* Create SA */
if (ctx->sa_in || ctx->sa_out)
@@ -141,9 +140,10 @@ static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
/* Setup SA */
sa = ctx->sa_in;
- set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_CBC ?
- SA_SAVE_IV : SA_NOT_SAVE_IV),
- SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
+ set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_ECB ?
+ SA_NOT_SAVE_IV : SA_SAVE_IV),
+ SA_NOT_LOAD_HASH, (cm == CRYPTO_MODE_ECB ?
+ SA_LOAD_IV_FROM_SA : SA_LOAD_IV_FROM_STATE),
SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
SA_OP_GROUP_BASIC, SA_OPCODE_DECRYPT,
@@ -162,6 +162,11 @@ static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
sa = ctx->sa_out;
sa->sa_command_0.bf.dir = DIR_OUTBOUND;
+ /*
+ * SA_OPCODE_ENCRYPT is the same value as SA_OPCODE_DECRYPT.
+ * it's the DIR_(IN|OUT)BOUND that matters
+ */
+ sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT;
return 0;
}
@@ -173,13 +178,6 @@ int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
CRYPTO_FEEDBACK_MODE_NO_FB);
}
-int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
- const u8 *key, unsigned int keylen)
-{
- return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CFB,
- CRYPTO_FEEDBACK_MODE_128BIT_CFB);
-}
-
int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
const u8 *key, unsigned int keylen)
{
@@ -187,13 +185,6 @@ int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
CRYPTO_FEEDBACK_MODE_NO_FB);
}
-int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
- const u8 *key, unsigned int keylen)
-{
- return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_OFB,
- CRYPTO_FEEDBACK_MODE_64BIT_OFB);
-}
-
int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
const u8 *key, unsigned int keylen)
{
@@ -258,10 +249,10 @@ crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
* overlow.
*/
if (counter + nblks < counter) {
- struct skcipher_request *subreq = skcipher_request_ctx(req);
+ SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->sw_cipher.cipher);
int ret;
- skcipher_request_set_tfm(subreq, ctx->sw_cipher.cipher);
+ skcipher_request_set_sync_tfm(subreq, ctx->sw_cipher.cipher);
skcipher_request_set_callback(subreq, req->base.flags,
NULL, NULL);
skcipher_request_set_crypt(subreq, req->src, req->dst,
@@ -272,8 +263,8 @@ crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
return ret;
}
- return encrypt ? crypto4xx_encrypt_iv(req)
- : crypto4xx_decrypt_iv(req);
+ return encrypt ? crypto4xx_encrypt_iv_stream(req)
+ : crypto4xx_decrypt_iv_stream(req);
}
static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
@@ -281,19 +272,11 @@ static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
const u8 *key,
unsigned int keylen)
{
- int rc;
-
- crypto_skcipher_clear_flags(ctx->sw_cipher.cipher,
+ crypto_sync_skcipher_clear_flags(ctx->sw_cipher.cipher,
CRYPTO_TFM_REQ_MASK);
- crypto_skcipher_set_flags(ctx->sw_cipher.cipher,
+ crypto_sync_skcipher_set_flags(ctx->sw_cipher.cipher,
crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
- rc = crypto_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
- crypto_skcipher_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
- crypto_skcipher_set_flags(cipher,
- crypto_skcipher_get_flags(ctx->sw_cipher.cipher) &
- CRYPTO_TFM_RES_MASK);
-
- return rc;
+ return crypto_sync_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
}
int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
@@ -368,21 +351,13 @@ static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
const u8 *key,
unsigned int keylen)
{
- int rc;
-
crypto_aead_clear_flags(ctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
crypto_aead_set_flags(ctx->sw_cipher.aead,
crypto_aead_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
- rc = crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
- crypto_aead_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
- crypto_aead_set_flags(cipher,
- crypto_aead_get_flags(ctx->sw_cipher.aead) &
- CRYPTO_TFM_RES_MASK);
-
- return rc;
+ return crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
}
-/**
+/*
* AES-CCM Functions
*/
@@ -451,7 +426,7 @@ static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
struct crypto_aead *aead = crypto_aead_reqtfm(req);
- __le32 iv[16];
+ __le32 iv[4];
u32 tmp_sa[SA_AES128_CCM_LEN + 4];
struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;
unsigned int len = req->cryptlen;
@@ -497,7 +472,7 @@ int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
return crypto_aead_setauthsize(ctx->sw_cipher.aead, authsize);
}
-/**
+/*
* AES-GCM Functions
*/
@@ -516,28 +491,20 @@ static int crypto4xx_aes_gcm_validate_keylen(unsigned int keylen)
static int crypto4xx_compute_gcm_hash_key_sw(__le32 *hash_start, const u8 *key,
unsigned int keylen)
{
- struct crypto_cipher *aes_tfm = NULL;
+ struct crypto_aes_ctx ctx;
uint8_t src[16] = { 0 };
- int rc = 0;
-
- aes_tfm = crypto_alloc_cipher("aes", 0, CRYPTO_ALG_NEED_FALLBACK);
- if (IS_ERR(aes_tfm)) {
- rc = PTR_ERR(aes_tfm);
- pr_warn("could not load aes cipher driver: %d\n", rc);
- return rc;
- }
+ int rc;
- rc = crypto_cipher_setkey(aes_tfm, key, keylen);
+ rc = aes_expandkey(&ctx, key, keylen);
if (rc) {
- pr_err("setkey() failed: %d\n", rc);
- goto out;
+ pr_err("aes_expandkey() failed: %d\n", rc);
+ return rc;
}
- crypto_cipher_encrypt_one(aes_tfm, src, src);
+ aes_encrypt(&ctx, src, src);
crypto4xx_memcpy_to_le32(hash_start, src, 16);
-out:
- crypto_free_cipher(aes_tfm);
- return rc;
+ memzero_explicit(&ctx, sizeof(ctx));
+ return 0;
}
int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
@@ -548,10 +515,8 @@ int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
struct dynamic_sa_ctl *sa;
int rc = 0;
- if (crypto4xx_aes_gcm_validate_keylen(keylen) != 0) {
- crypto_aead_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ if (crypto4xx_aes_gcm_validate_keylen(keylen) != 0)
return -EINVAL;
- }
rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
if (rc)
@@ -634,106 +599,3 @@ int crypto4xx_decrypt_aes_gcm(struct aead_request *req)
{
return crypto4xx_crypt_aes_gcm(req, true);
}
-
-/**
- * HASH SHA1 Functions
- */
-static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
- unsigned int sa_len,
- unsigned char ha,
- unsigned char hm)
-{
- struct crypto_alg *alg = tfm->__crt_alg;
- struct crypto4xx_alg *my_alg;
- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
- struct dynamic_sa_hash160 *sa;
- int rc;
-
- my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
- alg.u.hash);
- ctx->dev = my_alg->dev;
-
- /* Create SA */
- if (ctx->sa_in || ctx->sa_out)
- crypto4xx_free_sa(ctx);
-
- rc = crypto4xx_alloc_sa(ctx, sa_len);
- if (rc)
- return rc;
-
- crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
- sizeof(struct crypto4xx_ctx));
- sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
- set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV,
- SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
- SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
- SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
- SA_OPCODE_HASH, DIR_INBOUND);
- set_dynamic_sa_command_1(&sa->ctrl, 0, SA_HASH_MODE_HASH,
- CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
- SA_SEQ_MASK_OFF, SA_MC_ENABLE,
- SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
- SA_NOT_COPY_HDR);
- /* Need to zero hash digest in SA */
- memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
- memset(sa->outer_digest, 0, sizeof(sa->outer_digest));
-
- return 0;
-}
-
-int crypto4xx_hash_init(struct ahash_request *req)
-{
- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
- int ds;
- struct dynamic_sa_ctl *sa;
-
- sa = ctx->sa_in;
- ds = crypto_ahash_digestsize(
- __crypto_ahash_cast(req->base.tfm));
- sa->sa_command_0.bf.digest_len = ds >> 2;
- sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;
-
- return 0;
-}
-
-int crypto4xx_hash_update(struct ahash_request *req)
-{
- struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
- struct scatterlist dst;
- unsigned int ds = crypto_ahash_digestsize(ahash);
-
- sg_init_one(&dst, req->result, ds);
-
- return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
- req->nbytes, NULL, 0, ctx->sa_in,
- ctx->sa_len, 0, NULL);
-}
-
-int crypto4xx_hash_final(struct ahash_request *req)
-{
- return 0;
-}
-
-int crypto4xx_hash_digest(struct ahash_request *req)
-{
- struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
- struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
- struct scatterlist dst;
- unsigned int ds = crypto_ahash_digestsize(ahash);
-
- sg_init_one(&dst, req->result, ds);
-
- return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
- req->nbytes, NULL, 0, ctx->sa_in,
- ctx->sa_len, 0, NULL);
-}
-
-/**
- * SHA1 Algorithm
- */
-int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm)
-{
- return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
- SA_HASH_MODE_HASH);
-}