author    Vakul Garg <vakul.garg@nxp.com>    2019-03-20 02:03:36 +0000
committer David S. Miller <davem@davemloft.net>    2019-03-20 11:02:05 -0700
commit    f295b3ae9f5927e084bd5decdff82390e3471801 (patch)
tree      787496d890b276c25f9df4df497c9c5eb1a4f9fd /net
parent    6a23c0a6af98c927f387353a219c1f5664bb3d5b (diff)
net/tls: Add support of AES128-CCM based ciphers
Added support for AES128-CCM based record encryption. AES128-CCM is similar to AES128-GCM: both have the same salt/IV/MAC sizes. The notable difference between the two is that, when invoking an AES128-CCM operation, the salt||nonce (which is passed as the IV) has to be prefixed with the hardcoded value '2'. Further, the CCM implementation in the kernel requires the IV passed in crypto_aead_request() to be a full 16 bytes.

Therefore, the record structure 'struct tls_rec' has been modified to reserve 16 bytes for the IV. This works for both GCM and CCM based ciphers.

Signed-off-by: Vakul Garg <vakul.garg@nxp.com>
Signed-off-by: David S. Miller <davem@davemloft.net>
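The IV handling the message describes can be sketched in a few lines of plain C. The sizes and the '2' prefix below follow the description above and the well-known 4-byte salt / 8-byte explicit nonce of the AES128 TLS ciphers; the constant names mirror, but are not copied from, the kernel headers:

/*
 * Minimal sketch of the 16-byte per-record IV layout this patch relies on;
 * names and sizes follow the commit message, not the exact kernel code.
 */
#include <stdint.h>
#include <string.h>

#define MAX_IV_SIZE            16  /* kernel CCM wants a full 16-byte IV     */
#define TLS_AES_CCM_IV_B0_BYTE  2  /* constant first byte of the CCM B0 block */
#define SALT_SIZE               4  /* same for AES128-GCM and AES128-CCM      */
#define NONCE_SIZE              8  /* explicit per-record nonce               */

/* Build the IV buffer handed to the AEAD request for one record. */
static void tls_build_record_iv(uint8_t iv_out[MAX_IV_SIZE],
				const uint8_t *salt,   /* SALT_SIZE bytes  */
				const uint8_t *nonce,  /* NONCE_SIZE bytes */
				int is_ccm)
{
	int off = 0;

	memset(iv_out, 0, MAX_IV_SIZE);
	if (is_ccm)
		iv_out[off++] = TLS_AES_CCM_IV_B0_BYTE; /* hardcoded '2' prefix */

	memcpy(iv_out + off, salt, SALT_SIZE);              /* implicit part */
	memcpy(iv_out + off + SALT_SIZE, nonce, NONCE_SIZE); /* explicit part */
	/* for TLS 1.3 the kernel then XORs the record sequence number into
	 * the nonce part, cf. xor_iv_with_seq() */
}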
Diffstat (limited to 'net')
-rw-r--r--net/tls/tls_main.c31
-rw-r--r--net/tls/tls_sw.c67
2 files changed, 69 insertions(+), 29 deletions(-)
diff --git a/net/tls/tls_main.c b/net/tls/tls_main.c
index df921a2904b9..0e24edab2535 100644
--- a/net/tls/tls_main.c
+++ b/net/tls/tls_main.c
@@ -469,27 +469,32 @@ static int do_tls_setsockopt_conf(struct sock *sk, char __user *optval,
switch (crypto_info->cipher_type) {
case TLS_CIPHER_AES_GCM_128:
+ optsize = sizeof(struct tls12_crypto_info_aes_gcm_128);
+ break;
case TLS_CIPHER_AES_GCM_256: {
- optsize = crypto_info->cipher_type == TLS_CIPHER_AES_GCM_128 ?
- sizeof(struct tls12_crypto_info_aes_gcm_128) :
- sizeof(struct tls12_crypto_info_aes_gcm_256);
- if (optlen != optsize) {
- rc = -EINVAL;
- goto err_crypto_info;
- }
- rc = copy_from_user(crypto_info + 1, optval + sizeof(*crypto_info),
- optlen - sizeof(*crypto_info));
- if (rc) {
- rc = -EFAULT;
- goto err_crypto_info;
- }
+ optsize = sizeof(struct tls12_crypto_info_aes_gcm_256);
break;
}
+ case TLS_CIPHER_AES_CCM_128:
+ optsize = sizeof(struct tls12_crypto_info_aes_ccm_128);
+ break;
default:
rc = -EINVAL;
goto err_crypto_info;
}
+ if (optlen != optsize) {
+ rc = -EINVAL;
+ goto err_crypto_info;
+ }
+
+ rc = copy_from_user(crypto_info + 1, optval + sizeof(*crypto_info),
+ optlen - sizeof(*crypto_info));
+ if (rc) {
+ rc = -EFAULT;
+ goto err_crypto_info;
+ }
+
if (tx) {
#ifdef CONFIG_TLS_DEVICE
rc = tls_set_device_offload(sk, ctx);
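The hunk above replaces the per-cipher length checks with a single shared path: each case only selects the expected option size, and the optlen check plus copy_from_user() run once for all ciphers. A condensed, user-space illustration of that selection step (an assumption-labelled helper, not a function that exists in the kernel) could look like this, using the structures from include/uapi/linux/tls.h:

#include <stddef.h>
#include <linux/tls.h>

/* Return the expected setsockopt() payload size for a cipher, or 0. */
static size_t tls_crypto_info_size(unsigned short cipher_type)
{
	switch (cipher_type) {
	case TLS_CIPHER_AES_GCM_128:
		return sizeof(struct tls12_crypto_info_aes_gcm_128);
	case TLS_CIPHER_AES_GCM_256:
		return sizeof(struct tls12_crypto_info_aes_gcm_256);
	case TLS_CIPHER_AES_CCM_128:
		return sizeof(struct tls12_crypto_info_aes_ccm_128);
	default:
		return 0;	/* unknown cipher -> caller rejects with -EINVAL */
	}
}

In the kernel the returned size is compared against optlen once, and the user copy happens a single time, instead of being duplicated inside every case as before.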
diff --git a/net/tls/tls_sw.c b/net/tls/tls_sw.c
index 425351ac2a9b..f635c103581e 100644
--- a/net/tls/tls_sw.c
+++ b/net/tls/tls_sw.c
@@ -42,8 +42,6 @@
#include <net/strparser.h>
#include <net/tls.h>
-#define MAX_IV_SIZE TLS_CIPHER_AES_GCM_128_IV_SIZE
-
static int __skb_nsg(struct sk_buff *skb, int offset, int len,
unsigned int recursion_level)
{
@@ -479,11 +477,18 @@ static int tls_do_encryption(struct sock *sk,
struct tls_rec *rec = ctx->open_rec;
struct sk_msg *msg_en = &rec->msg_encrypted;
struct scatterlist *sge = sk_msg_elem(msg_en, start);
- int rc;
+ int rc, iv_offset = 0;
+
+ /* For CCM based ciphers, first byte of IV is a constant */
+ if (prot->cipher_type == TLS_CIPHER_AES_CCM_128) {
+ rec->iv_data[0] = TLS_AES_CCM_IV_B0_BYTE;
+ iv_offset = 1;
+ }
+
+ memcpy(&rec->iv_data[iv_offset], tls_ctx->tx.iv,
+ prot->iv_size + prot->salt_size);
- memcpy(rec->iv_data, tls_ctx->tx.iv, sizeof(rec->iv_data));
- xor_iv_with_seq(prot->version, rec->iv_data,
- tls_ctx->tx.rec_seq);
+ xor_iv_with_seq(prot->version, rec->iv_data, tls_ctx->tx.rec_seq);
sge->offset += prot->prepend_size;
sge->length -= prot->prepend_size;
@@ -1344,6 +1349,7 @@ static int decrypt_internal(struct sock *sk, struct sk_buff *skb,
struct scatterlist *sgout = NULL;
const int data_len = rxm->full_len - prot->overhead_size +
prot->tail_size;
+ int iv_offset = 0;
if (*zc && (out_iov || out_sg)) {
if (out_iov)
@@ -1386,18 +1392,25 @@ static int decrypt_internal(struct sock *sk, struct sk_buff *skb,
aad = (u8 *)(sgout + n_sgout);
iv = aad + prot->aad_size;
+ /* For CCM based ciphers, first byte of nonce+iv is always '2' */
+ if (prot->cipher_type == TLS_CIPHER_AES_CCM_128) {
+ iv[0] = 2;
+ iv_offset = 1;
+ }
+
/* Prepare IV */
err = skb_copy_bits(skb, rxm->offset + TLS_HEADER_SIZE,
- iv + TLS_CIPHER_AES_GCM_128_SALT_SIZE,
+ iv + iv_offset + prot->salt_size,
prot->iv_size);
if (err < 0) {
kfree(mem);
return err;
}
if (prot->version == TLS_1_3_VERSION)
- memcpy(iv, tls_ctx->rx.iv, crypto_aead_ivsize(ctx->aead_recv));
+ memcpy(iv + iv_offset, tls_ctx->rx.iv,
+ crypto_aead_ivsize(ctx->aead_recv));
else
- memcpy(iv, tls_ctx->rx.iv, TLS_CIPHER_AES_GCM_128_SALT_SIZE);
+ memcpy(iv + iv_offset, tls_ctx->rx.iv, prot->salt_size);
xor_iv_with_seq(prot->version, iv, tls_ctx->rx.rec_seq);
@@ -2152,14 +2165,15 @@ int tls_set_sw_offload(struct sock *sk, struct tls_context *ctx, int tx)
struct tls_crypto_info *crypto_info;
struct tls12_crypto_info_aes_gcm_128 *gcm_128_info;
struct tls12_crypto_info_aes_gcm_256 *gcm_256_info;
+ struct tls12_crypto_info_aes_ccm_128 *ccm_128_info;
struct tls_sw_context_tx *sw_ctx_tx = NULL;
struct tls_sw_context_rx *sw_ctx_rx = NULL;
struct cipher_context *cctx;
struct crypto_aead **aead;
struct strp_callbacks cb;
- u16 nonce_size, tag_size, iv_size, rec_seq_size;
+ u16 nonce_size, tag_size, iv_size, rec_seq_size, salt_size;
struct crypto_tfm *tfm;
- char *iv, *rec_seq, *key, *salt;
+ char *iv, *rec_seq, *key, *salt, *cipher_name;
size_t keysize;
int rc = 0;
@@ -2224,6 +2238,8 @@ int tls_set_sw_offload(struct sock *sk, struct tls_context *ctx, int tx)
keysize = TLS_CIPHER_AES_GCM_128_KEY_SIZE;
key = gcm_128_info->key;
salt = gcm_128_info->salt;
+ salt_size = TLS_CIPHER_AES_GCM_128_SALT_SIZE;
+ cipher_name = "gcm(aes)";
break;
}
case TLS_CIPHER_AES_GCM_256: {
@@ -2239,6 +2255,25 @@ int tls_set_sw_offload(struct sock *sk, struct tls_context *ctx, int tx)
keysize = TLS_CIPHER_AES_GCM_256_KEY_SIZE;
key = gcm_256_info->key;
salt = gcm_256_info->salt;
+ salt_size = TLS_CIPHER_AES_GCM_256_SALT_SIZE;
+ cipher_name = "gcm(aes)";
+ break;
+ }
+ case TLS_CIPHER_AES_CCM_128: {
+ nonce_size = TLS_CIPHER_AES_CCM_128_IV_SIZE;
+ tag_size = TLS_CIPHER_AES_CCM_128_TAG_SIZE;
+ iv_size = TLS_CIPHER_AES_CCM_128_IV_SIZE;
+ iv = ((struct tls12_crypto_info_aes_ccm_128 *)crypto_info)->iv;
+ rec_seq_size = TLS_CIPHER_AES_CCM_128_REC_SEQ_SIZE;
+ rec_seq =
+ ((struct tls12_crypto_info_aes_ccm_128 *)crypto_info)->rec_seq;
+ ccm_128_info =
+ (struct tls12_crypto_info_aes_ccm_128 *)crypto_info;
+ keysize = TLS_CIPHER_AES_CCM_128_KEY_SIZE;
+ key = ccm_128_info->key;
+ salt = ccm_128_info->salt;
+ salt_size = TLS_CIPHER_AES_CCM_128_SALT_SIZE;
+ cipher_name = "ccm(aes)";
break;
}
default:
@@ -2268,16 +2303,16 @@ int tls_set_sw_offload(struct sock *sk, struct tls_context *ctx, int tx)
prot->overhead_size = prot->prepend_size +
prot->tag_size + prot->tail_size;
prot->iv_size = iv_size;
- cctx->iv = kmalloc(iv_size + TLS_CIPHER_AES_GCM_128_SALT_SIZE,
- GFP_KERNEL);
+ prot->salt_size = salt_size;
+ cctx->iv = kmalloc(iv_size + salt_size, GFP_KERNEL);
if (!cctx->iv) {
rc = -ENOMEM;
goto free_priv;
}
/* Note: 128 & 256 bit salt are the same size */
- memcpy(cctx->iv, salt, TLS_CIPHER_AES_GCM_128_SALT_SIZE);
- memcpy(cctx->iv + TLS_CIPHER_AES_GCM_128_SALT_SIZE, iv, iv_size);
prot->rec_seq_size = rec_seq_size;
+ memcpy(cctx->iv, salt, salt_size);
+ memcpy(cctx->iv + salt_size, iv, iv_size);
cctx->rec_seq = kmemdup(rec_seq, rec_seq_size, GFP_KERNEL);
if (!cctx->rec_seq) {
rc = -ENOMEM;
@@ -2285,7 +2320,7 @@ int tls_set_sw_offload(struct sock *sk, struct tls_context *ctx, int tx)
}
if (!*aead) {
- *aead = crypto_alloc_aead("gcm(aes)", 0, 0);
+ *aead = crypto_alloc_aead(cipher_name, 0, 0);
if (IS_ERR(*aead)) {
rc = PTR_ERR(*aead);
*aead = NULL;
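For context, a hedged user-space sketch of what this patch enables: requesting AES128-CCM for the kTLS transmit path. It assumes the matching UAPI definitions (TLS_CIPHER_AES_CCM_128, struct tls12_crypto_info_aes_ccm_128) from include/uapi/linux/tls.h added alongside this net/ change, and that the key material has already been negotiated by a user-space TLS handshake:

#include <string.h>
#include <sys/socket.h>
#include <netinet/tcp.h>
#include <linux/tls.h>

#ifndef SOL_TLS
#define SOL_TLS 282	/* not exposed by older libc headers */
#endif

static int enable_ccm_tx(int sock,
			 const unsigned char *key,      /* 16 bytes */
			 const unsigned char *salt,     /*  4 bytes */
			 const unsigned char *iv,       /*  8 bytes */
			 const unsigned char *rec_seq)  /*  8 bytes */
{
	struct tls12_crypto_info_aes_ccm_128 ci;

	/* Attach the TLS upper layer protocol to the TCP socket. */
	if (setsockopt(sock, SOL_TCP, TCP_ULP, "tls", sizeof("tls")))
		return -1;

	memset(&ci, 0, sizeof(ci));
	ci.info.version = TLS_1_2_VERSION;
	ci.info.cipher_type = TLS_CIPHER_AES_CCM_128;
	memcpy(ci.key, key, TLS_CIPHER_AES_CCM_128_KEY_SIZE);
	memcpy(ci.salt, salt, TLS_CIPHER_AES_CCM_128_SALT_SIZE);
	memcpy(ci.iv, iv, TLS_CIPHER_AES_CCM_128_IV_SIZE);
	memcpy(ci.rec_seq, rec_seq, TLS_CIPHER_AES_CCM_128_REC_SEQ_SIZE);

	/* Hand the cipher state to the kernel for the transmit direction. */
	return setsockopt(sock, SOL_TLS, TLS_TX, &ci, sizeof(ci));
}

The receive direction is symmetric: the same structure is passed with TLS_RX, which exercises the decrypt_internal() changes in tls_sw.c above.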