path: root/crypto/cipher.c
blob: 47c77a3e59783320f22ea7bf4b771b0102f5b73e
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * Single-block cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

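/*
 * The key may arrive at an address that violates the algorithm's
 * alignment mask.  Bounce it through a kmalloc'd buffer, aligned up by
 * hand, before calling the algorithm's ->cia_setkey(), and wipe the
 * aligned copy before freeing it.
 */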
static int setkey_unaligned(struct crypto_cipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct cipher_alg *cia = crypto_cipher_alg(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = cia->cia_setkey(crypto_cipher_tfm(tfm), alignbuffer, keylen);
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return ret;
}

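/*
 * Validate the key length against the algorithm's declared bounds and
 * dispatch to the ->cia_setkey() implementation, taking the slow path
 * above when the key pointer violates the alignment mask.
 */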
int crypto_cipher_setkey(struct crypto_cipher *tfm,
			 const u8 *key, unsigned int keylen)
{
	struct cipher_alg *cia = crypto_cipher_alg(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);

	if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize)
		return -EINVAL;

	if ((unsigned long)key & alignmask)
		return setkey_unaligned(tfm, key, keylen);

	return cia->cia_setkey(crypto_cipher_tfm(tfm), key, keylen);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_setkey, CRYPTO_INTERNAL);

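/*
 * Common helper for single-block encryption and decryption.  If either
 * buffer is misaligned for this algorithm, the block is bounced through
 * an aligned scratch area on the stack, which MAX_CIPHER_BLOCKSIZE +
 * MAX_CIPHER_ALIGNMASK is sized to accommodate.
 */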
static inline void cipher_crypt_one(struct crypto_cipher *tfm,
				    u8 *dst, const u8 *src, bool enc)
{
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	struct cipher_alg *cia = crypto_cipher_alg(tfm);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		enc ? cia->cia_encrypt : cia->cia_decrypt;

	if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
		unsigned int bs = crypto_cipher_blocksize(tfm);
		u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
		u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);

		memcpy(tmp, src, bs);
		fn(crypto_cipher_tfm(tfm), tmp, tmp);
		memcpy(dst, tmp, bs);
	} else {
		fn(crypto_cipher_tfm(tfm), dst, src);
	}
}

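/*
 * Encrypt a single block.  @dst and @src must each be at least one
 * block long; they may point to the same buffer for in-place operation.
 */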
void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
			       u8 *dst, const u8 *src)
{
	cipher_crypt_one(tfm, dst, src, true);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_encrypt_one, CRYPTO_INTERNAL);

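/*
 * Decrypt a single block; the buffer requirements match
 * crypto_cipher_encrypt_one() above.
 */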
void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
			       u8 *dst, const u8 *src)
{
	cipher_crypt_one(tfm, dst, src, false);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_decrypt_one, CRYPTO_INTERNAL);

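/*
 * Duplicate a cipher handle without sleeping (the new transform is
 * allocated with GFP_ATOMIC).  Algorithms with a ->cra_init() hook are
 * rejected with -ENOSYS because the hook is never run for the copy; a
 * module reference is taken on behalf of the clone.
 */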
struct crypto_cipher *crypto_clone_cipher(struct crypto_cipher *cipher)
{
	struct crypto_tfm *tfm = crypto_cipher_tfm(cipher);
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto_cipher *ncipher;
	struct crypto_tfm *ntfm;

	if (alg->cra_init)
		return ERR_PTR(-ENOSYS);

	if (unlikely(!crypto_mod_get(alg)))
		return ERR_PTR(-ESTALE);

	ntfm = __crypto_alloc_tfmgfp(alg, CRYPTO_ALG_TYPE_CIPHER,
				     CRYPTO_ALG_TYPE_MASK, GFP_ATOMIC);
	if (IS_ERR(ntfm)) {
		crypto_mod_put(alg);
		return ERR_CAST(ntfm);
	}

	ntfm->crt_flags = tfm->crt_flags;

	ncipher = __crypto_cipher_cast(ntfm);

	return ncipher;
}
EXPORT_SYMBOL_GPL(crypto_clone_cipher);