Diffstat (limited to 'crypto/serpent_generic.c')
-rw-r--r--  crypto/serpent_generic.c | 264
1 file changed, 102 insertions(+), 162 deletions(-)
diff --git a/crypto/serpent_generic.c b/crypto/serpent_generic.c
index 7ddbd7e88859..b21e7606c652 100644
--- a/crypto/serpent_generic.c
+++ b/crypto/serpent_generic.c
@@ -1,26 +1,17 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
/*
* Cryptographic API.
*
* Serpent Cipher Algorithm.
*
* Copyright (C) 2002 Dag Arne Osvik <osvik@ii.uib.no>
- * 2003 Herbert Valerio Riedel <hvr@gnu.org>
- *
- * Added tnepres support:
- * Ruben Jesus Garcia Hernandez <ruben@ugr.es>, 18.10.2004
- * Based on code by hvr
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
*/
+#include <crypto/algapi.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/errno.h>
-#include <asm/byteorder.h>
-#include <linux/crypto.h>
+#include <linux/unaligned.h>
#include <linux/types.h>
#include <crypto/serpent.h>
@@ -229,12 +220,59 @@
x4 ^= x2; \
})
+/*
+ * both gcc and clang have misoptimized this function in the past,
+ * producing horrible object code from spilling temporary variables
+ * on the stack. Forcing this part out of line avoids that.
+ */
+static noinline void __serpent_setkey_sbox(u32 r0, u32 r1, u32 r2,
+ u32 r3, u32 r4, u32 *k)
+{
+ k += 100;
+ S3(r3, r4, r0, r1, r2); store_and_load_keys(r1, r2, r4, r3, 28, 24);
+ S4(r1, r2, r4, r3, r0); store_and_load_keys(r2, r4, r3, r0, 24, 20);
+ S5(r2, r4, r3, r0, r1); store_and_load_keys(r1, r2, r4, r0, 20, 16);
+ S6(r1, r2, r4, r0, r3); store_and_load_keys(r4, r3, r2, r0, 16, 12);
+ S7(r4, r3, r2, r0, r1); store_and_load_keys(r1, r2, r0, r4, 12, 8);
+ S0(r1, r2, r0, r4, r3); store_and_load_keys(r0, r2, r4, r1, 8, 4);
+ S1(r0, r2, r4, r1, r3); store_and_load_keys(r3, r4, r1, r0, 4, 0);
+ S2(r3, r4, r1, r0, r2); store_and_load_keys(r2, r4, r3, r0, 0, -4);
+ S3(r2, r4, r3, r0, r1); store_and_load_keys(r0, r1, r4, r2, -4, -8);
+ S4(r0, r1, r4, r2, r3); store_and_load_keys(r1, r4, r2, r3, -8, -12);
+ S5(r1, r4, r2, r3, r0); store_and_load_keys(r0, r1, r4, r3, -12, -16);
+ S6(r0, r1, r4, r3, r2); store_and_load_keys(r4, r2, r1, r3, -16, -20);
+ S7(r4, r2, r1, r3, r0); store_and_load_keys(r0, r1, r3, r4, -20, -24);
+ S0(r0, r1, r3, r4, r2); store_and_load_keys(r3, r1, r4, r0, -24, -28);
+ k -= 50;
+ S1(r3, r1, r4, r0, r2); store_and_load_keys(r2, r4, r0, r3, 22, 18);
+ S2(r2, r4, r0, r3, r1); store_and_load_keys(r1, r4, r2, r3, 18, 14);
+ S3(r1, r4, r2, r3, r0); store_and_load_keys(r3, r0, r4, r1, 14, 10);
+ S4(r3, r0, r4, r1, r2); store_and_load_keys(r0, r4, r1, r2, 10, 6);
+ S5(r0, r4, r1, r2, r3); store_and_load_keys(r3, r0, r4, r2, 6, 2);
+ S6(r3, r0, r4, r2, r1); store_and_load_keys(r4, r1, r0, r2, 2, -2);
+ S7(r4, r1, r0, r2, r3); store_and_load_keys(r3, r0, r2, r4, -2, -6);
+ S0(r3, r0, r2, r4, r1); store_and_load_keys(r2, r0, r4, r3, -6, -10);
+ S1(r2, r0, r4, r3, r1); store_and_load_keys(r1, r4, r3, r2, -10, -14);
+ S2(r1, r4, r3, r2, r0); store_and_load_keys(r0, r4, r1, r2, -14, -18);
+ S3(r0, r4, r1, r2, r3); store_and_load_keys(r2, r3, r4, r0, -18, -22);
+ k -= 50;
+ S4(r2, r3, r4, r0, r1); store_and_load_keys(r3, r4, r0, r1, 28, 24);
+ S5(r3, r4, r0, r1, r2); store_and_load_keys(r2, r3, r4, r1, 24, 20);
+ S6(r2, r3, r4, r1, r0); store_and_load_keys(r4, r0, r3, r1, 20, 16);
+ S7(r4, r0, r3, r1, r2); store_and_load_keys(r2, r3, r1, r4, 16, 12);
+ S0(r2, r3, r1, r4, r0); store_and_load_keys(r1, r3, r4, r2, 12, 8);
+ S1(r1, r3, r4, r2, r0); store_and_load_keys(r0, r4, r2, r1, 8, 4);
+ S2(r0, r4, r2, r1, r3); store_and_load_keys(r3, r4, r0, r1, 4, 0);
+ S3(r3, r4, r0, r1, r2); storekeys(r1, r2, r4, r3, 0);
+}
+
int __serpent_setkey(struct serpent_ctx *ctx, const u8 *key,
unsigned int keylen)
{
u32 *k = ctx->expkey;
u8 *k8 = (u8 *)k;
u32 r0, r1, r2, r3, r4;
+ __le32 *lk;
int i;
/* Copy key, add padding */
@@ -246,22 +284,32 @@ int __serpent_setkey(struct serpent_ctx *ctx, const u8 *key,
while (i < SERPENT_MAX_KEY_SIZE)
k8[i++] = 0;
+ lk = (__le32 *)k;
+ k[0] = le32_to_cpu(lk[0]);
+ k[1] = le32_to_cpu(lk[1]);
+ k[2] = le32_to_cpu(lk[2]);
+ k[3] = le32_to_cpu(lk[3]);
+ k[4] = le32_to_cpu(lk[4]);
+ k[5] = le32_to_cpu(lk[5]);
+ k[6] = le32_to_cpu(lk[6]);
+ k[7] = le32_to_cpu(lk[7]);
+
/* Expand key using polynomial */
- r0 = le32_to_cpu(k[3]);
- r1 = le32_to_cpu(k[4]);
- r2 = le32_to_cpu(k[5]);
- r3 = le32_to_cpu(k[6]);
- r4 = le32_to_cpu(k[7]);
-
- keyiter(le32_to_cpu(k[0]), r0, r4, r2, 0, 0);
- keyiter(le32_to_cpu(k[1]), r1, r0, r3, 1, 1);
- keyiter(le32_to_cpu(k[2]), r2, r1, r4, 2, 2);
- keyiter(le32_to_cpu(k[3]), r3, r2, r0, 3, 3);
- keyiter(le32_to_cpu(k[4]), r4, r3, r1, 4, 4);
- keyiter(le32_to_cpu(k[5]), r0, r4, r2, 5, 5);
- keyiter(le32_to_cpu(k[6]), r1, r0, r3, 6, 6);
- keyiter(le32_to_cpu(k[7]), r2, r1, r4, 7, 7);
+ r0 = k[3];
+ r1 = k[4];
+ r2 = k[5];
+ r3 = k[6];
+ r4 = k[7];
+
+ keyiter(k[0], r0, r4, r2, 0, 0);
+ keyiter(k[1], r1, r0, r3, 1, 1);
+ keyiter(k[2], r2, r1, r4, 2, 2);
+ keyiter(k[3], r3, r2, r0, 3, 3);
+ keyiter(k[4], r4, r3, r1, 4, 4);
+ keyiter(k[5], r0, r4, r2, 5, 5);
+ keyiter(k[6], r1, r0, r3, 6, 6);
+ keyiter(k[7], r2, r1, r4, 7, 7);
keyiter(k[0], r3, r2, r0, 8, 8);
keyiter(k[1], r4, r3, r1, 9, 9);
@@ -395,42 +443,7 @@ int __serpent_setkey(struct serpent_ctx *ctx, const u8 *key,
keyiter(k[23], r1, r0, r3, 131, 31);
/* Apply S-boxes */
-
- S3(r3, r4, r0, r1, r2); store_and_load_keys(r1, r2, r4, r3, 28, 24);
- S4(r1, r2, r4, r3, r0); store_and_load_keys(r2, r4, r3, r0, 24, 20);
- S5(r2, r4, r3, r0, r1); store_and_load_keys(r1, r2, r4, r0, 20, 16);
- S6(r1, r2, r4, r0, r3); store_and_load_keys(r4, r3, r2, r0, 16, 12);
- S7(r4, r3, r2, r0, r1); store_and_load_keys(r1, r2, r0, r4, 12, 8);
- S0(r1, r2, r0, r4, r3); store_and_load_keys(r0, r2, r4, r1, 8, 4);
- S1(r0, r2, r4, r1, r3); store_and_load_keys(r3, r4, r1, r0, 4, 0);
- S2(r3, r4, r1, r0, r2); store_and_load_keys(r2, r4, r3, r0, 0, -4);
- S3(r2, r4, r3, r0, r1); store_and_load_keys(r0, r1, r4, r2, -4, -8);
- S4(r0, r1, r4, r2, r3); store_and_load_keys(r1, r4, r2, r3, -8, -12);
- S5(r1, r4, r2, r3, r0); store_and_load_keys(r0, r1, r4, r3, -12, -16);
- S6(r0, r1, r4, r3, r2); store_and_load_keys(r4, r2, r1, r3, -16, -20);
- S7(r4, r2, r1, r3, r0); store_and_load_keys(r0, r1, r3, r4, -20, -24);
- S0(r0, r1, r3, r4, r2); store_and_load_keys(r3, r1, r4, r0, -24, -28);
- k -= 50;
- S1(r3, r1, r4, r0, r2); store_and_load_keys(r2, r4, r0, r3, 22, 18);
- S2(r2, r4, r0, r3, r1); store_and_load_keys(r1, r4, r2, r3, 18, 14);
- S3(r1, r4, r2, r3, r0); store_and_load_keys(r3, r0, r4, r1, 14, 10);
- S4(r3, r0, r4, r1, r2); store_and_load_keys(r0, r4, r1, r2, 10, 6);
- S5(r0, r4, r1, r2, r3); store_and_load_keys(r3, r0, r4, r2, 6, 2);
- S6(r3, r0, r4, r2, r1); store_and_load_keys(r4, r1, r0, r2, 2, -2);
- S7(r4, r1, r0, r2, r3); store_and_load_keys(r3, r0, r2, r4, -2, -6);
- S0(r3, r0, r2, r4, r1); store_and_load_keys(r2, r0, r4, r3, -6, -10);
- S1(r2, r0, r4, r3, r1); store_and_load_keys(r1, r4, r3, r2, -10, -14);
- S2(r1, r4, r3, r2, r0); store_and_load_keys(r0, r4, r1, r2, -14, -18);
- S3(r0, r4, r1, r2, r3); store_and_load_keys(r2, r3, r4, r0, -18, -22);
- k -= 50;
- S4(r2, r3, r4, r0, r1); store_and_load_keys(r3, r4, r0, r1, 28, 24);
- S5(r3, r4, r0, r1, r2); store_and_load_keys(r2, r3, r4, r1, 24, 20);
- S6(r2, r3, r4, r1, r0); store_and_load_keys(r4, r0, r3, r1, 20, 16);
- S7(r4, r0, r3, r1, r2); store_and_load_keys(r2, r3, r1, r4, 16, 12);
- S0(r2, r3, r1, r4, r0); store_and_load_keys(r1, r3, r4, r2, 12, 8);
- S1(r1, r3, r4, r2, r0); store_and_load_keys(r0, r4, r2, r1, 8, 4);
- S2(r0, r4, r2, r1, r3); store_and_load_keys(r3, r4, r0, r1, 4, 0);
- S3(r3, r4, r0, r1, r2); storekeys(r1, r2, r4, r3, 0);
+ __serpent_setkey_sbox(r0, r1, r2, r3, r4, ctx->expkey);
return 0;
}
@@ -442,22 +455,16 @@ int serpent_setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
}
EXPORT_SYMBOL_GPL(serpent_setkey);
-void __serpent_encrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src)
+void __serpent_encrypt(const void *c, u8 *dst, const u8 *src)
{
+ const struct serpent_ctx *ctx = c;
const u32 *k = ctx->expkey;
- const __le32 *s = (const __le32 *)src;
- __le32 *d = (__le32 *)dst;
u32 r0, r1, r2, r3, r4;
-/*
- * Note: The conversions between u8* and u32* might cause trouble
- * on architectures with stricter alignment rules than x86
- */
-
- r0 = le32_to_cpu(s[0]);
- r1 = le32_to_cpu(s[1]);
- r2 = le32_to_cpu(s[2]);
- r3 = le32_to_cpu(s[3]);
+ r0 = get_unaligned_le32(src);
+ r1 = get_unaligned_le32(src + 4);
+ r2 = get_unaligned_le32(src + 8);
+ r3 = get_unaligned_le32(src + 12);
K(r0, r1, r2, r3, 0);
S0(r0, r1, r2, r3, r4); LK(r2, r1, r3, r0, r4, 1);
@@ -493,10 +500,10 @@ void __serpent_encrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src)
S6(r0, r1, r3, r2, r4); LK(r3, r4, r1, r2, r0, 31);
S7(r3, r4, r1, r2, r0); K(r0, r1, r2, r3, 32);
- d[0] = cpu_to_le32(r0);
- d[1] = cpu_to_le32(r1);
- d[2] = cpu_to_le32(r2);
- d[3] = cpu_to_le32(r3);
+ put_unaligned_le32(r0, dst);
+ put_unaligned_le32(r1, dst + 4);
+ put_unaligned_le32(r2, dst + 8);
+ put_unaligned_le32(r3, dst + 12);
}
EXPORT_SYMBOL_GPL(__serpent_encrypt);
@@ -507,17 +514,16 @@ static void serpent_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
__serpent_encrypt(ctx, dst, src);
}
-void __serpent_decrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src)
+void __serpent_decrypt(const void *c, u8 *dst, const u8 *src)
{
+ const struct serpent_ctx *ctx = c;
const u32 *k = ctx->expkey;
- const __le32 *s = (const __le32 *)src;
- __le32 *d = (__le32 *)dst;
u32 r0, r1, r2, r3, r4;
- r0 = le32_to_cpu(s[0]);
- r1 = le32_to_cpu(s[1]);
- r2 = le32_to_cpu(s[2]);
- r3 = le32_to_cpu(s[3]);
+ r0 = get_unaligned_le32(src);
+ r1 = get_unaligned_le32(src + 4);
+ r2 = get_unaligned_le32(src + 8);
+ r3 = get_unaligned_le32(src + 12);
K(r0, r1, r2, r3, 32);
SI7(r0, r1, r2, r3, r4); KL(r1, r3, r0, r4, r2, 31);
@@ -553,10 +559,10 @@ void __serpent_decrypt(struct serpent_ctx *ctx, u8 *dst, const u8 *src)
SI1(r3, r1, r2, r0, r4); KL(r4, r1, r2, r0, r3, 1);
SI0(r4, r1, r2, r0, r3); K(r2, r3, r1, r4, 0);
- d[0] = cpu_to_le32(r2);
- d[1] = cpu_to_le32(r3);
- d[2] = cpu_to_le32(r1);
- d[3] = cpu_to_le32(r4);
+ put_unaligned_le32(r2, dst);
+ put_unaligned_le32(r3, dst + 4);
+ put_unaligned_le32(r1, dst + 8);
+ put_unaligned_le32(r4, dst + 12);
}
EXPORT_SYMBOL_GPL(__serpent_decrypt);
@@ -567,66 +573,13 @@ static void serpent_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
__serpent_decrypt(ctx, dst, src);
}
-static int tnepres_setkey(struct crypto_tfm *tfm, const u8 *key,
- unsigned int keylen)
-{
- u8 rev_key[SERPENT_MAX_KEY_SIZE];
- int i;
-
- for (i = 0; i < keylen; ++i)
- rev_key[keylen - i - 1] = key[i];
-
- return serpent_setkey(tfm, rev_key, keylen);
-}
-
-static void tnepres_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
- const u32 * const s = (const u32 * const)src;
- u32 * const d = (u32 * const)dst;
-
- u32 rs[4], rd[4];
-
- rs[0] = swab32(s[3]);
- rs[1] = swab32(s[2]);
- rs[2] = swab32(s[1]);
- rs[3] = swab32(s[0]);
-
- serpent_encrypt(tfm, (u8 *)rd, (u8 *)rs);
-
- d[0] = swab32(rd[3]);
- d[1] = swab32(rd[2]);
- d[2] = swab32(rd[1]);
- d[3] = swab32(rd[0]);
-}
-
-static void tnepres_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
-{
- const u32 * const s = (const u32 * const)src;
- u32 * const d = (u32 * const)dst;
-
- u32 rs[4], rd[4];
-
- rs[0] = swab32(s[3]);
- rs[1] = swab32(s[2]);
- rs[2] = swab32(s[1]);
- rs[3] = swab32(s[0]);
-
- serpent_decrypt(tfm, (u8 *)rd, (u8 *)rs);
-
- d[0] = swab32(rd[3]);
- d[1] = swab32(rd[2]);
- d[2] = swab32(rd[1]);
- d[3] = swab32(rd[0]);
-}
-
-static struct crypto_alg srp_algs[2] = { {
+static struct crypto_alg srp_alg = {
.cra_name = "serpent",
.cra_driver_name = "serpent-generic",
.cra_priority = 100,
.cra_flags = CRYPTO_ALG_TYPE_CIPHER,
.cra_blocksize = SERPENT_BLOCK_SIZE,
.cra_ctxsize = sizeof(struct serpent_ctx),
- .cra_alignmask = 3,
.cra_module = THIS_MODULE,
.cra_u = { .cipher = {
.cia_min_keysize = SERPENT_MIN_KEY_SIZE,
@@ -634,36 +587,23 @@ static struct crypto_alg srp_algs[2] = { {
.cia_setkey = serpent_setkey,
.cia_encrypt = serpent_encrypt,
.cia_decrypt = serpent_decrypt } }
-}, {
- .cra_name = "tnepres",
- .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
- .cra_blocksize = SERPENT_BLOCK_SIZE,
- .cra_ctxsize = sizeof(struct serpent_ctx),
- .cra_alignmask = 3,
- .cra_module = THIS_MODULE,
- .cra_u = { .cipher = {
- .cia_min_keysize = SERPENT_MIN_KEY_SIZE,
- .cia_max_keysize = SERPENT_MAX_KEY_SIZE,
- .cia_setkey = tnepres_setkey,
- .cia_encrypt = tnepres_encrypt,
- .cia_decrypt = tnepres_decrypt } }
-} };
+};
static int __init serpent_mod_init(void)
{
- return crypto_register_algs(srp_algs, ARRAY_SIZE(srp_algs));
+ return crypto_register_alg(&srp_alg);
}
static void __exit serpent_mod_fini(void)
{
- crypto_unregister_algs(srp_algs, ARRAY_SIZE(srp_algs));
+ crypto_unregister_alg(&srp_alg);
}
module_init(serpent_mod_init);
module_exit(serpent_mod_fini);
MODULE_LICENSE("GPL");
-MODULE_DESCRIPTION("Serpent and tnepres (kerneli compatible serpent reversed) Cipher Algorithm");
+MODULE_DESCRIPTION("Serpent Cipher Algorithm");
MODULE_AUTHOR("Dag Arne Osvik <osvik@ii.uib.no>");
-MODULE_ALIAS("tnepres");
-MODULE_ALIAS("serpent");
+MODULE_ALIAS_CRYPTO("serpent");
+MODULE_ALIAS_CRYPTO("serpent-generic");