path: root/crypto/poly1305_generic.c
author	Jason A. Donenfeld <Jason@zx2c4.com>	2016-11-07 20:47:09 +0100
committer	Herbert Xu <herbert@gondor.apana.org.au>	2016-11-13 17:45:03 +0800
commit	109e23bd10b5688895e61916435834c71a0bd1ed (patch)
tree	d44afb0c4525df292525acc91bb76d2857105a2c	/crypto/poly1305_generic.c
parent	650d4a978271850278466ba7222bcfc893ce9542 (diff)
crypto: poly1305 - Use unaligned access where required
By using the unaligned access helpers, we drastically improve performance on small MIPS routers that have to go through the exception fix-up handler for these unaligned accesses.

Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
Reviewed-by: Eric Biggers <ebiggers@google.com>
Acked-by: Martin Willi <martin@strongswan.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
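For context, the kernel's get_unaligned_le32() (from <asm/unaligned.h>) reads a little-endian 32-bit value through a pointer with no alignment guarantee; on CPUs that cannot do unaligned loads in hardware (such as many MIPS cores), it avoids the trap-and-emulate fix-up path that a plain 32-bit dereference like le32_to_cpup() would take. A minimal userspace sketch of the same idea, with load_le32() as a hypothetical stand-in for the kernel helper (not part of this patch):

#include <stdint.h>
#include <string.h>

/* Hypothetical userspace stand-in for get_unaligned_le32(): copy the
 * bytes first so the compiler never issues an aligned 32-bit load on a
 * possibly misaligned pointer, then assemble the value little-endian. */
static inline uint32_t load_le32(const void *p)
{
	uint8_t b[4];

	memcpy(b, p, sizeof(b));
	return (uint32_t)b[0] | ((uint32_t)b[1] << 8) |
	       ((uint32_t)b[2] << 16) | ((uint32_t)b[3] << 24);
}

On architectures where unaligned access is cheap, the kernel helper typically resolves to a direct load, so callers pay nothing for the portable path there.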
Diffstat (limited to 'crypto/poly1305_generic.c')
-rw-r--r--	crypto/poly1305_generic.c	34
1 file changed, 15 insertions, 19 deletions
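In the hunks below, the key and message bytes are loaded at byte offsets 0, 3, 6, 9 and 12 with right shifts of 0, 2, 4, 6 and 8, so limb i begins at bit 8*offset + shift = 26*i and the five masked values form a radix-2^26 representation of the 128-bit little-endian input. A throwaway check of that decomposition (an illustration only, not part of the patch; assumes GCC/Clang's unsigned __int128 and the load_le32() sketch above):

#include <assert.h>
#include <stdint.h>

/* Verify that five 26-bit limbs (full 0x3ffffff mask, as used for the
 * message blocks; the r limbs are additionally clamped) recombine to
 * the original 128-bit little-endian value. */
static void check_limb_split(const uint8_t src[16])
{
	unsigned __int128 v = 0, acc = 0;
	uint32_t limb[5];
	int i;

	for (i = 15; i >= 0; i--)
		v = (v << 8) | src[i];	/* 128-bit value from little-endian bytes */

	limb[0] = (load_le32(src +  0) >> 0) & 0x3ffffff;	/* bits   0..25  */
	limb[1] = (load_le32(src +  3) >> 2) & 0x3ffffff;	/* bits  26..51  */
	limb[2] = (load_le32(src +  6) >> 4) & 0x3ffffff;	/* bits  52..77  */
	limb[3] = (load_le32(src +  9) >> 6) & 0x3ffffff;	/* bits  78..103 */
	limb[4] = (load_le32(src + 12) >> 8) & 0x3ffffff;	/* bits 104..127 */

	for (i = 0; i < 5; i++)
		acc |= (unsigned __int128)limb[i] << (26 * i);

	assert(acc == v);
}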
diff --git a/crypto/poly1305_generic.c b/crypto/poly1305_generic.c
index 2df9835dfbc0..b1c2d57dc734 100644
--- a/crypto/poly1305_generic.c
+++ b/crypto/poly1305_generic.c
@@ -17,6 +17,7 @@
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/module.h>
+#include <asm/unaligned.h>
static inline u64 mlt(u64 a, u64 b)
{
@@ -33,11 +34,6 @@ static inline u32 and(u32 v, u32 mask)
return v & mask;
}
-static inline u32 le32_to_cpuvp(const void *p)
-{
- return le32_to_cpup(p);
-}
-
int crypto_poly1305_init(struct shash_desc *desc)
{
struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
@@ -65,19 +61,19 @@ EXPORT_SYMBOL_GPL(crypto_poly1305_setkey);
static void poly1305_setrkey(struct poly1305_desc_ctx *dctx, const u8 *key)
{
/* r &= 0xffffffc0ffffffc0ffffffc0fffffff */
- dctx->r[0] = (le32_to_cpuvp(key + 0) >> 0) & 0x3ffffff;
- dctx->r[1] = (le32_to_cpuvp(key + 3) >> 2) & 0x3ffff03;
- dctx->r[2] = (le32_to_cpuvp(key + 6) >> 4) & 0x3ffc0ff;
- dctx->r[3] = (le32_to_cpuvp(key + 9) >> 6) & 0x3f03fff;
- dctx->r[4] = (le32_to_cpuvp(key + 12) >> 8) & 0x00fffff;
+ dctx->r[0] = (get_unaligned_le32(key + 0) >> 0) & 0x3ffffff;
+ dctx->r[1] = (get_unaligned_le32(key + 3) >> 2) & 0x3ffff03;
+ dctx->r[2] = (get_unaligned_le32(key + 6) >> 4) & 0x3ffc0ff;
+ dctx->r[3] = (get_unaligned_le32(key + 9) >> 6) & 0x3f03fff;
+ dctx->r[4] = (get_unaligned_le32(key + 12) >> 8) & 0x00fffff;
}
static void poly1305_setskey(struct poly1305_desc_ctx *dctx, const u8 *key)
{
- dctx->s[0] = le32_to_cpuvp(key + 0);
- dctx->s[1] = le32_to_cpuvp(key + 4);
- dctx->s[2] = le32_to_cpuvp(key + 8);
- dctx->s[3] = le32_to_cpuvp(key + 12);
+ dctx->s[0] = get_unaligned_le32(key + 0);
+ dctx->s[1] = get_unaligned_le32(key + 4);
+ dctx->s[2] = get_unaligned_le32(key + 8);
+ dctx->s[3] = get_unaligned_le32(key + 12);
}
unsigned int crypto_poly1305_setdesckey(struct poly1305_desc_ctx *dctx,
@@ -137,11 +133,11 @@ static unsigned int poly1305_blocks(struct poly1305_desc_ctx *dctx,
while (likely(srclen >= POLY1305_BLOCK_SIZE)) {
/* h += m[i] */
- h0 += (le32_to_cpuvp(src + 0) >> 0) & 0x3ffffff;
- h1 += (le32_to_cpuvp(src + 3) >> 2) & 0x3ffffff;
- h2 += (le32_to_cpuvp(src + 6) >> 4) & 0x3ffffff;
- h3 += (le32_to_cpuvp(src + 9) >> 6) & 0x3ffffff;
- h4 += (le32_to_cpuvp(src + 12) >> 8) | hibit;
+ h0 += (get_unaligned_le32(src + 0) >> 0) & 0x3ffffff;
+ h1 += (get_unaligned_le32(src + 3) >> 2) & 0x3ffffff;
+ h2 += (get_unaligned_le32(src + 6) >> 4) & 0x3ffffff;
+ h3 += (get_unaligned_le32(src + 9) >> 6) & 0x3ffffff;
+ h4 += (get_unaligned_le32(src + 12) >> 8) | hibit;
/* h *= r */
d0 = mlt(h0, r0) + mlt(h1, s4) + mlt(h2, s3) +