Diffstat (limited to 'arch/arm64/crypto/aes-ce-ccm-glue.c')
-rw-r--r--	arch/arm64/crypto/aes-ce-ccm-glue.c	138
1 file changed, 61 insertions(+), 77 deletions(-)
diff --git a/arch/arm64/crypto/aes-ce-ccm-glue.c b/arch/arm64/crypto/aes-ce-ccm-glue.c
index a523b519700f..c4fd648471f1 100644
--- a/arch/arm64/crypto/aes-ce-ccm-glue.c
+++ b/arch/arm64/crypto/aes-ce-ccm-glue.c
@@ -8,7 +8,6 @@
  * Author: Ard Biesheuvel <ardb@kernel.org>
  */
 
-#include <asm/neon.h>
 #include <linux/unaligned.h>
 #include <crypto/aes.h>
 #include <crypto/scatterwalk.h>
@@ -16,9 +15,11 @@
 #include <crypto/internal/skcipher.h>
 #include <linux/module.h>
 
+#include <asm/simd.h>
+
 #include "aes-ce-setkey.h"
 
-MODULE_IMPORT_NS(CRYPTO_INTERNAL);
+MODULE_IMPORT_NS("CRYPTO_INTERNAL");
 
 static int num_rounds(struct crypto_aes_ctx *ctx)
 {
@@ -114,11 +115,8 @@ static u32 ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
 			in += adv;
 			abytes -= adv;
 
-			if (unlikely(rem)) {
-				kernel_neon_end();
-				kernel_neon_begin();
+			if (unlikely(rem))
 				macp = 0;
-			}
 		} else {
 			u32 l = min(AES_BLOCK_SIZE - macp, abytes);
@@ -156,23 +154,13 @@ static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
 	scatterwalk_start(&walk, req->src);
 
 	do {
-		u32 n = scatterwalk_clamp(&walk, len);
-		u8 *p;
-
-		if (!n) {
-			scatterwalk_start(&walk, sg_next(walk.sg));
-			n = scatterwalk_clamp(&walk, len);
-		}
-		p = scatterwalk_map(&walk);
-
-		macp = ce_aes_ccm_auth_data(mac, p, n, macp, ctx->key_enc,
-					    num_rounds(ctx));
+		unsigned int n;
 
+		n = scatterwalk_next(&walk, len);
+		macp = ce_aes_ccm_auth_data(mac, walk.addr, n, macp,
+					    ctx->key_enc, num_rounds(ctx));
+		scatterwalk_done_src(&walk, n);
 		len -= n;
-
-		scatterwalk_unmap(p);
-		scatterwalk_advance(&walk, n);
-		scatterwalk_done(&walk, 0, len);
 	} while (len);
 }
@@ -197,40 +185,38 @@ static int ccm_encrypt(struct aead_request *req)
 	if (unlikely(err))
 		return err;
 
-	kernel_neon_begin();
+	scoped_ksimd() {
+		if (req->assoclen)
+			ccm_calculate_auth_mac(req, mac);
 
-	if (req->assoclen)
-		ccm_calculate_auth_mac(req, mac);
+		do {
+			u32 tail = walk.nbytes % AES_BLOCK_SIZE;
+			const u8 *src = walk.src.virt.addr;
+			u8 *dst = walk.dst.virt.addr;
+			u8 buf[AES_BLOCK_SIZE];
+			u8 *final_iv = NULL;
 
-	do {
-		u32 tail = walk.nbytes % AES_BLOCK_SIZE;
-		const u8 *src = walk.src.virt.addr;
-		u8 *dst = walk.dst.virt.addr;
-		u8 buf[AES_BLOCK_SIZE];
-		u8 *final_iv = NULL;
-
-		if (walk.nbytes == walk.total) {
-			tail = 0;
-			final_iv = orig_iv;
-		}
-
-		if (unlikely(walk.nbytes < AES_BLOCK_SIZE))
-			src = dst = memcpy(&buf[sizeof(buf) - walk.nbytes],
-					   src, walk.nbytes);
+			if (walk.nbytes == walk.total) {
+				tail = 0;
+				final_iv = orig_iv;
+			}
 
-		ce_aes_ccm_encrypt(dst, src, walk.nbytes - tail,
-				   ctx->key_enc, num_rounds(ctx),
-				   mac, walk.iv, final_iv);
+			if (unlikely(walk.nbytes < AES_BLOCK_SIZE))
+				src = dst = memcpy(&buf[sizeof(buf) - walk.nbytes],
+						   src, walk.nbytes);
 
-		if (unlikely(walk.nbytes < AES_BLOCK_SIZE))
-			memcpy(walk.dst.virt.addr, dst, walk.nbytes);
+			ce_aes_ccm_encrypt(dst, src, walk.nbytes - tail,
+					   ctx->key_enc, num_rounds(ctx),
+					   mac, walk.iv, final_iv);
 
-		if (walk.nbytes) {
-			err = skcipher_walk_done(&walk, tail);
-		}
-	} while (walk.nbytes);
+			if (unlikely(walk.nbytes < AES_BLOCK_SIZE))
+				memcpy(walk.dst.virt.addr, dst, walk.nbytes);
 
-	kernel_neon_end();
+			if (walk.nbytes) {
+				err = skcipher_walk_done(&walk, tail);
+			}
+		} while (walk.nbytes);
+	}
 
 	if (unlikely(err))
 		return err;
@@ -264,40 +250,38 @@ static int ccm_decrypt(struct aead_request *req)
 	if (unlikely(err))
 		return err;
 
-	kernel_neon_begin();
+	scoped_ksimd() {
+		if (req->assoclen)
+			ccm_calculate_auth_mac(req, mac);
 
-	if (req->assoclen)
-		ccm_calculate_auth_mac(req, mac);
+		do {
+			u32 tail = walk.nbytes % AES_BLOCK_SIZE;
+			const u8 *src = walk.src.virt.addr;
+			u8 *dst = walk.dst.virt.addr;
+			u8 buf[AES_BLOCK_SIZE];
+			u8 *final_iv = NULL;
 
-	do {
-		u32 tail = walk.nbytes % AES_BLOCK_SIZE;
-		const u8 *src = walk.src.virt.addr;
-		u8 *dst = walk.dst.virt.addr;
-		u8 buf[AES_BLOCK_SIZE];
-		u8 *final_iv = NULL;
-
-		if (walk.nbytes == walk.total) {
-			tail = 0;
-			final_iv = orig_iv;
-		}
-
-		if (unlikely(walk.nbytes < AES_BLOCK_SIZE))
-			src = dst = memcpy(&buf[sizeof(buf) - walk.nbytes],
-					   src, walk.nbytes);
+			if (walk.nbytes == walk.total) {
+				tail = 0;
+				final_iv = orig_iv;
+			}
 
-		ce_aes_ccm_decrypt(dst, src, walk.nbytes - tail,
-				   ctx->key_enc, num_rounds(ctx),
-				   mac, walk.iv, final_iv);
+			if (unlikely(walk.nbytes < AES_BLOCK_SIZE))
+				src = dst = memcpy(&buf[sizeof(buf) - walk.nbytes],
+						   src, walk.nbytes);
 
-		if (unlikely(walk.nbytes < AES_BLOCK_SIZE))
-			memcpy(walk.dst.virt.addr, dst, walk.nbytes);
+			ce_aes_ccm_decrypt(dst, src, walk.nbytes - tail,
+					   ctx->key_enc, num_rounds(ctx),
+					   mac, walk.iv, final_iv);
 
-		if (walk.nbytes) {
-			err = skcipher_walk_done(&walk, tail);
-		}
-	} while (walk.nbytes);
+			if (unlikely(walk.nbytes < AES_BLOCK_SIZE))
+				memcpy(walk.dst.virt.addr, dst, walk.nbytes);
 
-	kernel_neon_end();
+			if (walk.nbytes) {
+				err = skcipher_walk_done(&walk, tail);
+			}
+		} while (walk.nbytes);
+	}
 
 	if (unlikely(err))
 		return err;
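
The substantive change is the conversion from manual kernel_neon_begin()/kernel_neon_end() bracketing to the scoped_ksimd() guard from <asm/simd.h>. Below is a minimal before/after sketch of that pattern, assuming scoped_ksimd() behaves like the kernel's other scope-based guards (the SIMD section opens on entry to the compound statement and is closed automatically on every path out of it); the two helper functions are hypothetical, for illustration only.

	#include <asm/neon.h>	/* kernel_neon_begin()/kernel_neon_end() */
	#include <asm/simd.h>	/* scoped_ksimd() */

	/* Old pattern: explicit begin/end; every exit between the two
	 * calls must remember to call kernel_neon_end() by hand. */
	static void simd_work_old(void)
	{
		kernel_neon_begin();
		/* ... NEON / Crypto Extensions instructions ... */
		kernel_neon_end();
	}

	/* New pattern (hypothetical sketch): the guard ends the SIMD
	 * section automatically when control leaves the block. */
	static void simd_work_new(void)
	{
		scoped_ksimd() {
			/* ... NEON / Crypto Extensions instructions ... */
		}
	}

This also accounts for the hunk in ce_aes_ccm_auth_data(): the explicit kernel_neon_end()/kernel_neon_begin() pair, apparently there only to briefly yield the NEON unit between blocks, is dropped, leaving just the macp = 0 reset.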
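ccm_calculate_auth_mac() also moves from the older scatterwalk_clamp()/scatterwalk_map()/scatterwalk_unmap()/scatterwalk_advance() sequence to the newer scatterwalk_next()/scatterwalk_done_src() helpers. A hedged sketch of that read-side loop, mirroring the patched function, with process() standing in as a hypothetical consumer:

	#include <crypto/scatterwalk.h>

	/* Walk 'len' bytes of a scatterlist, feeding each mapped chunk
	 * to a consumer. scatterwalk_next() maps the next contiguous
	 * piece (at most 'len' bytes), returns its size, and exposes
	 * it via walk.addr; scatterwalk_done_src() unmaps the chunk
	 * and advances the walk. process() is hypothetical, not part
	 * of the kernel API. */
	static void consume_sg(struct scatterlist *sg, unsigned int len)
	{
		struct scatter_walk walk;

		scatterwalk_start(&walk, sg);
		do {
			unsigned int n = scatterwalk_next(&walk, len);

			process(walk.addr, n);
			scatterwalk_done_src(&walk, n);
			len -= n;
		} while (len);
	}

Compared with the old loop, the per-chunk bookkeeping (sg_next(), scatterwalk_advance(), the trailing scatterwalk_done()) is folded into the two helpers.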
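One incidental hunk: MODULE_IMPORT_NS() now takes the namespace as a string literal rather than a bare identifier, following the kernel-wide conversion of symbol namespaces to strings. The old and new spellings side by side:

	MODULE_IMPORT_NS(CRYPTO_INTERNAL);	/* old: preprocessor token */
	MODULE_IMPORT_NS("CRYPTO_INTERNAL");	/* new: string literal */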
