author    | Tianjia Zhang <tianjia.zhang@linux.alibaba.com> | 2022-03-15 17:44:52 +0800
committer | Herbert Xu <herbert@gondor.apana.org.au>        | 2022-04-08 16:12:47 +0800
commit    | 02436762f5ff4b3f662bc196c70a563bcbc92b7d (patch)
tree      | edb39becc3278973ce8968e8e536e8a70fb054f0 /arch/arm64/crypto/sm4-ce-core.S
parent    | 73c919d314ad57be900437fd329990b1d846b763 (diff)
crypto: arm64/sm4-ce - rename to sm4-ce-cipher
Subsequent patches in this series add SM4-ECB/CBC/CFB/CTR implementations
accelerated by the CE instruction set, which would conflict with the current
module name. To stay consistent with the naming convention used for the AES
modules, the sm4-ce algorithm is renamed to sm4-ce-cipher.
In addition, sm4-ce-cipher is faster than the SM4 NEON implementation, so its
priority is raised to 300, leaving room below it for the priority of SM4 NEON
(a registration sketch follows the sign-offs below).
Signed-off-by: Tianjia Zhang <tianjia.zhang@linux.alibaba.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
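For context on where that priority lives: a cipher driver advertises cra_priority when it registers a struct crypto_alg, and the crypto API prefers the highest-priority implementation registered under a given cra_name. The sketch below is illustrative only, not the actual sm4-ce-cipher glue: the handlers delegate to the generic SM4 library helpers (sm4_expandkey/sm4_crypt_block) to keep the example self-contained, whereas the real driver calls the CE routine shown in the diff further down.

#include <crypto/sm4.h>
#include <linux/crypto.h>
#include <linux/module.h>

static int sm4_setkey(struct crypto_tfm *tfm, const u8 *key,
		      unsigned int key_len)
{
	struct sm4_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Expand the 128-bit key into encryption/decryption round keys. */
	return sm4_expandkey(ctx, key, key_len);
}

static void sm4_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct sm4_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Placeholder: generic C block function instead of the CE routine. */
	sm4_crypt_block(ctx->rkey_enc, out, in);
}

static void sm4_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	const struct sm4_ctx *ctx = crypto_tfm_ctx(tfm);

	sm4_crypt_block(ctx->rkey_dec, out, in);
}

static struct crypto_alg sm4_alg = {
	.cra_name		= "sm4",
	.cra_driver_name	= "sm4-ce",	/* illustrative name */
	.cra_priority		= 300,	/* leaves room below for an SM4 NEON driver */
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= SM4_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct sm4_ctx),
	.cra_module		= THIS_MODULE,
	.cra_u.cipher = {
		.cia_min_keysize	= SM4_KEY_SIZE,
		.cia_max_keysize	= SM4_KEY_SIZE,
		.cia_setkey		= sm4_setkey,
		.cia_encrypt		= sm4_encrypt,
		.cia_decrypt		= sm4_decrypt,
	},
};

static int __init sm4_mod_init(void)
{
	return crypto_register_alg(&sm4_alg);
}

static void __exit sm4_mod_exit(void)
{
	crypto_unregister_alg(&sm4_alg);
}

module_init(sm4_mod_init);
module_exit(sm4_mod_exit);
MODULE_LICENSE("GPL v2");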
Diffstat (limited to 'arch/arm64/crypto/sm4-ce-core.S')
-rw-r--r-- | arch/arm64/crypto/sm4-ce-core.S | 36
1 file changed, 0 insertions, 36 deletions
diff --git a/arch/arm64/crypto/sm4-ce-core.S b/arch/arm64/crypto/sm4-ce-core.S
deleted file mode 100644
index 4ac6cfbc5797..000000000000
--- a/arch/arm64/crypto/sm4-ce-core.S
+++ /dev/null
@@ -1,36 +0,0 @@
-// SPDX-License-Identifier: GPL-2.0
-
-#include <linux/linkage.h>
-#include <asm/assembler.h>
-
-	.irp b, 0, 1, 2, 3, 4, 5, 6, 7, 8
-	.set .Lv\b\().4s, \b
-	.endr
-
-	.macro sm4e, rd, rn
-	.inst 0xcec08400 | .L\rd | (.L\rn << 5)
-	.endm
-
-	/*
-	 * void sm4_ce_do_crypt(const u32 *rk, u32 *out, const u32 *in);
-	 */
-	.text
-SYM_FUNC_START(sm4_ce_do_crypt)
-	ld1	{v8.4s}, [x2]
-	ld1	{v0.4s-v3.4s}, [x0], #64
-CPU_LE(	rev32	v8.16b, v8.16b	)
-	ld1	{v4.4s-v7.4s}, [x0]
-	sm4e	v8.4s, v0.4s
-	sm4e	v8.4s, v1.4s
-	sm4e	v8.4s, v2.4s
-	sm4e	v8.4s, v3.4s
-	sm4e	v8.4s, v4.4s
-	sm4e	v8.4s, v5.4s
-	sm4e	v8.4s, v6.4s
-	sm4e	v8.4s, v7.4s
-	rev64	v8.4s, v8.4s
-	ext	v8.16b, v8.16b, v8.16b, #8
CPU_LE(	rev32	v8.16b, v8.16b	)
-	st1	{v8.4s}, [x1]
-	ret
-SYM_FUNC_END(sm4_ce_do_crypt)
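The deleted routine takes the expanded round keys via x0, writes the output block via x1 and reads the input block via x2; the sm4e macro hand-encodes the SM4E instruction with .inst (Rd in bits [4:0], Rn in bits [9:5] of base opcode 0xcec08400), so the file assembles even with toolchains that lack the SM4 mnemonics. Because the routine clobbers NEON/CE registers, C glue has to bracket the call with kernel_neon_begin()/kernel_neon_end(). A minimal sketch of such a caller follows; the fallback check and the helper name are assumptions for illustration, not quoted from the original sm4-ce-glue.c.

#include <asm/neon.h>
#include <crypto/internal/simd.h>
#include <crypto/sm4.h>
#include <linux/linkage.h>
#include <linux/types.h>

/* Prototype matching the comment in the deleted assembly. */
asmlinkage void sm4_ce_do_crypt(const u32 *rk, u32 *out, const u32 *in);

/*
 * Hypothetical helper: encrypt one 16-byte block with the CE routine,
 * falling back to the generic C implementation when NEON cannot be used
 * in the current context.
 */
static void sm4_ce_encrypt_block(const struct sm4_ctx *ctx, u8 *out, const u8 *in)
{
	if (!crypto_simd_usable()) {
		sm4_crypt_block(ctx->rkey_enc, out, in);
		return;
	}

	kernel_neon_begin();
	sm4_ce_do_crypt(ctx->rkey_enc, (u32 *)out, (const u32 *)in);
	kernel_neon_end();
}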