path: root/arch/s390/crypto
author	Eric Biggers <ebiggers@google.com>	2025-04-05 11:26:07 -0700
committer	Herbert Xu <herbert@gondor.apana.org.au>	2025-04-07 13:22:28 +0800
commit	efe8ddfaa3361c5080ad10a1226652cc0839ee49 (patch)
tree	f510f456b3e7a1ad5ae6e03e9d4ba87b7958b4ba /arch/s390/crypto
parent	f7915484c0200e7fe67d25b715408de6dc287047 (diff)
crypto: s390/chacha - remove the skcipher algorithms
Since crypto/chacha.c now registers chacha20-$(ARCH), xchacha20-$(ARCH), and
xchacha12-$(ARCH) skcipher algorithms that use the architecture's ChaCha and
HChaCha library functions, individual architectures no longer need to do the
same. Therefore, remove the redundant skcipher algorithms and leave just the
library functions.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
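[Editor's note: a minimal sketch of how a kernel user consumes the library
interface from <crypto/chacha.h> that this patch leaves behind. The wrapper
function and buffer names are invented for illustration; it assumes
chacha_init() and chacha_crypt() keep the signatures visible in the diff below.]

#include <crypto/chacha.h>

/* Hypothetical helper: encrypt one buffer with ChaCha20 via the library API. */
static void example_chacha20_crypt(u8 *dst, const u8 *src, unsigned int len,
				   const u32 key[CHACHA_KEY_SIZE / sizeof(u32)],
				   const u8 iv[CHACHA_IV_SIZE])
{
	u32 state[CHACHA_STATE_WORDS];

	/* Expand key, nonce and block counter into the ChaCha state. */
	chacha_init(state, key, iv);

	/*
	 * chacha_crypt() dispatches to chacha_crypt_arch() when the
	 * architecture provides one (on s390, the routine kept by this
	 * patch) and to chacha_crypt_generic() otherwise.
	 */
	chacha_crypt(state, dst, src, len, 20);
}

With the skcipher algorithms now registered centrally, this library call path
is all the architecture glue has to serve.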
Diffstat (limited to 'arch/s390/crypto')
-rw-r--r--	arch/s390/crypto/Kconfig	7
-rw-r--r--	arch/s390/crypto/chacha-glue.c	101
2 files changed, 13 insertions, 95 deletions
diff --git a/arch/s390/crypto/Kconfig b/arch/s390/crypto/Kconfig
index 8c4db8b64fa2..055b08f259ab 100644
--- a/arch/s390/crypto/Kconfig
+++ b/arch/s390/crypto/Kconfig
@@ -110,16 +110,9 @@ config CRYPTO_DES_S390
config CRYPTO_CHACHA_S390
tristate
depends on S390
- select CRYPTO_SKCIPHER
select CRYPTO_LIB_CHACHA_GENERIC
select CRYPTO_ARCH_HAVE_LIB_CHACHA
default CRYPTO_LIB_CHACHA_INTERNAL
- help
- Length-preserving cipher: ChaCha20 stream cipher (RFC 7539)
-
- Architecture: s390
-
- It is available as of z13.
config CRYPTO_HMAC_S390
tristate "Keyed-hash message authentication code: HMAC"
diff --git a/arch/s390/crypto/chacha-glue.c b/arch/s390/crypto/chacha-glue.c
index 0c68191f2aa4..b3ffaa555385 100644
--- a/arch/s390/crypto/chacha-glue.c
+++ b/arch/s390/crypto/chacha-glue.c
@@ -1,6 +1,6 @@
// SPDX-License-Identifier: GPL-2.0
/*
- * s390 ChaCha stream cipher.
+ * ChaCha stream cipher (s390 optimized)
*
* Copyright IBM Corp. 2021
*/
@@ -8,9 +8,7 @@
#define KMSG_COMPONENT "chacha_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt
-#include <crypto/internal/chacha.h>
-#include <crypto/internal/skcipher.h>
-#include <crypto/algapi.h>
+#include <crypto/chacha.h>
#include <linux/cpufeature.h>
#include <linux/kernel.h>
#include <linux/module.h>
@@ -18,50 +16,6 @@
#include <asm/fpu.h>
#include "chacha-s390.h"
-static void chacha20_crypt_s390(u32 *state, u8 *dst, const u8 *src,
- unsigned int nbytes, const u32 *key,
- u32 *counter)
-{
- DECLARE_KERNEL_FPU_ONSTACK32(vxstate);
-
- kernel_fpu_begin(&vxstate, KERNEL_VXR);
- chacha20_vx(dst, src, nbytes, key, counter);
- kernel_fpu_end(&vxstate, KERNEL_VXR);
-
- *counter += round_up(nbytes, CHACHA_BLOCK_SIZE) / CHACHA_BLOCK_SIZE;
-}
-
-static int chacha20_s390(struct skcipher_request *req)
-{
- struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
- struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
- u32 state[CHACHA_STATE_WORDS] __aligned(16);
- struct skcipher_walk walk;
- unsigned int nbytes;
- int rc;
-
- rc = skcipher_walk_virt(&walk, req, false);
- chacha_init(state, ctx->key, req->iv);
-
- while (walk.nbytes > 0) {
- nbytes = walk.nbytes;
- if (nbytes < walk.total)
- nbytes = round_down(nbytes, walk.stride);
-
- if (nbytes <= CHACHA_BLOCK_SIZE) {
- chacha_crypt_generic(state, walk.dst.virt.addr,
- walk.src.virt.addr, nbytes,
- ctx->nrounds);
- } else {
- chacha20_crypt_s390(state, walk.dst.virt.addr,
- walk.src.virt.addr, nbytes,
- &state[4], &state[12]);
- }
- rc = skcipher_walk_done(&walk, walk.nbytes - nbytes);
- }
- return rc;
-}
-
void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
{
/* TODO: implement hchacha_block_arch() in assembly */
@@ -76,32 +30,20 @@ void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src,
* it cannot handle a block of data or less, but otherwise
* it can handle data of arbitrary size
*/
- if (bytes <= CHACHA_BLOCK_SIZE || nrounds != 20 || !cpu_has_vx())
+ if (bytes <= CHACHA_BLOCK_SIZE || nrounds != 20 || !cpu_has_vx()) {
chacha_crypt_generic(state, dst, src, bytes, nrounds);
- else
- chacha20_crypt_s390(state, dst, src, bytes,
- &state[4], &state[12]);
-}
-EXPORT_SYMBOL(chacha_crypt_arch);
+ } else {
+ DECLARE_KERNEL_FPU_ONSTACK32(vxstate);
-static struct skcipher_alg chacha_algs[] = {
- {
- .base.cra_name = "chacha20",
- .base.cra_driver_name = "chacha20-s390",
- .base.cra_priority = 900,
- .base.cra_blocksize = 1,
- .base.cra_ctxsize = sizeof(struct chacha_ctx),
- .base.cra_module = THIS_MODULE,
+ kernel_fpu_begin(&vxstate, KERNEL_VXR);
+ chacha20_vx(dst, src, bytes, &state[4], &state[12]);
+ kernel_fpu_end(&vxstate, KERNEL_VXR);
- .min_keysize = CHACHA_KEY_SIZE,
- .max_keysize = CHACHA_KEY_SIZE,
- .ivsize = CHACHA_IV_SIZE,
- .chunksize = CHACHA_BLOCK_SIZE,
- .setkey = chacha20_setkey,
- .encrypt = chacha20_s390,
- .decrypt = chacha20_s390,
+ state[12] += round_up(bytes, CHACHA_BLOCK_SIZE) /
+ CHACHA_BLOCK_SIZE;
}
-};
+}
+EXPORT_SYMBOL(chacha_crypt_arch);
bool chacha_is_arch_optimized(void)
{
@@ -109,22 +51,5 @@ bool chacha_is_arch_optimized(void)
}
EXPORT_SYMBOL(chacha_is_arch_optimized);
-static int __init chacha_mod_init(void)
-{
- return IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ?
- crypto_register_skciphers(chacha_algs, ARRAY_SIZE(chacha_algs)) : 0;
-}
-
-static void __exit chacha_mod_fini(void)
-{
- if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER))
- crypto_unregister_skciphers(chacha_algs, ARRAY_SIZE(chacha_algs));
-}
-
-arch_initcall(chacha_mod_init);
-module_exit(chacha_mod_fini);
-
-MODULE_DESCRIPTION("ChaCha20 stream cipher");
+MODULE_DESCRIPTION("ChaCha stream cipher (s390 optimized)");
MODULE_LICENSE("GPL v2");
-
-MODULE_ALIAS_CRYPTO("chacha20");
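
[Editor's note: for reference, the per-arch boilerplate removed above is taken
over by a single generic driver. The sketch below shows the rough shape of an
skcipher .encrypt/.decrypt handler layered on the library calls; it is an
illustrative reconstruction based on the code removed in this patch, not the
actual crypto/chacha.c source.]

#include <crypto/internal/chacha.h>
#include <crypto/internal/skcipher.h>

/* Illustrative generic skcipher handler built on the ChaCha library calls. */
static int example_chacha_stream_xor(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 state[CHACHA_STATE_WORDS];
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	chacha_init(state, ctx->key, req->iv);

	while (walk.nbytes) {
		unsigned int nbytes = walk.nbytes;

		if (nbytes < walk.total)
			nbytes = round_down(nbytes, walk.stride);

		/* chacha_crypt() picks the arch or generic library routine. */
		chacha_crypt(state, walk.dst.virt.addr, walk.src.virt.addr,
			     nbytes, ctx->nrounds);
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}
	return err;
}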