diff --git a/arch/arm/crypto/chacha-glue.c b/arch/arm/crypto/chacha-glue.c
index cdde8fd01f8f..50e635512046 100644
--- a/arch/arm/crypto/chacha-glue.c
+++ b/arch/arm/crypto/chacha-glue.c
@@ -76,12 +76,6 @@ void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
 }
 EXPORT_SYMBOL(hchacha_block_arch);
 
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
-	chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
 void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
 		       int nrounds)
 {
@@ -116,7 +110,7 @@ static int chacha_stream_xor(struct skcipher_request *req,
 
 	err = skcipher_walk_virt(&walk, req, false);
 
-	chacha_init_generic(state, ctx->key, iv);
+	chacha_init(state, ctx->key, iv);
 
 	while (walk.nbytes > 0) {
 		unsigned int nbytes = walk.nbytes;
@@ -166,7 +160,7 @@ static int do_xchacha(struct skcipher_request *req, bool neon)
 	u32 state[16];
 	u8 real_iv[16];
 
-	chacha_init_generic(state, ctx->key, req->iv);
+	chacha_init(state, ctx->key, req->iv);
 
 	if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon) {
 		hchacha_block_arm(state, subctx.key, ctx->nrounds);
diff --git a/arch/arm64/crypto/chacha-neon-glue.c b/arch/arm64/crypto/chacha-neon-glue.c
index af2bbca38e70..229876acfc58 100644
--- a/arch/arm64/crypto/chacha-neon-glue.c
+++ b/arch/arm64/crypto/chacha-neon-glue.c
@@ -74,12 +74,6 @@ void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
 }
 EXPORT_SYMBOL(hchacha_block_arch);
 
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
-	chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
 void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
 		       int nrounds)
 {
@@ -110,7 +104,7 @@ static int chacha_neon_stream_xor(struct skcipher_request *req,
 
 	err = skcipher_walk_virt(&walk, req, false);
 
-	chacha_init_generic(state, ctx->key, iv);
+	chacha_init(state, ctx->key, iv);
 
 	while (walk.nbytes > 0) {
 		unsigned int nbytes = walk.nbytes;
@@ -151,7 +145,7 @@ static int xchacha_neon(struct skcipher_request *req)
 	u32 state[16];
 	u8 real_iv[16];
 
-	chacha_init_generic(state, ctx->key, req->iv);
+	chacha_init(state, ctx->key, req->iv);
 	hchacha_block_arch(state, subctx.key, ctx->nrounds);
 	subctx.nrounds = ctx->nrounds;
 
diff --git a/arch/mips/crypto/chacha-glue.c b/arch/mips/crypto/chacha-glue.c
index d1fd23e6ef84..f6fc2e1079a1 100644
--- a/arch/mips/crypto/chacha-glue.c
+++ b/arch/mips/crypto/chacha-glue.c
@@ -20,12 +20,6 @@ EXPORT_SYMBOL(chacha_crypt_arch);
 asmlinkage void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds);
 EXPORT_SYMBOL(hchacha_block_arch);
 
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
-	chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
 static int chacha_mips_stream_xor(struct skcipher_request *req,
 				  const struct chacha_ctx *ctx, const u8 *iv)
 {
@@ -35,7 +29,7 @@ static int chacha_mips_stream_xor(struct skcipher_request *req,
 
 	err = skcipher_walk_virt(&walk, req, false);
 
-	chacha_init_generic(state, ctx->key, iv);
+	chacha_init(state, ctx->key, iv);
 
 	while (walk.nbytes > 0) {
 		unsigned int nbytes = walk.nbytes;
@@ -67,7 +61,7 @@ static int xchacha_mips(struct skcipher_request *req)
 	u32 state[16];
 	u8 real_iv[16];
 
-	chacha_init_generic(state, ctx->key, req->iv);
+	chacha_init(state, ctx->key, req->iv);
 	hchacha_block(state, subctx.key, ctx->nrounds);
 	subctx.nrounds = ctx->nrounds;
 
diff --git a/arch/powerpc/crypto/chacha-p10-glue.c b/arch/powerpc/crypto/chacha-p10-glue.c
index 7c728755852e..d8796decc1fb 100644
--- a/arch/powerpc/crypto/chacha-p10-glue.c
+++ b/arch/powerpc/crypto/chacha-p10-glue.c
@@ -57,12 +57,6 @@ void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
 }
 EXPORT_SYMBOL(hchacha_block_arch);
 
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
-	chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
 void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
 		       int nrounds)
 {
@@ -95,7 +89,7 @@ static int chacha_p10_stream_xor(struct skcipher_request *req,
 	if (err)
 		return err;
 
-	chacha_init_generic(state, ctx->key, iv);
+	chacha_init(state, ctx->key, iv);
 
 	while (walk.nbytes > 0) {
 		unsigned int nbytes = walk.nbytes;
@@ -137,7 +131,7 @@ static int xchacha_p10(struct skcipher_request *req)
 	u32 state[16];
 	u8 real_iv[16];
 
-	chacha_init_generic(state, ctx->key, req->iv);
+	chacha_init(state, ctx->key, req->iv);
 	hchacha_block_arch(state, subctx.key, ctx->nrounds);
 	subctx.nrounds = ctx->nrounds;
 
diff --git a/arch/s390/crypto/chacha-glue.c b/arch/s390/crypto/chacha-glue.c
index f8b0c52e77a4..920e9f0941e7 100644
--- a/arch/s390/crypto/chacha-glue.c
+++ b/arch/s390/crypto/chacha-glue.c
@@ -41,7 +41,7 @@ static int chacha20_s390(struct skcipher_request *req)
 	int rc;
 
 	rc = skcipher_walk_virt(&walk, req, false);
-	chacha_init_generic(state, ctx->key, req->iv);
+	chacha_init(state, ctx->key, req->iv);
 
 	while (walk.nbytes > 0) {
 		nbytes = walk.nbytes;
@@ -69,12 +69,6 @@ void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
 }
 EXPORT_SYMBOL(hchacha_block_arch);
 
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
-	chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
 void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
 		       int nrounds)
 {
diff --git a/arch/x86/crypto/chacha_glue.c b/arch/x86/crypto/chacha_glue.c
index 7b3a1cf0984b..8bb74a272879 100644
--- a/arch/x86/crypto/chacha_glue.c
+++ b/arch/x86/crypto/chacha_glue.c
@@ -133,12 +133,6 @@ void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
 }
 EXPORT_SYMBOL(hchacha_block_arch);
 
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
-	chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
 void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
 		       int nrounds)
 {
@@ -169,7 +163,7 @@ static int chacha_simd_stream_xor(struct skcipher_request *req,
 
 	err = skcipher_walk_virt(&walk, req, false);
 
-	chacha_init_generic(state, ctx->key, iv);
+	chacha_init(state, ctx->key, iv);
 
 	while (walk.nbytes > 0) {
 		unsigned int nbytes = walk.nbytes;
@@ -211,7 +205,7 @@ static int xchacha_simd(struct skcipher_request *req)
 	struct chacha_ctx subctx;
 	u8 real_iv[16];
 
-	chacha_init_generic(state, ctx->key, req->iv);
+	chacha_init(state, ctx->key, req->iv);
 
 	if (req->cryptlen > CHACHA_BLOCK_SIZE && crypto_simd_usable()) {
 		kernel_fpu_begin();
diff --git a/crypto/chacha_generic.c b/crypto/chacha_generic.c
index ba7fcb47f9aa..1fb9fbd302c6 100644
--- a/crypto/chacha_generic.c
+++ b/crypto/chacha_generic.c
@@ -21,7 +21,7 @@ static int chacha_stream_xor(struct skcipher_request *req,
 
 	err = skcipher_walk_virt(&walk, req, false);
 
-	chacha_init_generic(state, ctx->key, iv);
+	chacha_init(state, ctx->key, iv);
 
 	while (walk.nbytes > 0) {
 		unsigned int nbytes = walk.nbytes;
@@ -54,7 +54,7 @@ static int crypto_xchacha_crypt(struct skcipher_request *req)
 	u8 real_iv[16];
 
 	/* Compute the subkey given the original key and first 128 nonce bits */
-	chacha_init_generic(state, ctx->key, req->iv);
+	chacha_init(state, ctx->key, req->iv);
 	hchacha_block_generic(state, subctx.key, ctx->nrounds);
 	subctx.nrounds = ctx->nrounds;
 
diff --git a/include/crypto/chacha.h b/include/crypto/chacha.h
index 5bae6a55b333..f8cc073bba41 100644
--- a/include/crypto/chacha.h
+++ b/include/crypto/chacha.h
@@ -62,8 +62,7 @@ static inline void chacha_init_consts(u32 *state)
 	state[3] = CHACHA_CONSTANT_TE_K;
 }
 
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv);
-static inline void chacha_init_generic(u32 *state, const u32 *key, const u8 *iv)
+static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv)
 {
 	chacha_init_consts(state);
 	state[4] = key[0];
@@ -80,14 +79,6 @@ static inline void chacha_init_generic(u32 *state, const u32 *key, const u8 *iv)
 	state[15] = get_unaligned_le32(iv + 12);
 }
 
-static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv)
-{
-	if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
-		chacha_init_arch(state, key, iv);
-	else
-		chacha_init_generic(state, key, iv);
-}
-
 void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
 		       int nrounds);
 void chacha_crypt_generic(u32 *state, u8 *dst, const u8 *src,
diff --git a/tools/testing/crypto/chacha20-s390/test-cipher.c b/tools/testing/crypto/chacha20-s390/test-cipher.c
index 8141d45df51a..35ea65c54ffa 100644
--- a/tools/testing/crypto/chacha20-s390/test-cipher.c
+++ b/tools/testing/crypto/chacha20-s390/test-cipher.c
@@ -66,7 +66,7 @@ static int test_lib_chacha(u8 *revert, u8 *cipher, u8 *plain)
 	}
 
 	/* Encrypt */
-	chacha_init_arch(chacha_state, (u32*)key, iv);
+	chacha_init(chacha_state, (u32 *)key, iv);
 
 	start = ktime_get_ns();
 	chacha_crypt_arch(chacha_state, cipher, plain, data_size, 20);
@@ -81,7 +81,7 @@ static int test_lib_chacha(u8 *revert, u8 *cipher, u8 *plain)
 	pr_info("lib encryption took: %lld nsec", end - start);
 
 	/* Decrypt */
-	chacha_init_arch(chacha_state, (u32 *)key, iv);
+	chacha_init(chacha_state, (u32 *)key, iv);
 
 	start = ktime_get_ns();
 	chacha_crypt_arch(chacha_state, revert, cipher, data_size, 20);
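
Note (not part of the patch): after this change there is a single chacha_init() inline in <crypto/chacha.h>, so callers no longer pick between chacha_init_arch() and chacha_init_generic(). A minimal usage sketch follows, based on the helpers visible in that header; the function name example_chacha20_crypt and its buffers are illustrative only, not code from this series.

#include <crypto/chacha.h>

/*
 * Illustrative sketch only: encrypt or decrypt a buffer through the
 * ChaCha library interface once the init helpers have been merged
 * into chacha_init().
 */
static void example_chacha20_crypt(u8 *dst, const u8 *src, unsigned int bytes,
				   const u32 *key, const u8 iv[CHACHA_IV_SIZE])
{
	u32 state[CHACHA_STATE_WORDS];

	/* Fills the four constants, the eight key words, the block counter
	 * and the nonce from iv[]. */
	chacha_init(state, key, iv);

	/* chacha_crypt() dispatches to the arch or generic implementation. */
	chacha_crypt(state, dst, src, bytes, 20);
}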