Replace the glue helper dependency with implementations of ECB and CBC
built on the new CPP macros in ecb_cbc_helpers.h, which avoid the need
for indirect calls.

Signed-off-by: Ard Biesheuvel <a...@kernel.org>
---
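As an illustration of what the new helpers amount to, the sketch below shows
roughly how the ECB_WALK_START/ECB_BLOCK/ECB_WALK_END sequence for the 16-way
AVX glue behaves once expanded: a plain skcipher walk that takes the FPU only
when a full parallel run is available, then dispatches the widest routine
first and falls back to narrower ones, all through direct calls. This is not
the contents of ecb_cbc_helpers.h and the exact expansion may differ; the
function name ecb_encrypt_sketch is made up for the example, and the
declarations plus CAMELLIA_BLOCK_SIZE are assumed to come from
<asm/crypto/camellia.h>.

#include <linux/types.h>
#include <crypto/internal/skcipher.h>
#include <asm/crypto/camellia.h>
#include <asm/fpu/api.h>

/* Illustrative sketch only -- not the actual macro expansion. */
static int ecb_encrypt_sketch(struct skcipher_request *req)
{
        void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
        struct skcipher_walk walk;
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        while (walk.nbytes > 0) {
                unsigned int nbytes = walk.nbytes;
                const u8 *src = walk.src.virt.addr;
                u8 *dst = walk.dst.virt.addr;
                /* take the FPU only if at least one full 16-block run fits */
                bool fpu = nbytes >= 16 * CAMELLIA_BLOCK_SIZE;

                if (fpu)
                        kernel_fpu_begin();

                /* widest routine first, then the narrower fallbacks */
                while (nbytes >= 16 * CAMELLIA_BLOCK_SIZE) {
                        camellia_ecb_enc_16way(ctx, dst, src);
                        dst += 16 * CAMELLIA_BLOCK_SIZE;
                        src += 16 * CAMELLIA_BLOCK_SIZE;
                        nbytes -= 16 * CAMELLIA_BLOCK_SIZE;
                }
                while (nbytes >= 2 * CAMELLIA_BLOCK_SIZE) {
                        camellia_enc_blk_2way(ctx, dst, src);
                        dst += 2 * CAMELLIA_BLOCK_SIZE;
                        src += 2 * CAMELLIA_BLOCK_SIZE;
                        nbytes -= 2 * CAMELLIA_BLOCK_SIZE;
                }
                while (nbytes >= CAMELLIA_BLOCK_SIZE) {
                        camellia_enc_blk(ctx, dst, src);
                        dst += CAMELLIA_BLOCK_SIZE;
                        src += CAMELLIA_BLOCK_SIZE;
                        nbytes -= CAMELLIA_BLOCK_SIZE;
                }

                if (fpu)
                        kernel_fpu_end();

                err = skcipher_walk_done(&walk, nbytes);
        }
        return err;
}

The per-width inner loops are what the individual ECB_BLOCK() invocations in
the patch stand for; the CBC_* macros follow the same shape, with the IV
chaining handled inside the walk.
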
 arch/x86/crypto/camellia_aesni_avx2_glue.c | 85 ++++++--------------
 arch/x86/crypto/camellia_aesni_avx_glue.c  | 73 +++++------------
 arch/x86/crypto/camellia_glue.c            | 61 ++++----------
 crypto/Kconfig                             |  2 -
 4 files changed, 60 insertions(+), 161 deletions(-)

diff --git a/arch/x86/crypto/camellia_aesni_avx2_glue.c b/arch/x86/crypto/camellia_aesni_avx2_glue.c
index 8f25a2a6222e..ef5c0f094584 100644
--- a/arch/x86/crypto/camellia_aesni_avx2_glue.c
+++ b/arch/x86/crypto/camellia_aesni_avx2_glue.c
@@ -6,7 +6,6 @@
  */
 
 #include <asm/crypto/camellia.h>
-#include <asm/crypto/glue_helper.h>
 #include <crypto/algapi.h>
 #include <crypto/internal/simd.h>
 #include <linux/crypto.h>
@@ -14,6 +13,8 @@
 #include <linux/module.h>
 #include <linux/types.h>
 
+#include "ecb_cbc_helpers.h"
+
 #define CAMELLIA_AESNI_PARALLEL_BLOCKS 16
 #define CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS 32
 
@@ -23,63 +24,6 @@ asmlinkage void camellia_ecb_dec_32way(const void *ctx, u8 *dst, const u8 *src);
 
 asmlinkage void camellia_cbc_dec_32way(const void *ctx, u8 *dst, const u8 *src);
 
-static const struct common_glue_ctx camellia_enc = {
-       .num_funcs = 4,
-       .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-
-       .funcs = { {
-               .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
-               .fn_u = { .ecb = camellia_ecb_enc_32way }
-       }, {
-               .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-               .fn_u = { .ecb = camellia_ecb_enc_16way }
-       }, {
-               .num_blocks = 2,
-               .fn_u = { .ecb = camellia_enc_blk_2way }
-       }, {
-               .num_blocks = 1,
-               .fn_u = { .ecb = camellia_enc_blk }
-       } }
-};
-
-static const struct common_glue_ctx camellia_dec = {
-       .num_funcs = 4,
-       .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-
-       .funcs = { {
-               .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
-               .fn_u = { .ecb = camellia_ecb_dec_32way }
-       }, {
-               .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-               .fn_u = { .ecb = camellia_ecb_dec_16way }
-       }, {
-               .num_blocks = 2,
-               .fn_u = { .ecb = camellia_dec_blk_2way }
-       }, {
-               .num_blocks = 1,
-               .fn_u = { .ecb = camellia_dec_blk }
-       } }
-};
-
-static const struct common_glue_ctx camellia_dec_cbc = {
-       .num_funcs = 4,
-       .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-
-       .funcs = { {
-               .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
-               .fn_u = { .cbc = camellia_cbc_dec_32way }
-       }, {
-               .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-               .fn_u = { .cbc = camellia_cbc_dec_16way }
-       }, {
-               .num_blocks = 2,
-               .fn_u = { .cbc = camellia_decrypt_cbc_2way }
-       }, {
-               .num_blocks = 1,
-               .fn_u = { .cbc = camellia_dec_blk }
-       } }
-};
-
 static int camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
                           unsigned int keylen)
 {
@@ -88,22 +32,39 @@ static int camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
 
 static int ecb_encrypt(struct skcipher_request *req)
 {
-       return glue_ecb_req_128bit(&camellia_enc, req);
+       ECB_WALK_START(req, CAMELLIA_BLOCK_SIZE, CAMELLIA_AESNI_PARALLEL_BLOCKS);
+       ECB_BLOCK(CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS, camellia_ecb_enc_32way);
+       ECB_BLOCK(CAMELLIA_AESNI_PARALLEL_BLOCKS, camellia_ecb_enc_16way);
+       ECB_BLOCK(2, camellia_enc_blk_2way);
+       ECB_BLOCK(1, camellia_enc_blk);
+       ECB_WALK_END();
 }
 
 static int ecb_decrypt(struct skcipher_request *req)
 {
-       return glue_ecb_req_128bit(&camellia_dec, req);
+       ECB_WALK_START(req, CAMELLIA_BLOCK_SIZE, CAMELLIA_AESNI_PARALLEL_BLOCKS);
+       ECB_BLOCK(CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS, camellia_ecb_dec_32way);
+       ECB_BLOCK(CAMELLIA_AESNI_PARALLEL_BLOCKS, camellia_ecb_dec_16way);
+       ECB_BLOCK(2, camellia_dec_blk_2way);
+       ECB_BLOCK(1, camellia_dec_blk);
+       ECB_WALK_END();
 }
 
 static int cbc_encrypt(struct skcipher_request *req)
 {
-       return glue_cbc_encrypt_req_128bit(camellia_enc_blk, req);
+       CBC_WALK_START(req, CAMELLIA_BLOCK_SIZE, -1);
+       CBC_ENC_BLOCK(camellia_enc_blk);
+       CBC_WALK_END();
 }
 
 static int cbc_decrypt(struct skcipher_request *req)
 {
-       return glue_cbc_decrypt_req_128bit(&camellia_dec_cbc, req);
+       CBC_WALK_START(req, CAMELLIA_BLOCK_SIZE, CAMELLIA_AESNI_PARALLEL_BLOCKS);
+       CBC_DEC_BLOCK(CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS, camellia_cbc_dec_32way);
+       CBC_DEC_BLOCK(CAMELLIA_AESNI_PARALLEL_BLOCKS, camellia_cbc_dec_16way);
+       CBC_DEC_BLOCK(2, camellia_decrypt_cbc_2way);
+       CBC_DEC_BLOCK(1, camellia_dec_blk);
+       CBC_WALK_END();
 }
 
 static struct skcipher_alg camellia_algs[] = {
diff --git a/arch/x86/crypto/camellia_aesni_avx_glue.c b/arch/x86/crypto/camellia_aesni_avx_glue.c
index 22a89cdfedfb..68fed0a79889 100644
--- a/arch/x86/crypto/camellia_aesni_avx_glue.c
+++ b/arch/x86/crypto/camellia_aesni_avx_glue.c
@@ -6,7 +6,6 @@
  */
 
 #include <asm/crypto/camellia.h>
-#include <asm/crypto/glue_helper.h>
 #include <crypto/algapi.h>
 #include <crypto/internal/simd.h>
 #include <linux/crypto.h>
@@ -14,6 +13,8 @@
 #include <linux/module.h>
 #include <linux/types.h>
 
+#include "ecb_cbc_helpers.h"
+
 #define CAMELLIA_AESNI_PARALLEL_BLOCKS 16
 
 /* 16-way parallel cipher functions (avx/aes-ni) */
@@ -26,54 +27,6 @@ EXPORT_SYMBOL_GPL(camellia_ecb_dec_16way);
 asmlinkage void camellia_cbc_dec_16way(const void *ctx, u8 *dst, const u8 *src);
 EXPORT_SYMBOL_GPL(camellia_cbc_dec_16way);
 
-static const struct common_glue_ctx camellia_enc = {
-       .num_funcs = 3,
-       .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-
-       .funcs = { {
-               .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-               .fn_u = { .ecb = camellia_ecb_enc_16way }
-       }, {
-               .num_blocks = 2,
-               .fn_u = { .ecb = camellia_enc_blk_2way }
-       }, {
-               .num_blocks = 1,
-               .fn_u = { .ecb = camellia_enc_blk }
-       } }
-};
-
-static const struct common_glue_ctx camellia_dec = {
-       .num_funcs = 3,
-       .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-
-       .funcs = { {
-               .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-               .fn_u = { .ecb = camellia_ecb_dec_16way }
-       }, {
-               .num_blocks = 2,
-               .fn_u = { .ecb = camellia_dec_blk_2way }
-       }, {
-               .num_blocks = 1,
-               .fn_u = { .ecb = camellia_dec_blk }
-       } }
-};
-
-static const struct common_glue_ctx camellia_dec_cbc = {
-       .num_funcs = 3,
-       .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-
-       .funcs = { {
-               .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
-               .fn_u = { .cbc = camellia_cbc_dec_16way }
-       }, {
-               .num_blocks = 2,
-               .fn_u = { .cbc = camellia_decrypt_cbc_2way }
-       }, {
-               .num_blocks = 1,
-               .fn_u = { .cbc = camellia_dec_blk }
-       } }
-};
-
 static int camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
                           unsigned int keylen)
 {
@@ -82,22 +35,36 @@ static int camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
 
 static int ecb_encrypt(struct skcipher_request *req)
 {
-       return glue_ecb_req_128bit(&camellia_enc, req);
+       ECB_WALK_START(req, CAMELLIA_BLOCK_SIZE, CAMELLIA_AESNI_PARALLEL_BLOCKS);
+       ECB_BLOCK(CAMELLIA_AESNI_PARALLEL_BLOCKS, camellia_ecb_enc_16way);
+       ECB_BLOCK(2, camellia_enc_blk_2way);
+       ECB_BLOCK(1, camellia_enc_blk);
+       ECB_WALK_END();
 }
 
 static int ecb_decrypt(struct skcipher_request *req)
 {
-       return glue_ecb_req_128bit(&camellia_dec, req);
+       ECB_WALK_START(req, CAMELLIA_BLOCK_SIZE, CAMELLIA_AESNI_PARALLEL_BLOCKS);
+       ECB_BLOCK(CAMELLIA_AESNI_PARALLEL_BLOCKS, camellia_ecb_dec_16way);
+       ECB_BLOCK(2, camellia_dec_blk_2way);
+       ECB_BLOCK(1, camellia_dec_blk);
+       ECB_WALK_END();
 }
 
 static int cbc_encrypt(struct skcipher_request *req)
 {
-       return glue_cbc_encrypt_req_128bit(camellia_enc_blk, req);
+       CBC_WALK_START(req, CAMELLIA_BLOCK_SIZE, -1);
+       CBC_ENC_BLOCK(camellia_enc_blk);
+       CBC_WALK_END();
 }
 
 static int cbc_decrypt(struct skcipher_request *req)
 {
-       return glue_cbc_decrypt_req_128bit(&camellia_dec_cbc, req);
+       CBC_WALK_START(req, CAMELLIA_BLOCK_SIZE, CAMELLIA_AESNI_PARALLEL_BLOCKS);
+       CBC_DEC_BLOCK(CAMELLIA_AESNI_PARALLEL_BLOCKS, camellia_cbc_dec_16way);
+       CBC_DEC_BLOCK(2, camellia_decrypt_cbc_2way);
+       CBC_DEC_BLOCK(1, camellia_dec_blk);
+       CBC_WALK_END();
 }
 
 static struct skcipher_alg camellia_algs[] = {
diff --git a/arch/x86/crypto/camellia_glue.c b/arch/x86/crypto/camellia_glue.c
index fefeedf2b33d..6c314bb46211 100644
--- a/arch/x86/crypto/camellia_glue.c
+++ b/arch/x86/crypto/camellia_glue.c
@@ -15,7 +15,8 @@
 #include <linux/types.h>
 #include <crypto/algapi.h>
 #include <asm/crypto/camellia.h>
-#include <asm/crypto/glue_helper.h>
+
+#include "ecb_cbc_helpers.h"
 
 /* regular block cipher functions */
 asmlinkage void __camellia_enc_blk(const void *ctx, u8 *dst, const u8 *src,
@@ -1274,63 +1275,35 @@ void camellia_decrypt_cbc_2way(const void *ctx, u8 *d, const u8 *s)
 }
 EXPORT_SYMBOL_GPL(camellia_decrypt_cbc_2way);
 
-static const struct common_glue_ctx camellia_enc = {
-       .num_funcs = 2,
-       .fpu_blocks_limit = -1,
-
-       .funcs = { {
-               .num_blocks = 2,
-               .fn_u = { .ecb = camellia_enc_blk_2way }
-       }, {
-               .num_blocks = 1,
-               .fn_u = { .ecb = camellia_enc_blk }
-       } }
-};
-
-static const struct common_glue_ctx camellia_dec = {
-       .num_funcs = 2,
-       .fpu_blocks_limit = -1,
-
-       .funcs = { {
-               .num_blocks = 2,
-               .fn_u = { .ecb = camellia_dec_blk_2way }
-       }, {
-               .num_blocks = 1,
-               .fn_u = { .ecb = camellia_dec_blk }
-       } }
-};
-
-static const struct common_glue_ctx camellia_dec_cbc = {
-       .num_funcs = 2,
-       .fpu_blocks_limit = -1,
-
-       .funcs = { {
-               .num_blocks = 2,
-               .fn_u = { .cbc = camellia_decrypt_cbc_2way }
-       }, {
-               .num_blocks = 1,
-               .fn_u = { .cbc = camellia_dec_blk }
-       } }
-};
-
 static int ecb_encrypt(struct skcipher_request *req)
 {
-       return glue_ecb_req_128bit(&camellia_enc, req);
+       ECB_WALK_START(req, CAMELLIA_BLOCK_SIZE, -1);
+       ECB_BLOCK(2, camellia_enc_blk_2way);
+       ECB_BLOCK(1, camellia_enc_blk);
+       ECB_WALK_END();
 }
 
 static int ecb_decrypt(struct skcipher_request *req)
 {
-       return glue_ecb_req_128bit(&camellia_dec, req);
+       ECB_WALK_START(req, CAMELLIA_BLOCK_SIZE, -1);
+       ECB_BLOCK(2, camellia_dec_blk_2way);
+       ECB_BLOCK(1, camellia_dec_blk);
+       ECB_WALK_END();
 }
 
 static int cbc_encrypt(struct skcipher_request *req)
 {
-       return glue_cbc_encrypt_req_128bit(camellia_enc_blk, req);
+       CBC_WALK_START(req, CAMELLIA_BLOCK_SIZE, -1);
+       CBC_ENC_BLOCK(camellia_enc_blk);
+       CBC_WALK_END();
 }
 
 static int cbc_decrypt(struct skcipher_request *req)
 {
-       return glue_cbc_decrypt_req_128bit(&camellia_dec_cbc, req);
+       CBC_WALK_START(req, CAMELLIA_BLOCK_SIZE, -1);
+       CBC_DEC_BLOCK(2, camellia_decrypt_cbc_2way);
+       CBC_DEC_BLOCK(1, camellia_dec_blk);
+       CBC_WALK_END();
 }
 
 static struct crypto_alg camellia_cipher_alg = {
diff --git a/crypto/Kconfig b/crypto/Kconfig
index 24c0e001d06d..f8518ff389bb 100644
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -1286,7 +1286,6 @@ config CRYPTO_CAMELLIA_X86_64
        depends on X86 && 64BIT
        depends on CRYPTO
        select CRYPTO_SKCIPHER
-       select CRYPTO_GLUE_HELPER_X86
        imply CRYPTO_CTR
        help
          Camellia cipher algorithm module (x86_64).
@@ -1305,7 +1304,6 @@ config CRYPTO_CAMELLIA_AESNI_AVX_X86_64
        depends on CRYPTO
        select CRYPTO_SKCIPHER
        select CRYPTO_CAMELLIA_X86_64
-       select CRYPTO_GLUE_HELPER_X86
        select CRYPTO_SIMD
        imply CRYPTO_XTS
        help
-- 
2.17.1
