Switch from the old AES library functions (which use struct
crypto_aes_ctx) to the new ones (which use struct aes_key and struct
aes_enckey).  In encryption-only use cases, this eliminates the
unnecessary computation and caching of the decryption round keys.  The
new AES en/decryption functions are also much faster and use AES
instructions when supported by the CPU.
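For illustration, the encryption-only pattern this enables looks roughly
like the sketch below (function names and argument order are assumed
from the diff that follows, not quoted from the library header; key and
keylen stand for the caller's raw AES key):

	struct aes_enckey aes;	/* encryption round keys only */
	u8 block[AES_BLOCK_SIZE] = {};
	int err;

	/* Expand only the encryption round keys. */
	err = aes_prepareenckey(&aes, key, keylen);
	if (err)
		return err;

	/* Encrypt one block in place. */
	aes_encrypt_new(&aes, block, block);

	memzero_explicit(&aes, sizeof(aes));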
Note: aes_encrypt_new() and aes_decrypt_new() will be renamed to
aes_encrypt() and aes_decrypt(), respectively, once all callers of the
old aes_encrypt() and aes_decrypt() have been updated.

Signed-off-by: Eric Biggers <[email protected]>
---
 drivers/crypto/chelsio/chcr_algo.c | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/drivers/crypto/chelsio/chcr_algo.c b/drivers/crypto/chelsio/chcr_algo.c
index 22cbc343198a..b6b97088dfc5 100644
--- a/drivers/crypto/chelsio/chcr_algo.c
+++ b/drivers/crypto/chelsio/chcr_algo.c
@@ -1026,11 +1026,11 @@ static int chcr_update_tweak(struct skcipher_request *req, u8 *iv,
 			     u32 isfinal)
 {
 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(tfm));
 	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
-	struct crypto_aes_ctx aes;
+	struct aes_key aes;
 	int ret, i;
 	u8 *key;
 	unsigned int keylen;
 	int round = reqctx->last_req_len / AES_BLOCK_SIZE;
 	int round8 = round / 8;
@@ -1042,24 +1042,24 @@ static int chcr_update_tweak(struct skcipher_request *req, u8 *iv,
 	/* For a 192 bit key remove the padded zeroes which was
 	 * added in chcr_xts_setkey
 	 */
 	if (KEY_CONTEXT_CK_SIZE_G(ntohl(ablkctx->key_ctx_hdr)) ==
 	    CHCR_KEYCTX_CIPHER_KEY_SIZE_192)
-		ret = aes_expandkey(&aes, key, keylen - 8);
+		ret = aes_preparekey(&aes, key, keylen - 8);
 	else
-		ret = aes_expandkey(&aes, key, keylen);
+		ret = aes_preparekey(&aes, key, keylen);
 	if (ret)
 		return ret;
-	aes_encrypt(&aes, iv, iv);
+	aes_encrypt_new(&aes, iv, iv);
 	for (i = 0; i < round8; i++)
 		gf128mul_x8_ble((le128 *)iv, (le128 *)iv);
 
 	for (i = 0; i < (round % 8); i++)
 		gf128mul_x_ble((le128 *)iv, (le128 *)iv);
 
 	if (!isfinal)
-		aes_decrypt(&aes, iv, iv);
+		aes_decrypt_new(&aes, iv, iv);
 
 	memzero_explicit(&aes, sizeof(aes));
 	return 0;
 }
 
@@ -3404,11 +3404,11 @@ static int chcr_gcm_setkey(struct crypto_aead *aead, const u8 *key,
 {
 	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
 	struct chcr_gcm_ctx *gctx = GCM_CTX(aeadctx);
 	unsigned int ck_size;
 	int ret = 0, key_ctx_size = 0;
-	struct crypto_aes_ctx aes;
+	struct aes_enckey aes;
 
 	aeadctx->enckey_len = 0;
 	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
 	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(aead)
 			      & CRYPTO_TFM_REQ_MASK);
@@ -3442,17 +3442,17 @@ static int chcr_gcm_setkey(struct crypto_aead *aead, const u8 *key,
 		       0, 0,
 		       key_ctx_size >> 4);
 	/* Calculate the H = CIPH(K, 0 repeated 16 times).
 	 * It will go in key context
 	 */
-	ret = aes_expandkey(&aes, key, keylen);
+	ret = aes_prepareenckey(&aes, key, keylen);
 	if (ret) {
 		aeadctx->enckey_len = 0;
 		goto out;
 	}
 	memset(gctx->ghash_h, 0, AEAD_H_SIZE);
-	aes_encrypt(&aes, gctx->ghash_h, gctx->ghash_h);
+	aes_encrypt_new(&aes, gctx->ghash_h, gctx->ghash_h);
 	memzero_explicit(&aes, sizeof(aes));
 out:
 	return ret;
 }
 
-- 
2.52.0
