* cipher/cipher-gcm-riscv-zbb-zbc.c (_gcry_ghash_riscv_zbb_zbc): Rename
to ...
(ghash_polyval_riscv_zbb_zbc): ... this; Add 'is_polyval' argument.
(_gcry_ghash_riscv_zbb_zbc): New.
(_gcry_polyval_riscv_zbb_zbc): New.
* cipher/cipher-gcm.c [GCM_USE_RISCV_ZBB_ZBC]
(_gcry_polyval_riscv_zbb_zbc): New prototype.
(setupM) [GCM_USE_RISCV_ZBB_ZBC]: Add setup for
'c->u_mode.gcm.polyval_fn'.
--
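
For context, not part of the change itself: POLYVAL (RFC 8452, the hash
used by GCM-SIV) processes its 16-byte blocks in the opposite byte order
to GHASH, so the shared inner routine only has to skip the
byteswap_u64x2() step when 'is_polyval' is set; the two noinline entry
points then give the compiler a constant flag to specialize the inlined
loop for each mode.  In portable C the per-block difference boils down to
a conditional byte reversal, roughly like the standalone sketch below
('load_block' is a made-up name, not a helper from the patched file):

  #include <stddef.h>
  #include <stdint.h>

  /* Illustration only: the GHASH path byte-reverses each input block,
     the POLYVAL path takes it as stored in memory. */
  static void
  load_block (uint8_t out[16], const uint8_t in[16], int is_polyval)
  {
    size_t i;

    for (i = 0; i < 16; i++)
      out[i] = is_polyval ? in[i] : in[15 - i];
  }
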
Benchmark on SpacemiT K1:

Before:
 AES            |  nanosecs/byte   mebibytes/sec   cycles/byte  auto Mhz
  GCM-SIV auth  |      3.65 ns/B     261.4 MiB/s      5.84 c/B      1600

After:
 AES            |  nanosecs/byte   mebibytes/sec   cycles/byte  auto Mhz
  GCM-SIV auth  |     0.861 ns/B      1108 MiB/s      1.38 c/B      1600

Signed-off-by: Jussi Kivilinna <jussi.kivili...@iki.fi>
---
 cipher/cipher-gcm-riscv-zbb-zbc.c | 24 +++++++++++++++++++-----
 cipher/cipher-gcm.c               |  5 +++++
 2 files changed, 24 insertions(+), 5 deletions(-)

diff --git a/cipher/cipher-gcm-riscv-zbb-zbc.c b/cipher/cipher-gcm-riscv-zbb-zbc.c
index 61539274..e32bfafe 100644
--- a/cipher/cipher-gcm-riscv-zbb-zbc.c
+++ b/cipher/cipher-gcm-riscv-zbb-zbc.c
@@ -190,9 +190,9 @@ reduction(u64x2x2 r0r1)
   return veor_u64x2(r0, r1);
 }
 
-ASM_FUNC_ATTR_NOINLINE unsigned int
-_gcry_ghash_riscv_zbb_zbc(gcry_cipher_hd_t c, byte *result, const byte *buf,
-                          size_t nblocks)
+static ASM_FUNC_ATTR_INLINE unsigned int
+ghash_polyval_riscv_zbb_zbc(gcry_cipher_hd_t c, byte *result, const byte *buf,
+                            size_t nblocks, int is_polyval)
 {
   u64x2 rhash;
   u64x2 rh1;
@@ -211,7 +211,7 @@ _gcry_ghash_riscv_zbb_zbc(gcry_cipher_hd_t c, byte *result, const byte *buf,
   buf += 16;
   nblocks--;
 
-  rbuf = byteswap_u64x2(rbuf);
+  rbuf = is_polyval ? rbuf : byteswap_u64x2(rbuf);
 
   rhash = veor_u64x2(rhash, rbuf);
 
@@ -223,7 +223,7 @@ _gcry_ghash_riscv_zbb_zbc(gcry_cipher_hd_t c, byte *result, const byte *buf,
 
       rr0rr1 = pmul_128x128(rhash, rh1);
 
-      rbuf = byteswap_u64x2(rbuf);
+      rbuf = is_polyval ? rbuf : byteswap_u64x2(rbuf);
 
       rhash = reduction(rr0rr1);
 
@@ -240,6 +240,20 @@ _gcry_ghash_riscv_zbb_zbc(gcry_cipher_hd_t c, byte *result, const byte *buf,
   return 0;
 }
 
+ASM_FUNC_ATTR_NOINLINE unsigned int
+_gcry_ghash_riscv_zbb_zbc(gcry_cipher_hd_t c, byte *result, const byte *buf,
+                          size_t nblocks)
+{
+  return ghash_polyval_riscv_zbb_zbc(c, result, buf, nblocks, 0);
+}
+
+ASM_FUNC_ATTR_NOINLINE unsigned int
+_gcry_polyval_riscv_zbb_zbc(gcry_cipher_hd_t c, byte *result, const byte *buf,
+                            size_t nblocks)
+{
+  return ghash_polyval_riscv_zbb_zbc(c, result, buf, nblocks, 1);
+}
+
 static ASM_FUNC_ATTR_INLINE void
 gcm_lsh_1(void *r_out, u64x2 i)
 {
diff --git a/cipher/cipher-gcm.c b/cipher/cipher-gcm.c
index 4c9f9ff5..a9c48551 100644
--- a/cipher/cipher-gcm.c
+++ b/cipher/cipher-gcm.c
@@ -107,6 +107,10 @@ extern void _gcry_ghash_setup_riscv_zbb_zbc(gcry_cipher_hd_t c);
 
 extern unsigned int _gcry_ghash_riscv_zbb_zbc(gcry_cipher_hd_t c, byte *result,
                                               const byte *buf, size_t nblocks);
+
+extern unsigned int _gcry_polyval_riscv_zbb_zbc(gcry_cipher_hd_t c,
+                                                byte *result, const byte *buf,
+                                                size_t nblocks);
 #endif /* GCM_USE_RISCV_ZBB_ZBC */
 
 #ifdef GCM_USE_RISCV_ZVKG
@@ -655,6 +659,7 @@ setupM (gcry_cipher_hd_t c)
       && (features & HWF_RISCV_ZBC))
     {
       c->u_mode.gcm.ghash_fn = _gcry_ghash_riscv_zbb_zbc;
+      c->u_mode.gcm.polyval_fn = _gcry_polyval_riscv_zbb_zbc;
       _gcry_ghash_setup_riscv_zbb_zbc (c);
     }
 #endif
-- 
2.48.1
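
Note, not part of the submitted patch: the table above looks like
tests/bench-slope output, and the accelerated GCM-SIV authentication path
can be exercised through the public API roughly as in the untested sketch
below (arbitrary key/nonce, error handling omitted):

  #include <gcrypt.h>

  int
  main (void)
  {
    gcry_cipher_hd_t hd;
    unsigned char key[16] = { 0 };
    unsigned char nonce[12] = { 0 };
    unsigned char aad[4096] = { 0 };
    unsigned char pt[4096] = { 0 };
    unsigned char ct[4096];
    unsigned char tag[16];

    gcry_check_version (NULL);
    gcry_cipher_open (&hd, GCRY_CIPHER_AES128, GCRY_CIPHER_MODE_GCM_SIV, 0);
    gcry_cipher_setkey (hd, key, sizeof key);
    gcry_cipher_setiv (hd, nonce, sizeof nonce);
    gcry_cipher_authenticate (hd, aad, sizeof aad); /* POLYVAL over the AAD */
    gcry_cipher_encrypt (hd, ct, sizeof ct, pt, sizeof pt);
    gcry_cipher_gettag (hd, tag, sizeof tag);
    gcry_cipher_close (hd);
    return 0;
  }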