* cipher/Makefile.am: Add 'cipher-gcm-riscv-zvkg.c'.
* cipher/cipher-gcm-riscv-zbb-zbc.c: Fix cipher-internal.h include.
* cipher/cipher-gcm-riscv-zvkg.c: New.
* cipher/cipher-gcm.c [GCM_USE_RISCV_ZVKG] (_gcry_ghash_setup_riscv_zvkg)
(_gcry_ghash_riscv_zvkg, _gcry_polyval_riscv_zvkg): New.
(setupM) [GCM_USE_RISCV_ZVKG]: Add setup for Zvkg implementation.
* cipher/cipher-internal.h (GCM_USE_RISCV_ZVKG): New.
* configure.ac: Add 'cipher-gcm-riscv-zvkg.lo'.
(GCRY_RISCV_VECTOR_CRYPTO_INTRINSICS_TEST): Add check for Zvkg intrinsic.
* src/g10lib.h (HWF_RISCV_ZVKG): Insert before HWF_RISCV_ZVKNED.
* src/hwf-riscv.c (HWF_RISCV_HWPROBE_EXT_ZVKG): New.
(hwprobe_features): Add HWF_RISCV_ZVKG.
* src/hwfeatures.c (hwflist) [HAVE_CPU_ARCH_RISCV]: Add "riscv-zvkg".
--

The implementation has been tested against the QEMU emulator, as no
actual hardware with these instructions is available yet.
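
For reference, a minimal way to exercise the new code under user-mode
emulation could look like the following (assuming qemu-riscv64 8.2 or
newer, which exposes the vector crypto extensions as CPU properties,
and a cross-built tests/basic binary; the exact property names are an
assumption and may differ between QEMU versions):

  # hypothetical invocation; adjust the CPU properties to the local QEMU
  qemu-riscv64 -cpu rv64,v=true,vlen=128,zvkg=true ./tests/basic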

Signed-off-by: Jussi Kivilinna <jussi.kivili...@iki.fi>
---
 cipher/Makefile.am                |  11 ++-
 cipher/cipher-gcm-riscv-zbb-zbc.c |   3 +-
 cipher/cipher-gcm-riscv-zvkg.c    | 130 ++++++++++++++++++++++++++++++
 cipher/cipher-gcm.c               |  20 +++++
 cipher/cipher-internal.h          |   9 +++
 configure.ac                      |  10 +++
 src/g10lib.h                      |   7 +-
 src/hwf-riscv.c                   |   2 +
 src/hwfeatures.c                  |   1 +
 9 files changed, 187 insertions(+), 6 deletions(-)
 create mode 100644 cipher/cipher-gcm-riscv-zvkg.c

diff --git a/cipher/Makefile.am b/cipher/Makefile.am
index 85c9c6d8..d31da411 100644
--- a/cipher/Makefile.am
+++ b/cipher/Makefile.am
@@ -93,7 +93,7 @@ EXTRA_libcipher_la_SOURCES = \
        cipher-gcm-ppc.c cipher-gcm-intel-pclmul.c \
        cipher-gcm-aarch64-simd.c cipher-gcm-armv7-neon.S \
        cipher-gcm-armv8-aarch32-ce.S cipher-gcm-armv8-aarch64-ce.S \
-       cipher-gcm-riscv-zbb-zbc.c \
+       cipher-gcm-riscv-zbb-zbc.c cipher-gcm-riscv-zvkg.c \
        crc.c crc-intel-pclmul.c crc-armv8-ce.c \
        crc-armv8-aarch64-ce.S \
        crc-ppc.c \
@@ -383,16 +383,25 @@ rijndael-vp-riscv.lo: $(srcdir)/rijndael-vp-riscv.c Makefile
 if ENABLE_RISCV_VECTOR_CRYPTO_INTRINSICS_EXTRA_CFLAGS
 riscv_vector_crypto_aes_cflags = -O2 -march=rv64imafdcv_zvkned -mstrict-align
 riscv_vector_crypto_sha_cflags = -O2 -march=rv64imafdcv_zvknha_zvknhb_zvkb -mstrict-align
+riscv_vector_crypto_gcm_cflags = -O2 -march=rv64imafdcv_zvkg -mstrict-align
 else
 if SUPPORT_CC_RISCV_MSTRICT_ALIGN
 riscv_vector_crypto_aes_cflags = -O2 -mstrict-align
 riscv_vector_crypto_sha_cflags = -O2 -mstrict-align
+riscv_vector_crypto_gcm_cflags = -O2 -mstrict-align
 else
 riscv_vector_crypto_aes_cflags =
 riscv_vector_crypto_sha_cflags =
+riscv_vector_crypto_gcm_cflags =
 endif
 endif
 
+cipher-gcm-riscv-zvkg.o: $(srcdir)/cipher-gcm-riscv-zvkg.c Makefile
+       `echo $(COMPILE) $(riscv_vector_crypto_gcm_cflags) -c $< | $(instrumentation_munging) `
+
+cipher-gcm-riscv-zvkg.lo: $(srcdir)/cipher-gcm-riscv-zvkg.c Makefile
+       `echo $(LTCOMPILE) $(riscv_vector_crypto_gcm_cflags) -c $< | $(instrumentation_munging) `
+
 rijndael-riscv-zvkned.o: $(srcdir)/rijndael-riscv-zvkned.c Makefile
        `echo $(COMPILE) $(riscv_vector_crypto_aes_cflags) -c $< | $(instrumentation_munging) `
 
diff --git a/cipher/cipher-gcm-riscv-zbb-zbc.c b/cipher/cipher-gcm-riscv-zbb-zbc.c
index 1a1f1484..61539274 100644
--- a/cipher/cipher-gcm-riscv-zbb-zbc.c
+++ b/cipher/cipher-gcm-riscv-zbb-zbc.c
@@ -23,7 +23,7 @@
 #include "g10lib.h"
 #include "cipher.h"
 #include "bufhelp.h"
-#include "./cipher-internal.h"
+#include "cipher-internal.h"
 
 #ifdef GCM_USE_RISCV_ZBB_ZBC
 
@@ -237,7 +237,6 @@ _gcry_ghash_riscv_zbb_zbc(gcry_cipher_hd_t c, byte *result, const byte *buf,
 
   store_aligned_u64x2(result, rhash);
 
-
   return 0;
 }
 
diff --git a/cipher/cipher-gcm-riscv-zvkg.c b/cipher/cipher-gcm-riscv-zvkg.c
new file mode 100644
index 00000000..c459a6fb
--- /dev/null
+++ b/cipher/cipher-gcm-riscv-zvkg.c
@@ -0,0 +1,130 @@
+/* cipher-gcm-riscv-zvkg.c - RISC-V vector cryptography Zvkg accelerated GHASH
+ * Copyright (C) 2025 Jussi Kivilinna <jussi.kivili...@iki.fi>
+ *
+ * This file is part of Libgcrypt.
+ *
+ * Libgcrypt is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * Libgcrypt is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this program; if not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <config.h>
+
+#include "g10lib.h"
+#include "simd-common-riscv.h"
+#include "cipher-internal.h"
+
+#ifdef GCM_USE_RISCV_ZVKG
+
+#include <riscv_vector.h>
+
+
+#define ALWAYS_INLINE inline __attribute__((always_inline))
+#define NO_INLINE __attribute__((noinline))
+#define NO_INSTRUMENT_FUNCTION __attribute__((no_instrument_function))
+
+#define ASM_FUNC_ATTR          NO_INSTRUMENT_FUNCTION
+#define ASM_FUNC_ATTR_INLINE   ASM_FUNC_ATTR ALWAYS_INLINE
+#define ASM_FUNC_ATTR_NOINLINE ASM_FUNC_ATTR NO_INLINE
+
+
+#define cast_u8m1_u32m1(a) __riscv_vreinterpret_v_u8m1_u32m1(a)
+#define cast_u32m1_u8m1(a) __riscv_vreinterpret_v_u32m1_u8m1(a)
+
+
+static ASM_FUNC_ATTR_INLINE vuint32m1_t
+unaligned_load_u32m1(const void *ptr, size_t vl_u32)
+{
+  size_t vl_bytes = vl_u32 * 4;
+
+  return cast_u8m1_u32m1(__riscv_vle8_v_u8m1(ptr, vl_bytes));
+}
+
+static ASM_FUNC_ATTR_INLINE vuint32m1_t
+bswap128_u32m1(vuint32m1_t vec, size_t vl_u32)
+{
+  static const byte bswap128_arr[16] =
+    { 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 };
+  size_t vl_bytes = vl_u32 * 4;
+  vuint8m1_t bswap128 = __riscv_vle8_v_u8m1(bswap128_arr, vl_bytes);
+
+  return cast_u8m1_u32m1(
+           __riscv_vrgather_vv_u8m1(cast_u32m1_u8m1(vec), bswap128, vl_bytes));
+}
+
+
+ASM_FUNC_ATTR_NOINLINE int
+_gcry_ghash_setup_riscv_zvkg(gcry_cipher_hd_t c)
+{
+  (void)c;
+
+  if (__riscv_vsetvl_e32m1(4) != 4)
+    {
+      return 0; // VLEN=128 not supported.
+    }
+
+  return 1;
+}
+
+ASM_FUNC_ATTR_NOINLINE unsigned int
+_gcry_ghash_riscv_zvkg(gcry_cipher_hd_t c, byte *result, const byte *buf,
+                      size_t nblocks)
+{
+  u32 *result_u32 = (void *)result;
+  const u32 *key_u32 = (void *)c->u_mode.gcm.u_ghash_key.key;
+  size_t vl = 4;
+  vuint32m1_t rhash = __riscv_vle32_v_u32m1(result_u32, vl);
+  vuint32m1_t rh1 = __riscv_vle32_v_u32m1(key_u32, vl);
+
+  while (nblocks)
+    {
+      vuint32m1_t data = unaligned_load_u32m1(buf, vl);
+      buf += 16;
+      nblocks--;
+
+      rhash = __riscv_vghsh_vv_u32m1(rhash, rh1, data, vl);
+    }
+
+  __riscv_vse32_v_u32m1(result_u32, rhash, vl);
+
+  clear_vec_regs();
+
+  return 0;
+}
+
+ASM_FUNC_ATTR_NOINLINE unsigned int
+_gcry_polyval_riscv_zvkg(gcry_cipher_hd_t c, byte *result, const byte *buf,
+                      size_t nblocks)
+{
+  u32 *result_u32 = (void *)result;
+  const u32 *key_u32 = (void *)c->u_mode.gcm.u_ghash_key.key;
+  size_t vl = 4;
+  vuint32m1_t rhash = __riscv_vle32_v_u32m1(result_u32, vl);
+  vuint32m1_t rh1 = __riscv_vle32_v_u32m1(key_u32, vl);
+
+  while (nblocks)
+    {
+      vuint32m1_t data = bswap128_u32m1(unaligned_load_u32m1(buf, vl), vl);
+      buf += 16;
+      nblocks--;
+
+      rhash = __riscv_vghsh_vv_u32m1(rhash, rh1, data, vl);
+    }
+
+  __riscv_vse32_v_u32m1(result_u32, rhash, vl);
+
+  clear_vec_regs();
+
+  return 0;
+}
+
+#endif /* GCM_USE_RISCV_ZVKG */
diff --git a/cipher/cipher-gcm.c b/cipher/cipher-gcm.c
index 5bb98015..143ae52a 100644
--- a/cipher/cipher-gcm.c
+++ b/cipher/cipher-gcm.c
@@ -109,6 +109,16 @@ extern unsigned int _gcry_ghash_riscv_zbb_zbc(gcry_cipher_hd_t c, byte *result,
                                              const byte *buf, size_t nblocks);
 #endif /* GCM_USE_RISCV_ZBB_ZBC */
 
+#ifdef GCM_USE_RISCV_ZVKG
+extern int _gcry_ghash_setup_riscv_zvkg(gcry_cipher_hd_t c);
+
+extern unsigned int _gcry_ghash_riscv_zvkg(gcry_cipher_hd_t c, byte *result,
+                                          const byte *buf, size_t nblocks);
+
+extern unsigned int _gcry_polyval_riscv_zvkg(gcry_cipher_hd_t c, byte *result,
+                                            const byte *buf, size_t nblocks);
+#endif /* GCM_USE_RISCV_ZVKG */
+
 #ifdef GCM_USE_AARCH64
 extern void _gcry_ghash_setup_aarch64_simd(gcry_cipher_hd_t c);
 
@@ -628,6 +638,16 @@ setupM (gcry_cipher_hd_t c)
       _gcry_ghash_setup_aarch64_simd (c);
     }
 #endif
+#ifdef GCM_USE_RISCV_ZVKG
+  else if ((features & HWF_RISCV_IMAFDC)
+          && (features & HWF_RISCV_V)
+          && (features & HWF_RISCV_ZVKG)
+          && _gcry_ghash_setup_riscv_zvkg (c))
+    {
+      c->u_mode.gcm.ghash_fn = _gcry_ghash_riscv_zvkg;
+      c->u_mode.gcm.polyval_fn = _gcry_polyval_riscv_zvkg;
+    }
+#endif
 #ifdef GCM_USE_RISCV_ZBB_ZBC
   else if ((features & HWF_RISCV_IMAFDC)
           && (features & HWF_RISCV_ZBB)
diff --git a/cipher/cipher-internal.h b/cipher/cipher-internal.h
index f2a2099a..dc4878bb 100644
--- a/cipher/cipher-internal.h
+++ b/cipher/cipher-internal.h
@@ -144,6 +144,15 @@
 # define GCM_USE_RISCV_ZBB_ZBC 1
 #endif
 
+/* GCM_USE_RISCV_ZVKG indicates whether to enable RISC-V vector Zvkg
+ * code. */
+#undef GCM_USE_RISCV_ZVKG
+#if defined (__riscv) && \
+    defined(HAVE_COMPATIBLE_CC_RISCV_VECTOR_INTRINSICS) && \
+    defined(HAVE_COMPATIBLE_CC_RISCV_VECTOR_CRYPTO_INTRINSICS)
+# define GCM_USE_RISCV_ZVKG 1
+#endif
+
 typedef unsigned int (*ghash_fn_t) (gcry_cipher_hd_t c, byte *result,
                                     const byte *buf, size_t nblocks);
 
diff --git a/configure.ac b/configure.ac
index 36256df0..1b7d79f3 100644
--- a/configure.ac
+++ b/configure.ac
@@ -2838,6 +2838,15 @@ m4_define([GCRY_RISCV_VECTOR_CRYPTO_INTRINSICS_TEST],
        __riscv_vse32_v_u32m1(ptr + 3 * vl, f, vl);
        __riscv_vse32_v_u32m1(ptr + 4 * vl, g, vl);
       }
+      void test_ghash(unsigned int *ptr)
+      {
+       int vl = __riscv_vsetvl_e32m1 (4);
+       vuint32m1_t a = __riscv_vle32_v_u32m1(ptr + 0 * vl, vl);
+       vuint32m1_t b = __riscv_vle32_v_u32m1(ptr + 1 * vl, vl);
+       vuint32m1_t c = __riscv_vle32_v_u32m1(ptr + 2 * vl, vl);
+       vuint32m1_t d = __riscv_vghsh_vv_u32m1(a, b, c, vl);
+       __riscv_vse32_v_u32m1(ptr + 0 * vl, d, vl);
+      }
       void test_inline_vec_asm(unsigned int *ptr)
       {
        int vl = __riscv_vsetvl_e32m1 (4);
@@ -4059,6 +4068,7 @@ case "${host}" in
   ;;
   riscv64-*-*)
     GCRYPT_ASM_DIGESTS="$GCRYPT_ASM_DIGESTS cipher-gcm-riscv-zbb-zbc.lo"
+    GCRYPT_ASM_DIGESTS="$GCRYPT_ASM_DIGESTS cipher-gcm-riscv-zvkg.lo"
   ;;
 esac
 
diff --git a/src/g10lib.h b/src/g10lib.h
index 991ec3ea..6a4b9313 100644
--- a/src/g10lib.h
+++ b/src/g10lib.h
@@ -281,9 +281,10 @@ char **_gcry_strtokenize (const char *string, const char *delim);
 #define HWF_RISCV_ZBB           (1 << 2)
 #define HWF_RISCV_ZBC           (1 << 3)
 #define HWF_RISCV_ZVKB          (1 << 4)
-#define HWF_RISCV_ZVKNED        (1 << 5)
-#define HWF_RISCV_ZVKNHA        (1 << 6)
-#define HWF_RISCV_ZVKNHB        (1 << 7)
+#define HWF_RISCV_ZVKG          (1 << 5)
+#define HWF_RISCV_ZVKNED        (1 << 6)
+#define HWF_RISCV_ZVKNHA        (1 << 7)
+#define HWF_RISCV_ZVKNHB        (1 << 8)
 
 #endif
 
diff --git a/src/hwf-riscv.c b/src/hwf-riscv.c
index c37fd8dc..5a7cf777 100644
--- a/src/hwf-riscv.c
+++ b/src/hwf-riscv.c
@@ -191,6 +191,7 @@ detect_riscv_at_hwcap(void)
 #define HWF_RISCV_HWPROBE_EXT_ZBS           (1U << 5)
 #define HWF_RISCV_HWPROBE_EXT_ZBC           (1U << 7)
 #define HWF_RISCV_HWPROBE_EXT_ZVKB          (1U << 19)
+#define HWF_RISCV_HWPROBE_EXT_ZVKG          (1U << 20)
 #define HWF_RISCV_HWPROBE_EXT_ZVKNED        (1U << 21)
 #define HWF_RISCV_HWPROBE_EXT_ZVKNHA        (1U << 22)
 #define HWF_RISCV_HWPROBE_EXT_ZVKNHB        (1U << 23)
@@ -216,6 +217,7 @@ static const struct hwprobe_feature_map_s hwprobe_features[] =
     { HWF_RISCV_HWPROBE_EXT_ZBB,     HWF_RISCV_ZBB },
     { HWF_RISCV_HWPROBE_EXT_ZBC,     HWF_RISCV_ZBC },
     { HWF_RISCV_HWPROBE_EXT_ZVKB,    HWF_RISCV_ZVKB },
+    { HWF_RISCV_HWPROBE_EXT_ZVKG,    HWF_RISCV_ZVKG },
     { HWF_RISCV_HWPROBE_EXT_ZVKNED,  HWF_RISCV_ZVKNED },
     { HWF_RISCV_HWPROBE_EXT_ZVKNHA,  HWF_RISCV_ZVKNHA },
     { HWF_RISCV_HWPROBE_EXT_ZVKNHB,  HWF_RISCV_ZVKNHB },
diff --git a/src/hwfeatures.c b/src/hwfeatures.c
index 08b33090..aae9fdd3 100644
--- a/src/hwfeatures.c
+++ b/src/hwfeatures.c
@@ -97,6 +97,7 @@ static struct
     { HWF_RISCV_ZBB,           "riscv-zbb" },
     { HWF_RISCV_ZBC,           "riscv-zbc" },
     { HWF_RISCV_ZVKB,          "riscv-zvkb" },
+    { HWF_RISCV_ZVKG,          "riscv-zvkg" },
     { HWF_RISCV_ZVKNED,        "riscv-zvkned" },
     { HWF_RISCV_ZVKNHA,        "riscv-zvknha" },
     { HWF_RISCV_ZVKNHB,        "riscv-zvknhb" },
-- 
2.48.1

