Re: [PATCH 2/5] Glue code for optimized ChaCha20 implementation for ppc64le.

2023-04-25 Thread Danny Tsen

Got it.  Will fix it.

Thanks.

-Danny


On 4/25/23 12:41 AM, Herbert Xu wrote:

On Mon, Apr 24, 2023 at 02:47:23PM -0400, Danny Tsen wrote:

+static int chacha_p10_stream_xor(struct skcipher_request *req,
+                                const struct chacha_ctx *ctx, const u8 *iv)
+{
+   struct skcipher_walk walk;
+   u32 state[16];
+   int err;
+
+   err = skcipher_walk_virt(&walk, req, false);
+   if (err)
+   return err;
+
+   chacha_init_generic(state, ctx->key, iv);
+
+   while (walk.nbytes > 0) {
+   unsigned int nbytes = walk.nbytes;
+
+   if (nbytes < walk.total)
+   nbytes = rounddown(nbytes, walk.stride);
+
+   if (!static_branch_likely(&have_p10) ||

You don't need the static branch in the Crypto API code since
the registration is already conditional.

Cheers,


Re: [PATCH 2/5] Glue code for optimized ChaCha20 implementation for ppc64le.

2023-04-24 Thread Herbert Xu
On Mon, Apr 24, 2023 at 02:47:23PM -0400, Danny Tsen wrote:
>
> +static int chacha_p10_stream_xor(struct skcipher_request *req,
> +  const struct chacha_ctx *ctx, const u8 *iv)
> +{
> + struct skcipher_walk walk;
> + u32 state[16];
> + int err;
> +
> + err = skcipher_walk_virt(&walk, req, false);
> + if (err)
> + return err;
> +
> + chacha_init_generic(state, ctx->key, iv);
> +
> + while (walk.nbytes > 0) {
> + unsigned int nbytes = walk.nbytes;
> +
> + if (nbytes < walk.total)
> + nbytes = rounddown(nbytes, walk.stride);
> +
> + if (!static_branch_likely(&have_p10) ||

You don't need the static branch in the Crypto API code since
the registration is already conditional.
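
A minimal sketch of the resulting walk-loop body, assuming the
algorithms are only registered once P10 support has been detected, so
that only the SIMD-usability test remains:

        if (!crypto_simd_usable()) {
                chacha_crypt_generic(state, walk.dst.virt.addr,
                                     walk.src.virt.addr, nbytes,
                                     ctx->nrounds);
        } else {
                vsx_begin();
                chacha_p10_do_8x(state, walk.dst.virt.addr,
                                 walk.src.virt.addr, nbytes, ctx->nrounds);
                vsx_end();
        }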

Cheers,
-- 
Email: Herbert Xu <herbert@gondor.apana.org.au>
Home Page: http://gondor.apana.org.au/~herbert/
PGP Key: http://gondor.apana.org.au/~herbert/pubkey.txt


Re: [PATCH 2/5] Glue code for optimized ChaCha20 implementation for ppc64le.

2023-04-24 Thread Herbert Xu
On Tue, Apr 25, 2023 at 01:37:22PM +0800, Herbert Xu wrote:
> On Mon, Apr 24, 2023 at 02:47:23PM -0400, Danny Tsen wrote:
> >
> > +static int __init chacha_p10_init(void)
> > +{
> > +   static_branch_enable(&have_p10);
> > +
> > +   return IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ?
> > +   crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;
> 
> What is this for? The usual way is to select CRYPTO_SKCIPHER
> rather than have a mysterious failure at run-time.

Nevermind, I see that you also have non-Crypto API code in there.
-- 
Email: Herbert Xu <herbert@gondor.apana.org.au>
Home Page: http://gondor.apana.org.au/~herbert/
PGP Key: http://gondor.apana.org.au/~herbert/pubkey.txt


Re: [PATCH 2/5] Glue code for optimized ChaCha20 implementation for ppc64le.

2023-04-24 Thread Herbert Xu
On Mon, Apr 24, 2023 at 02:47:23PM -0400, Danny Tsen wrote:
>
> +static int __init chacha_p10_init(void)
> +{
> + static_branch_enable(&have_p10);
> +
> + return IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ?
> + crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;

What is this for? The usual way is to select CRYPTO_SKCIPHER
rather than have a mysterious failure at run-time.
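
With CRYPTO_SKCIPHER selected from Kconfig, the init path can drop the
IS_REACHABLE() guard entirely. A minimal sketch, assuming the P10
capability is checked via cpu_has_feature(CPU_FTR_ARCH_31) (that test
is illustrative, not taken from this series):

        static int __init chacha_p10_init(void)
        {
                /* Illustrative guard: refuse to load on pre-P10 CPUs
                 * rather than failing mysteriously at run-time. */
                if (!cpu_has_feature(CPU_FTR_ARCH_31))
                        return -ENODEV;

                static_branch_enable(&have_p10);

                /* CRYPTO_SKCIPHER is selected from Kconfig, so the
                 * registration call is always reachable. */
                return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
        }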

Thanks,
-- 
Email: Herbert Xu <herbert@gondor.apana.org.au>
Home Page: http://gondor.apana.org.au/~herbert/
PGP Key: http://gondor.apana.org.au/~herbert/pubkey.txt


[PATCH 2/5] Glue code for optimized ChaCha20 implementation for ppc64le.

2023-04-24 Thread Danny Tsen
Signed-off-by: Danny Tsen <dtsen@linux.ibm.com>
---
 arch/powerpc/crypto/chacha-p10-glue.c | 223 ++
 1 file changed, 223 insertions(+)
 create mode 100644 arch/powerpc/crypto/chacha-p10-glue.c

diff --git a/arch/powerpc/crypto/chacha-p10-glue.c b/arch/powerpc/crypto/chacha-p10-glue.c
new file mode 100644
index 000000000000..cefb150e7b3c
--- /dev/null
+++ b/arch/powerpc/crypto/chacha-p10-glue.c
@@ -0,0 +1,223 @@
+// SPDX-License-Identifier: GPL-2.0-or-later
+/*
+ * PowerPC P10 (ppc64le) accelerated ChaCha and XChaCha stream ciphers,
+ * including ChaCha20 (RFC7539)
+ *
+ * Copyright 2023- IBM Corp. All rights reserved.
+ */
+
+#include <crypto/algapi.h>
+#include <crypto/internal/chacha.h>
+#include <crypto/internal/simd.h>
+#include <crypto/internal/skcipher.h>
+#include <linux/kernel.h>
+#include <linux/module.h>
+#include <linux/cpufeature.h>
+#include <linux/sizes.h>
+#include <asm/simd.h>
+#include <asm/switch_to.h>
+
+asmlinkage void chacha_p10le_8x(u32 *state, u8 *dst, const u8 *src,
+   unsigned int len, int nrounds);
+
+static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_p10);
+
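+/*
+ * enable_kernel_vsx()/disable_kernel_vsx() must run with preemption
+ * off: the kernel does not save kernel-mode vector/VSX register state
+ * across a context switch, so preemption stays disabled between the
+ * two calls.
+ */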
+static void vsx_begin(void)
+{
+   preempt_disable();
+   enable_kernel_vsx();
+}
+
+static void vsx_end(void)
+{
+   disable_kernel_vsx();
+   preempt_enable();
+}
+
+static void chacha_p10_do_8x(u32 *state, u8 *dst, const u8 *src,
+                             unsigned int bytes, int nrounds)
+{
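+   /* Feed the largest multiple of 256 bytes to the vector routine;
+    * the remainder is finished by the generic implementation. */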
+   unsigned int l = bytes & ~0x0FF;
+
+   if (l > 0) {
+   chacha_p10le_8x(state, dst, src, l, nrounds);
+   bytes -= l;
+   src += l;
+   dst += l;
+   state[12] += l / CHACHA_BLOCK_SIZE;
+   }
+
+   if (bytes > 0)
+   chacha_crypt_generic(state, dst, src, bytes, nrounds);
+}
+
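+/* No vector path for single-block HChaCha or state setup; these
+ * simply wrap the generic helpers. */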
+void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
+{
+   hchacha_block_generic(state, stream, nrounds);
+}
+EXPORT_SYMBOL(hchacha_block_arch);
+
+void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
+{
+   chacha_init_generic(state, key, iv);
+}
+EXPORT_SYMBOL(chacha_init_arch);
+
+void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
+  int nrounds)
+{
+   if (!static_branch_likely(&have_p10) || bytes <= CHACHA_BLOCK_SIZE ||
+   !crypto_simd_usable())
+   return chacha_crypt_generic(state, dst, src, bytes, nrounds);
+
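+   /* Bound each VSX section to 4 KiB so preemption is never
+    * disabled for long stretches on large requests. */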
+   do {
+   unsigned int todo = min_t(unsigned int, bytes, SZ_4K);
+
+   vsx_begin();
+   chacha_p10_do_8x(state, dst, src, todo, nrounds);
+   vsx_end();
+
+   bytes -= todo;
+   src += todo;
+   dst += todo;
+   } while (bytes);
+}
+EXPORT_SYMBOL(chacha_crypt_arch);
+
+static int chacha_p10_stream_xor(struct skcipher_request *req,
+                                const struct chacha_ctx *ctx, const u8 *iv)
+{
+   struct skcipher_walk walk;
+   u32 state[16];
+   int err;
+
+   err = skcipher_walk_virt(&walk, req, false);
+   if (err)
+   return err;
+
+   chacha_init_generic(state, ctx->key, iv);
+
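+   /* All but the final chunk are rounded down to the walk stride so
+    * that only whole blocks are fed to the cipher mid-stream. */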
+   while (walk.nbytes > 0) {
+   unsigned int nbytes = walk.nbytes;
+
+   if (nbytes < walk.total)
+   nbytes = rounddown(nbytes, walk.stride);
+
+   if (!static_branch_likely(&have_p10) ||
+   !crypto_simd_usable()) {
+   chacha_crypt_generic(state, walk.dst.virt.addr,
+walk.src.virt.addr, nbytes,
+ctx->nrounds);
+   } else {
+   vsx_begin();
+   chacha_p10_do_8x(state, walk.dst.virt.addr,
+                    walk.src.virt.addr, nbytes, ctx->nrounds);
+   vsx_end();
+   }
+   err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
+   if (err)
+   break;
+   }
+
+   return err;
+}
+
+static int chacha_p10(struct skcipher_request *req)
+{
+   struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+   struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
+
+   return chacha_p10_stream_xor(req, ctx, req->iv);
+}
+
+static int xchacha_p10(struct skcipher_request *req)
+{
+   struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
+   struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
+   struct chacha_ctx subctx;
+   u32 state[16];
+   u8 real_iv[16];
+
+   chacha_init_generic(state, ctx->key, req->iv);
+   hchacha_block_arch(state, subctx.key, ctx->nrounds);
+   subctx.nrounds = ctx->nrounds;
+
+   memcpy(&real_iv[0], req->iv + 24, 8);
+   memcpy(&real_iv[8], req->iv + 16, 8);
+   return chacha_p10_stream_xor(req, &subctx, real_iv);
+}
+
+static struct skcipher_alg algs[] = {
+   {
+   .base.cra_name  = "chacha20",
+   .base.cra_driver_name   = "chacha20-p10",
+   .base.cra_priority  = 300,
+   .base.cra_blocksize = 1,
+