Re: [PATCH 4/7] crypto: qce: Add support for AEAD algorithms

2021-03-16 Thread Thara Gopinath




On 3/12/21 8:01 AM, Herbert Xu wrote:

> On Thu, Feb 25, 2021 at 01:27:13PM -0500, Thara Gopinath wrote:
> >
> > +static int
> > +qce_aead_async_req_handle(struct crypto_async_request *async_req)
> > +{
> > +	struct aead_request *req = aead_request_cast(async_req);
> > +	struct qce_aead_reqctx *rctx = aead_request_ctx(req);
> > +	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
> > +	struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
> > +	struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
> > +	struct qce_device *qce = tmpl->qce;
> > +	enum dma_data_direction dir_src, dir_dst;
> > +	unsigned int totallen;
> > +	bool diff_dst;
> > +	int ret;
> > +
> > +	if (IS_CCM_RFC4309(rctx->flags)) {
> > +		memset(rctx->ccm_rfc4309_iv, 0, QCE_MAX_IV_SIZE);
> > +		rctx->ccm_rfc4309_iv[0] = 3;
> > +		memcpy(&rctx->ccm_rfc4309_iv[1], ctx->ccm4309_salt, QCE_CCM4309_SALT_SIZE);
> > +		memcpy(&rctx->ccm_rfc4309_iv[4], req->iv, 8);
> > +		rctx->iv = rctx->ccm_rfc4309_iv;
> > +		rctx->ivsize = AES_BLOCK_SIZE;
> > +	} else {
> > +		rctx->iv = req->iv;
> > +		rctx->ivsize = crypto_aead_ivsize(tfm);
> > +	}
> > +	if (IS_CCM_RFC4309(rctx->flags))
> > +		rctx->assoclen = req->assoclen - 8;
> > +	else
> > +		rctx->assoclen = req->assoclen;
> > +
> > +	totallen = rctx->cryptlen + rctx->assoclen;
>
> This triggers a warning on totallen not being used.  Please fix.


Hmm, this is strange. I could swear that I checked for warnings before
sending this out. But I will fix it. I will wait a couple more days for
any other comments and then spin a v2.




Thanks,



--
Warm Regards
Thara
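
For context on the review comment above: the warning is most likely gcc's
-Wunused-but-set-variable (typically surfaced in W=1 builds), which fires
when a local variable is assigned but its value is never read. A minimal,
driver-independent illustration, not taken from the patch:

static unsigned int total_len_example(unsigned int cryptlen,
				      unsigned int assoclen)
{
	unsigned int totallen;

	totallen = cryptlen + assoclen;	/* assigned here ...                  */
	return cryptlen + assoclen;	/* ... but never read, so the compiler
					 * warns: "variable 'totallen' set but
					 * not used" */
}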


Re: [PATCH 4/7] crypto: qce: Add support for AEAD algorithms

2021-03-12 Thread Herbert Xu
On Thu, Feb 25, 2021 at 01:27:13PM -0500, Thara Gopinath wrote:
>
> +static int
> +qce_aead_async_req_handle(struct crypto_async_request *async_req)
> +{
> +	struct aead_request *req = aead_request_cast(async_req);
> +	struct qce_aead_reqctx *rctx = aead_request_ctx(req);
> +	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
> +	struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
> +	struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
> +	struct qce_device *qce = tmpl->qce;
> +	enum dma_data_direction dir_src, dir_dst;
> +	unsigned int totallen;
> +	bool diff_dst;
> +	int ret;
> +
> +	if (IS_CCM_RFC4309(rctx->flags)) {
> +		memset(rctx->ccm_rfc4309_iv, 0, QCE_MAX_IV_SIZE);
> +		rctx->ccm_rfc4309_iv[0] = 3;
> +		memcpy(&rctx->ccm_rfc4309_iv[1], ctx->ccm4309_salt, QCE_CCM4309_SALT_SIZE);
> +		memcpy(&rctx->ccm_rfc4309_iv[4], req->iv, 8);
> +		rctx->iv = rctx->ccm_rfc4309_iv;
> +		rctx->ivsize = AES_BLOCK_SIZE;
> +	} else {
> +		rctx->iv = req->iv;
> +		rctx->ivsize = crypto_aead_ivsize(tfm);
> +	}
> +	if (IS_CCM_RFC4309(rctx->flags))
> +		rctx->assoclen = req->assoclen - 8;
> +	else
> +		rctx->assoclen = req->assoclen;
> +
> +	totallen = rctx->cryptlen + rctx->assoclen;

This triggers a warning on totallen not being used.  Please fix.

Thanks,
-- 
Email: Herbert Xu 
Home Page: http://gondor.apana.org.au/~herbert/
PGP Key: http://gondor.apana.org.au/~herbert/pubkey.txt
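
A side note on the rfc4309 path in the hunk quoted above, for readers
unfamiliar with the format: as with the other IPsec AEAD templates, rfc4309()
carries the 8-byte ESP IV as part of the associated data (hence the
req->assoclen - 8 above), and the driver expands the salt plus IV into the
16-byte counter-block-style IV the crypto engine expects. A hedged sketch of
that layout follows; the helper name is made up for illustration and only
mirrors what the quoted code does:

#include <linux/string.h>
#include <linux/types.h>
#include <crypto/aes.h>

/* RFC 4309 fixes L = 4, so the flags octet of the counter block is L - 1. */
static void rfc4309_ctr_block_sketch(u8 block[AES_BLOCK_SIZE],
				     const u8 salt[3], const u8 iv[8])
{
	memset(block, 0, AES_BLOCK_SIZE);
	block[0] = 3;			/* flags: 4-octet length field        */
	memcpy(&block[1], salt, 3);	/* nonce[0..2]: salt from key material */
	memcpy(&block[4], iv, 8);	/* nonce[3..10]: per-request ESP IV   */
	/* block[12..15] carry the block counter and are left at zero here.  */
}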


[PATCH 4/7] crypto: qce: Add support for AEAD algorithms

2021-02-25 Thread Thara Gopinath
Introduce support to enable the following algorithms in the Qualcomm Crypto Engine driver.

- authenc(hmac(sha1),cbc(des))
- authenc(hmac(sha1),cbc(des3_ede))
- authenc(hmac(sha256),cbc(des))
- authenc(hmac(sha256),cbc(des3_ede))
- authenc(hmac(sha256),cbc(aes))
- ccm(aes)
- rfc4309(ccm(aes))

Signed-off-by: Thara Gopinath 
---
 drivers/crypto/Kconfig  |  15 +
 drivers/crypto/qce/Makefile |   1 +
 drivers/crypto/qce/aead.c   | 779 
 drivers/crypto/qce/aead.h   |  53 +++
 drivers/crypto/qce/common.h |   2 +
 drivers/crypto/qce/core.c   |   4 +
 6 files changed, 854 insertions(+)
 create mode 100644 drivers/crypto/qce/aead.c
 create mode 100644 drivers/crypto/qce/aead.h
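
For readers less familiar with how these transforms are consumed once the
driver registers them, here is a minimal sketch (not part of the patch) that
drives ccm(aes) through the generic kernel AEAD API. The function name, the
16-byte tag length and the synchronous wait are illustrative choices only;
whether the request ends up on the QCE hardware depends on algorithm priority
on the running system.

#include <crypto/aead.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

static int ccm_aes_encrypt_sketch(const u8 *key, unsigned int keylen, u8 *iv,
				  struct scatterlist *sg,
				  unsigned int assoclen, unsigned int cryptlen)
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	/* for ccm(aes) the 16-byte IV carries the length-field size in iv[0] (L - 1) */
	tfm = crypto_alloc_aead("ccm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_aead_setkey(tfm, key, keylen);
	if (!ret)
		ret = crypto_aead_setauthsize(tfm, 16);
	if (ret)
		goto free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto free_tfm;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);
	aead_request_set_ad(req, assoclen);
	/* in-place: sg covers assoclen + cryptlen plus room for the tag */
	aead_request_set_crypt(req, sg, sg, cryptlen, iv);

	ret = crypto_wait_req(crypto_aead_encrypt(req), &wait);

	aead_request_free(req);
free_tfm:
	crypto_free_aead(tfm);
	return ret;
}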

diff --git a/drivers/crypto/Kconfig b/drivers/crypto/Kconfig
index e535f28a8028..8caf296acda4 100644
--- a/drivers/crypto/Kconfig
+++ b/drivers/crypto/Kconfig
@@ -645,6 +645,12 @@ config CRYPTO_DEV_QCE_SHA
select CRYPTO_SHA1
select CRYPTO_SHA256
 
+config CRYPTO_DEV_QCE_AEAD
+   bool
+   depends on CRYPTO_DEV_QCE
+   select CRYPTO_AUTHENC
+   select CRYPTO_LIB_DES
+
 choice
prompt "Algorithms enabled for QCE acceleration"
default CRYPTO_DEV_QCE_ENABLE_ALL
@@ -665,6 +671,7 @@ choice
bool "All supported algorithms"
select CRYPTO_DEV_QCE_SKCIPHER
select CRYPTO_DEV_QCE_SHA
+   select CRYPTO_DEV_QCE_AEAD
help
  Enable all supported algorithms:
- AES (CBC, CTR, ECB, XTS)
@@ -690,6 +697,14 @@ choice
- SHA1, HMAC-SHA1
- SHA256, HMAC-SHA256
 
+   config CRYPTO_DEV_QCE_ENABLE_AEAD
+   bool "AEAD algorithms only"
+   select CRYPTO_DEV_QCE_AEAD
+   help
+ Enable AEAD algorithms only:
+   - authenc()
+   - ccm(aes)
+   - rfc4309(ccm(aes))
 endchoice
 
 config CRYPTO_DEV_QCE_SW_MAX_LEN
diff --git a/drivers/crypto/qce/Makefile b/drivers/crypto/qce/Makefile
index 14ade8a7d664..2cf8984e1b85 100644
--- a/drivers/crypto/qce/Makefile
+++ b/drivers/crypto/qce/Makefile
@@ -6,3 +6,4 @@ qcrypto-objs := core.o \
 
 qcrypto-$(CONFIG_CRYPTO_DEV_QCE_SHA) += sha.o
 qcrypto-$(CONFIG_CRYPTO_DEV_QCE_SKCIPHER) += skcipher.o
+qcrypto-$(CONFIG_CRYPTO_DEV_QCE_AEAD) += aead.o
diff --git a/drivers/crypto/qce/aead.c b/drivers/crypto/qce/aead.c
new file mode 100644
index ..b594c4bb2640
--- /dev/null
+++ b/drivers/crypto/qce/aead.c
@@ -0,0 +1,779 @@
+// SPDX-License-Identifier: GPL-2.0-only
+
+/*
+ * Copyright (C) 2021, Linaro Limited. All rights reserved.
+ */
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include "aead.h"
+
+#define CCM_NONCE_ADATA_SHIFT		6
+#define CCM_NONCE_AUTHSIZE_SHIFT	3
+#define MAX_CCM_ADATA_HEADER_LEN	6
+
+static LIST_HEAD(aead_algs);
+
+static void qce_aead_done(void *data)
+{
+	struct crypto_async_request *async_req = data;
+	struct aead_request *req = aead_request_cast(async_req);
+	struct qce_aead_reqctx *rctx = aead_request_ctx(req);
+	struct qce_aead_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
+	struct qce_alg_template *tmpl = to_aead_tmpl(crypto_aead_reqtfm(req));
+	struct qce_device *qce = tmpl->qce;
+	struct qce_result_dump *result_buf = qce->dma.result_buf;
+	enum dma_data_direction dir_src, dir_dst;
+	bool diff_dst;
+	int error;
+	u32 status;
+	unsigned int totallen;
+	unsigned char tag[SHA256_DIGEST_SIZE] = {0};
+	int ret = 0;
+
+	diff_dst = (req->src != req->dst) ? true : false;
+	dir_src = diff_dst ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL;
+	dir_dst = diff_dst ? DMA_FROM_DEVICE : DMA_BIDIRECTIONAL;
+
+	error = qce_dma_terminate_all(&qce->dma);
+	if (error)
+		dev_dbg(qce->dev, "aead dma termination error (%d)\n",
+			error);
+	if (diff_dst)
+		dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);
+
+	dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
+
+	if (IS_CCM(rctx->flags)) {
+		if (req->assoclen) {
+			sg_free_table(&rctx->src_tbl);
+			if (diff_dst)
+				sg_free_table(&rctx->dst_tbl);
+		} else {
+			if (!(IS_DECRYPT(rctx->flags) && !diff_dst))
+				sg_free_table(&rctx->dst_tbl);
+		}
+	} else {
+		sg_free_table(&rctx->dst_tbl);
+	}
+
+	error = qce_check_status(qce, &status);
+	if (error < 0 && (error != -EBADMSG))
+		dev_err(qce->dev, "aead operation error (%x)\n", status);
+
+	if (IS_ENCRYPT(rctx->flags)) {
+		totallen = req->cryptlen + req->assoclen;
+		if (IS_CCM(rctx->flags))