From: Andrei Botila <andrei.bot...@nxp.com>

Newer CAAM versions (Era 9+) support 16B IVs. Since the HW limitation is
no longer present on these devices, newer versions should process
requests containing 16B IVs directly in hardware without using a fallback.

Signed-off-by: Andrei Botila <andrei.bot...@nxp.com>
---
 drivers/crypto/caam/caamalg.c      | 13 +++++++++----
 drivers/crypto/caam/caamalg_desc.c | 27 ++++++++++++++++-----------
 2 files changed, 25 insertions(+), 15 deletions(-)

diff --git a/drivers/crypto/caam/caamalg.c b/drivers/crypto/caam/caamalg.c
index a5447ae430b0..7e03854252b0 100644
--- a/drivers/crypto/caam/caamalg.c
+++ b/drivers/crypto/caam/caamalg.c
@@ -833,6 +833,7 @@ static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
 {
        struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
        struct device *jrdev = ctx->jrdev;
+       struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
        u32 *desc;
        int err;
 
@@ -842,9 +843,12 @@ static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
                return err;
        }
 
-       err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
-       if (err)
-               return err;
+       if (ctrlpriv->era <= 8 || (keylen != 2 * AES_KEYSIZE_128 &&
+                                  keylen != 2 * AES_KEYSIZE_256)) {
+               err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
+               if (err)
+                       return err;
+       }
 
        ctx->cdata.keylen = keylen;
        ctx->cdata.key_virt = key;
@@ -1786,13 +1790,14 @@ static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
        struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
        struct device *jrdev = ctx->jrdev;
        struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
+       struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
        u32 *desc;
        int ret = 0;
 
        if (!req->cryptlen)
                return 0;
 
-       if (ctx->fallback && (xts_skcipher_ivsize(req) ||
-       if (ctx->fallback && ((ctrlpriv->era <= 8 && xts_skcipher_ivsize(req)) ||
                              (ctx->cdata.keylen != 2 * AES_KEYSIZE_128 &&
                               ctx->cdata.keylen != 2 * AES_KEYSIZE_256))) {
                struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
diff --git a/drivers/crypto/caam/caamalg_desc.c b/drivers/crypto/caam/caamalg_desc.c
index d6c58184bb57..433d6d5cd582 100644
--- a/drivers/crypto/caam/caamalg_desc.c
+++ b/drivers/crypto/caam/caamalg_desc.c
@@ -1550,13 +1550,14 @@ void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
        set_jump_tgt_here(desc, key_jump_cmd);
 
        /*
-        * create sequence for loading the sector index
-        * Upper 8B of IV - will be used as sector index
-        * Lower 8B of IV - will be discarded
+        * create sequence for loading the sector index / 16B tweak value
+        * Lower 8B of IV - sector index / tweak lower half
+        * Upper 8B of IV - upper half of 16B tweak
         */
        append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
                        (0x20 << LDST_OFFSET_SHIFT));
-       append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
+       append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+                       (0x30 << LDST_OFFSET_SHIFT));
 
        /* Load operation */
        append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
@@ -1565,9 +1566,11 @@ void cnstr_shdsc_xts_skcipher_encap(u32 * const desc, struct alginfo *cdata)
        /* Perform operation */
        skcipher_append_src_dst(desc);
 
-       /* Store upper 8B of IV */
+       /* Store lower 8B and upper 8B of IV */
        append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
                         (0x20 << LDST_OFFSET_SHIFT));
+       append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+                        (0x30 << LDST_OFFSET_SHIFT));
 
        print_hex_dump_debug("xts skcipher enc shdesc@" __stringify(__LINE__)
                             ": ", DUMP_PREFIX_ADDRESS, 16, 4,
@@ -1609,23 +1612,25 @@ void cnstr_shdsc_xts_skcipher_decap(u32 * const desc, struct alginfo *cdata)
        set_jump_tgt_here(desc, key_jump_cmd);
 
        /*
-        * create sequence for loading the sector index
-        * Upper 8B of IV - will be used as sector index
-        * Lower 8B of IV - will be discarded
+        * create sequence for loading the sector index / 16B tweak value
+        * Lower 8B of IV - sector index / tweak lower half
+        * Upper 8B of IV - upper half of 16B tweak
         */
        append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
                        (0x20 << LDST_OFFSET_SHIFT));
-       append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
-
+       append_seq_load(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+                       (0x30 << LDST_OFFSET_SHIFT));
        /* Load operation */
        append_dec_op1(desc, cdata->algtype);
 
        /* Perform operation */
        skcipher_append_src_dst(desc);
 
-       /* Store upper 8B of IV */
+       /* Store lower 8B and upper 8B of IV */
        append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
                         (0x20 << LDST_OFFSET_SHIFT));
+       append_seq_store(desc, 8, LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
+                        (0x30 << LDST_OFFSET_SHIFT));
 
        print_hex_dump_debug("xts skcipher dec shdesc@" __stringify(__LINE__)
                             ": ", DUMP_PREFIX_ADDRESS, 16, 4, desc,
-- 
2.17.1

Reply via email to