Re: [PATCH 09/22] dm-crypt: Make use of the new sg_map helper in 4 call sites

2017-04-14 Thread Logan Gunthorpe


On 14/04/17 02:39 AM, Christoph Hellwig wrote:
> On Thu, Apr 13, 2017 at 04:05:22PM -0600, Logan Gunthorpe wrote:
>> Very straightforward conversion to the new function in all four spots.
> 
> I think the right fix here is to switch dm-crypt to the ahash API
> that takes a scatterlist.

Hmm, well I'm not sure I understand the code well enough to make that
conversion, but I was looking at it. One tricky bit seems to be that
crypt_iv_lmk_one adds a seed, skips the first 16 bytes in the page and
then hashes another 16 bytes from other data. What would you do:
construct a new sgl covering those pieces and pass it to the ahash API?
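
For what it's worth, if that path were converted, I imagine the sgl
construction would look roughly like the sketch below: one entry for the
seed, one for the sector data past the first 16 bytes, and one for the 16
bytes of sector number material. The function and parameter names here are
placeholders rather than the real dm-crypt structures, and anything handed
to sg_set_buf() would have to be a linearly-mapped buffer, not stack memory:

#include <linux/types.h>
#include <linux/scatterlist.h>

/*
 * Rough sketch only: build a scatterlist covering seed + sector data
 * (minus the first 16 bytes) + 16 bytes of sector number material.
 * Names and sizes are illustrative, not the real dm-crypt fields.
 */
static void example_build_lmk_sgl(struct scatterlist *sg,
				  const u8 *seed, unsigned int seed_len,
				  struct page *page, unsigned int offset,
				  const u8 *sector_buf)
{
	sg_init_table(sg, 3);

	/* Seed material, assumed to be a kmalloc'd buffer. */
	sg_set_buf(&sg[0], seed, seed_len);

	/* Sector payload, skipping the first 16 bytes. */
	sg_set_page(&sg[1], page, 512 - 16, offset + 16);

	/* 16 bytes derived from the sector number. */
	sg_set_buf(&sg[2], sector_buf, 16);
}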

The other thing is that crypt_iv_lmk_post also modifies the page after
the hash with a crypto_xor, so you'd still need at least one kmap in there.

Logan


Re: [PATCH 09/22] dm-crypt: Make use of the new sg_map helper in 4 call sites

2017-04-14 Thread Christoph Hellwig
On Thu, Apr 13, 2017 at 04:05:22PM -0600, Logan Gunthorpe wrote:
> Very straightforward conversion to the new function in all four spots.

I think the right fix here is to switch dm-crypt to the ahash API
that takes a scatterlist.
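
Roughly speaking, the call pattern for hashing scatterlist data with the
ahash API looks like the sketch below. This is only meant to illustrate
the shape of the API, not the actual dm-crypt conversion; the algorithm
name and error handling are placeholders, and it assumes a synchronous
implementation so the sketch can skip completion handling:

#include <crypto/hash.h>
#include <linux/scatterlist.h>

/* Illustrative only: digest 'nbytes' of scatterlist data into 'out'. */
static int example_ahash_sg(struct scatterlist *sg, unsigned int nbytes,
			    u8 *out)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	int ret;

	/* Ask for a synchronous implementation to keep the sketch simple. */
	tfm = crypto_alloc_ahash("md5", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		crypto_free_ahash(tfm);
		return -ENOMEM;
	}

	/* The request takes the scatterlist directly; no kmap needed. */
	ahash_request_set_callback(req, 0, NULL, NULL);
	ahash_request_set_crypt(req, sg, out, nbytes);
	ret = crypto_ahash_digest(req);

	ahash_request_free(req);
	crypto_free_ahash(tfm);
	return ret;
}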


[PATCH 09/22] dm-crypt: Make use of the new sg_map helper in 4 call sites

2017-04-13 Thread Logan Gunthorpe
Very straightforward conversion to the new function in all four spots.

Signed-off-by: Logan Gunthorpe 
---
 drivers/md/dm-crypt.c | 38 +++++++++++++++++++++++++-------------
 1 file changed, 25 insertions(+), 13 deletions(-)

diff --git a/drivers/md/dm-crypt.c b/drivers/md/dm-crypt.c
index 389a363..6bd0ffc 100644
--- a/drivers/md/dm-crypt.c
+++ b/drivers/md/dm-crypt.c
@@ -589,9 +589,12 @@ static int crypt_iv_lmk_gen(struct crypt_config *cc, u8 *iv,
 	int r = 0;
 
 	if (bio_data_dir(dmreq->ctx->bio_in) == WRITE) {
-		src = kmap_atomic(sg_page(&dmreq->sg_in));
-		r = crypt_iv_lmk_one(cc, iv, dmreq, src + dmreq->sg_in.offset);
-		kunmap_atomic(src);
+		src = sg_map(&dmreq->sg_in, SG_KMAP_ATOMIC);
+		if (IS_ERR(src))
+			return PTR_ERR(src);
+
+		r = crypt_iv_lmk_one(cc, iv, dmreq, src);
+		sg_unmap(&dmreq->sg_in, src, SG_KMAP_ATOMIC);
 	} else
 		memset(iv, 0, cc->iv_size);
 
@@ -607,14 +610,17 @@ static int crypt_iv_lmk_post(struct crypt_config *cc, u8 *iv,
 	if (bio_data_dir(dmreq->ctx->bio_in) == WRITE)
 		return 0;
 
-	dst = kmap_atomic(sg_page(&dmreq->sg_out));
-	r = crypt_iv_lmk_one(cc, iv, dmreq, dst + dmreq->sg_out.offset);
+	dst = sg_map(&dmreq->sg_out, SG_KMAP_ATOMIC);
+	if (IS_ERR(dst))
+		return PTR_ERR(dst);
+
+	r = crypt_iv_lmk_one(cc, iv, dmreq, dst);
 
 	/* Tweak the first block of plaintext sector */
 	if (!r)
-		crypto_xor(dst + dmreq->sg_out.offset, iv, cc->iv_size);
+		crypto_xor(dst, iv, cc->iv_size);
 
-	kunmap_atomic(dst);
+	sg_unmap(&dmreq->sg_out, dst, SG_KMAP_ATOMIC);
 	return r;
 }
 
@@ -731,9 +737,12 @@ static int crypt_iv_tcw_gen(struct crypt_config *cc, u8 *iv,
 
 	/* Remove whitening from ciphertext */
 	if (bio_data_dir(dmreq->ctx->bio_in) != WRITE) {
-		src = kmap_atomic(sg_page(&dmreq->sg_in));
-		r = crypt_iv_tcw_whitening(cc, dmreq, src + dmreq->sg_in.offset);
-		kunmap_atomic(src);
+		src = sg_map(&dmreq->sg_in, SG_KMAP_ATOMIC);
+		if (IS_ERR(src))
+			return PTR_ERR(src);
+
+		r = crypt_iv_tcw_whitening(cc, dmreq, src);
+		sg_unmap(&dmreq->sg_in, src, SG_KMAP_ATOMIC);
 	}
 
 	/* Calculate IV */
@@ -755,9 +764,12 @@ static int crypt_iv_tcw_post(struct crypt_config *cc, u8 *iv,
 		return 0;
 
 	/* Apply whitening on ciphertext */
-	dst = kmap_atomic(sg_page(&dmreq->sg_out));
-	r = crypt_iv_tcw_whitening(cc, dmreq, dst + dmreq->sg_out.offset);
-	kunmap_atomic(dst);
+	dst = sg_map(&dmreq->sg_out, SG_KMAP_ATOMIC);
+	if (IS_ERR(dst))
+		return PTR_ERR(dst);
+
+	r = crypt_iv_tcw_whitening(cc, dmreq, dst);
+	sg_unmap(&dmreq->sg_out, dst, SG_KMAP_ATOMIC);
 
 	return r;
 }
-- 
2.1.4