Re: [RFC PATCH 2/2] arm64: add support for AES using ARMv8 Crypto Extensions

2013-09-15 Thread Russell King - ARM Linux
On Sat, Sep 14, 2013 at 05:11:53PM +0300, Jussi Kivilinna wrote:
 On 14.09.2013 16:30, Ard Biesheuvel wrote:
  On 14 September 2013 10:08, Jussi Kivilinna jussi.kivili...@iki.fi wrote:
  On 13.09.2013 18:08, Ard Biesheuvel wrote:
  This adds ARMv8 Crypto Extensions based implementations of
  AES in CBC, CTR and XTS modes.
 
  Signed-off-by: Ard Biesheuvel ard.biesheu...@linaro.org
  ---
  ..snip..
  +static int xts_set_key(struct crypto_tfm *tfm, const u8 *in_key,
  +unsigned int key_len)
  +{
  + struct crypto_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
  + u32 *flags = &tfm->crt_flags;
  + int ret;
  +
  + ret = crypto_aes_expand_key(&ctx->key1, in_key, key_len/2);
  + if (!ret)
  + ret = crypto_aes_expand_key(&ctx->key2, &in_key[key_len/2],
  + key_len/2);
 
  Use checkpatch.
 
  
  Um, I did get a bunch of errors and warnings from checkpatch.pl tbh,
  but not in this particular location. Care to elaborate?
  
 
 Well, the checkpatch.pl I had stored in my brain had become corrupted and
 kept saying that you need spaces around all operators. But apparently spaces
 are only required around assignment operators.

checkpatch is not definitive.  It is merely an implementation of the
coding style.  The coding style is the definitive documentation, and
it says about this:

| Use one space around (on each side of) most binary and ternary operators,
| such as any of these:
|
|	=  +  -  <  >  *  /  %  |  &  ^  <=  >=  ==  !=  ?  :
|
| but no space after unary operators:
|	&  *  +  -  ~  !  sizeof  typeof  alignof  __attribute__  defined
|
| no space before the postfix increment & decrement unary operators:
|	++  --
|
| no space after the prefix increment & decrement unary operators:
|	++  --
|
| and no space around the '.' and "->" structure member operators.

So, you're quite right that the above is wrong.  It needs spaces around the
/ operators.
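
For the hunk in question, the conforming version would be:

	ret = crypto_aes_expand_key(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = crypto_aes_expand_key(&ctx->key2, &in_key[key_len / 2],
					    key_len / 2);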


Re: [RFC PATCH 2/2] arm64: add support for AES using ARMv8 Crypto Extensions

2013-09-14 Thread Jussi Kivilinna
On 13.09.2013 18:08, Ard Biesheuvel wrote:
 This adds ARMv8 Crypto Extensions based implementations of
 AES in CBC, CTR and XTS modes.
 
 Signed-off-by: Ard Biesheuvel ard.biesheu...@linaro.org
 ---
..snip..
 +static int xts_set_key(struct crypto_tfm *tfm, const u8 *in_key,
 +unsigned int key_len)
 +{
 + struct crypto_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
 + u32 *flags = &tfm->crt_flags;
 + int ret;
 +
 + ret = crypto_aes_expand_key(&ctx->key1, in_key, key_len/2);
 + if (!ret)
 + ret = crypto_aes_expand_key(&ctx->key2, &in_key[key_len/2],
 + key_len/2);

Use checkpatch.

 + if (!ret)
 + return 0;
 +
 + *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
 + return -EINVAL;
 +}
 +
 +static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
 +struct scatterlist *src, unsigned int nbytes)
 +{
 + struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
 + int err, first, rounds = 6 + ctx->key_length/4;
 + struct blkcipher_walk walk;
 + unsigned int blocks;
 +
 + blkcipher_walk_init(&walk, dst, src, nbytes);
 + err = blkcipher_walk_virt(desc, &walk);
 +
 + kernel_neon_begin();

Is sleeping allowed within a kernel_neon_begin/end block? If not, you need to
clear CRYPTO_TFM_REQ_MAY_SLEEP in desc->flags; otherwise blkcipher_walk_done
might sleep.
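
Concretely, something like this before starting the walk (a sketch, assuming
the whole walk has to stay non-sleeping while the NEON unit is claimed):

	blkcipher_walk_init(&walk, dst, src, nbytes);
	/* blkcipher_walk_done() may sleep if this flag is left set,
	 * which would be invalid inside kernel_neon_begin()/end() */
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	err = blkcipher_walk_virt(desc, &walk);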

 + for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
 + aesce_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 +   (u8*)ctx->key_enc, rounds, blocks, walk.iv,
 +   first);
 +
 + err = blkcipher_walk_done(desc, &walk, blocks * AES_BLOCK_SIZE);
 + }
 + kernel_neon_end();
 +
 + /* non-integral sizes are not supported in CBC */
 + if (unlikely(walk.nbytes))
 + err = -EINVAL;

I think blkcipher_walk_done already does this check by comparing against
alg.cra_blocksize.

 +
 + return err;
 +}
..snip..
 +
 +static int ctr_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
 +struct scatterlist *src, unsigned int nbytes)
 +{
 + struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
 + int err, first, rounds = 6 + ctx->key_length/4;
 + struct blkcipher_walk walk;
 + u8 ctr[AES_BLOCK_SIZE];
 +
 + blkcipher_walk_init(&walk, dst, src, nbytes);
 + err = blkcipher_walk_virt(desc, &walk);
 +
 + memcpy(ctr, walk.iv, AES_BLOCK_SIZE);
 +
 + kernel_neon_begin();
 + for (first = 1; (nbytes = walk.nbytes); first = 0) {
 + aesce_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
 +   (u8*)ctx->key_enc, rounds, nbytes, ctr,
 +   first);
 +
 + err = blkcipher_walk_done(desc, &walk, 0);
 +
 + /* non-integral block *must* be the last one */
 + if (unlikely(walk.nbytes && (nbytes & (AES_BLOCK_SIZE-1)))) {
 + err = -EINVAL;

Other CTR implementations do not have this... not needed?

 + break;
 + }
 + }
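
For reference, the construction this loop drives is plain counter mode; a
minimal C sketch (illustrative only: aes_enc stands in for the AESE/AESMC
instruction sequence, and crypto_inc() is the big-endian counter increment
from crypto/algapi.h):

	static void ctr_sketch(u8 *dst, u8 const *src, unsigned int len,
			       u8 ctr[AES_BLOCK_SIZE],
			       void (*aes_enc)(u8 out[], u8 const in[]))
	{
		u8 ks[AES_BLOCK_SIZE];
		unsigned int i, n;

		while (len) {
			n = min_t(unsigned int, len, AES_BLOCK_SIZE);
			aes_enc(ks, ctr);	/* keystream = E_K(ctr) */
			for (i = 0; i < n; i++)
				dst[i] = src[i] ^ ks[i];
			crypto_inc(ctr, AES_BLOCK_SIZE);
			src += n;
			dst += n;
			len -= n;
		}
	}

Only the final block may be partial: a partial XOR consumes part of one
keystream block, and the walk cannot resume mid-block, which is presumably
what the check above guards against.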
..snip..
 +static struct crypto_alg aesce_cbc_algs[] = { {
 + .cra_name   = "__cbc-aes-aesce",
 + .cra_driver_name= "__driver-cbc-aes-aesce",
 + .cra_priority   = 0,
 + .cra_flags  = CRYPTO_ALG_TYPE_BLKCIPHER,
 + .cra_blocksize  = AES_BLOCK_SIZE,
 + .cra_ctxsize= sizeof(struct crypto_aes_ctx),
 + .cra_alignmask  = 0,
 + .cra_type   = crypto_blkcipher_type,
 + .cra_module = THIS_MODULE,
 + .cra_u = {
 + .blkcipher = {
 + .min_keysize= AES_MIN_KEY_SIZE,
 + .max_keysize= AES_MAX_KEY_SIZE,
 + .ivsize = AES_BLOCK_SIZE,
 + .setkey = crypto_aes_set_key,
 + .encrypt= cbc_encrypt,
 + .decrypt= cbc_decrypt,
 + },
 + },
 +}, {
 + .cra_name   = "__ctr-aes-aesce",
 + .cra_driver_name= "__driver-ctr-aes-aesce",
 + .cra_priority   = 0,
 + .cra_flags  = CRYPTO_ALG_TYPE_BLKCIPHER,
 + .cra_blocksize  = AES_BLOCK_SIZE,

CTR mode is a stream cipher, so cra_blocksize must be set to 1.

This should have been picked up by the in-kernel run-time tests; check
CONFIG_CRYPTO_MANAGER_DISABLE_TESTS (and the CONFIG_CRYPTO_TEST/tcrypt
module).
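
With cra_blocksize fixed, the entry would read (only the changed field
shown; the comment is mine):

	/* CTR is used as a stream cipher: the walk may hand back any
	 * number of bytes, so the block size exposed to the API is 1 */
	.cra_blocksize	= 1,

The templates can then be exercised at runtime via tcrypt, e.g. something
like "modprobe tcrypt mode=10" to run the AES test vectors (the mode
numbers are listed in crypto/tcrypt.c).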

 + .cra_ctxsize= sizeof(struct crypto_aes_ctx),
 + .cra_alignmask  = 0,
 + .cra_type   = crypto_blkcipher_type,
 + .cra_module = THIS_MODULE,
 + .cra_u = {
 + .blkcipher = {
 + .min_keysize= AES_MIN_KEY_SIZE,
 + .max_keysize= AES_MAX_KEY_SIZE,
 + .ivsize = AES_BLOCK_SIZE,
 + 

[RFC PATCH 2/2] arm64: add support for AES using ARMv8 Crypto Extensions

2013-09-13 Thread Ard Biesheuvel
This adds ARMv8 Crypto Extensions based implementations of
AES in CBC, CTR and XTS modes.

Signed-off-by: Ard Biesheuvel ard.biesheu...@linaro.org
---
 arch/arm64/Makefile  |   8 +-
 arch/arm64/crypto/Makefile   |  12 ++
 arch/arm64/crypto/aesce-cbc.S|  58 +++
 arch/arm64/crypto/aesce-ctr.S|  83 +
 arch/arm64/crypto/aesce-glue.c   | 352 +++
 arch/arm64/crypto/aesce-macros.S |  95 +++
 arch/arm64/crypto/aesce-xts.S| 129 ++
 crypto/Kconfig   |   7 +
 8 files changed, 741 insertions(+), 3 deletions(-)
 create mode 100644 arch/arm64/crypto/Makefile
 create mode 100644 arch/arm64/crypto/aesce-cbc.S
 create mode 100644 arch/arm64/crypto/aesce-ctr.S
 create mode 100644 arch/arm64/crypto/aesce-glue.c
 create mode 100644 arch/arm64/crypto/aesce-macros.S
 create mode 100644 arch/arm64/crypto/aesce-xts.S

diff --git a/arch/arm64/Makefile b/arch/arm64/Makefile
index d90cf79..c7d4959 100644
--- a/arch/arm64/Makefile
+++ b/arch/arm64/Makefile
@@ -36,9 +36,11 @@ TEXT_OFFSET := 0x00080000
 
 export TEXT_OFFSET GZFLAGS
 
-core-y += arch/arm64/kernel/ arch/arm64/mm/
-core-$(CONFIG_KVM) += arch/arm64/kvm/
-core-$(CONFIG_XEN) += arch/arm64/xen/
+core-y += arch/arm64/kernel/ arch/arm64/mm/
+core-$(CONFIG_KVM) += arch/arm64/kvm/
+core-$(CONFIG_XEN) += arch/arm64/xen/
+core-$(CONFIG_CRYPTO)  += arch/arm64/crypto/
+
 libs-y := arch/arm64/lib/ $(libs-y)
 libs-y += $(LIBGCC)
 
diff --git a/arch/arm64/crypto/Makefile b/arch/arm64/crypto/Makefile
new file mode 100644
index 0000000..da1a437
--- /dev/null
+++ b/arch/arm64/crypto/Makefile
@@ -0,0 +1,12 @@
+#
+# linux/arch/arm64/crypto/Makefile
+#
+# Copyright (C) 2013 Linaro Ltd ard.biesheu...@linaro.org
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License version 2 as
+# published by the Free Software Foundation.
+#
+
+aes-arm64ce-y := aesce-cbc.o aesce-ctr.o aesce-xts.o aesce-glue.o
+obj-$(CONFIG_CRYPTO_AES_ARM64CE) += aes-arm64ce.o
diff --git a/arch/arm64/crypto/aesce-cbc.S b/arch/arm64/crypto/aesce-cbc.S
new file mode 100644
index 0000000..d955bf2
--- /dev/null
+++ b/arch/arm64/crypto/aesce-cbc.S
@@ -0,0 +1,58 @@
+/*
+ * linux/arch/arm64/crypto/aesce-cbc.S - AES-CBC using ARMv8 crypto extensions
+ *
+ * Copyright (C) 2013 Linaro Ltd ard.biesheu...@linaro.org
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ */
+
+#include <linux/linkage.h>
+
+#include "aesce-macros.S"
+
+   .text
+   .arch   armv8-a+crypto
+
+   // aesce_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[], int rounds,
+   //   int blocks, u8 iv[], int first)
+   // aesce_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[], int rounds,
+   //   int blocks, u8 iv[], int first)
+
+ENTRY(aesce_cbc_encrypt)
+   tst w6, #1
+   beq .Lencloop
+
+   ld1 {v2.16b}, [x5]  // get iv
+   load_round_keys w3, x2
+
+.Lencloop:
+   ld1 {v1.16b}, [x1], #16 // get next pt block
+   eor v0.16b, v1.16b, v2.16b  // ... and xor with iv
+   encrypt_block   v2.16b, v0.16b, w3
+   st1 {v2.16b}, [x0], #16
+   subs w4, w4, #1
+   bne .Lencloop
+   ret
+ENDPROC(aesce_cbc_encrypt)
+
+
+ENTRY(aesce_cbc_decrypt)
+   tst w6, #1
+   beq .Ldecloop
+
+   ld1 {v3.16b}, [x5]  // get iv
+   load_round_keys w3, x2
+
+.Ldecloop:
+   ld1 {v1.16b}, [x1], #16 // get next ct block
+   mov v0.16b, v1.16b  // ... and copy to v0
+   decrypt_block   v2.16b, v0.16b, w3
+   eor v0.16b, v2.16b, v3.16b  // xor with iv to get pt
+   mov v3.16b, v1.16b  // ct is next round's iv
+   st1 {v0.16b}, [x0], #16
+   subs w4, w4, #1
+   bne .Ldecloop
+   ret
+ENDPROC(aesce_cbc_decrypt)
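
For readers unfamiliar with the mode, the chaining the two routines above
implement corresponds to this C sketch (illustrative only; aes_enc stands
in for the AESE/AESMC sequence driven by encrypt_block):

	static void cbc_enc_sketch(u8 *out, u8 const *in, int blocks,
				   u8 iv[AES_BLOCK_SIZE],
				   void (*aes_enc)(u8 out[], u8 const in[]))
	{
		int i;

		while (blocks--) {
			u8 buf[AES_BLOCK_SIZE];

			for (i = 0; i < AES_BLOCK_SIZE; i++)
				buf[i] = in[i] ^ iv[i];	/* xor pt with iv */
			aes_enc(out, buf);		/* ct = E_K(pt ^ iv) */
			memcpy(iv, out, AES_BLOCK_SIZE);/* ct is next iv */
			in += AES_BLOCK_SIZE;
			out += AES_BLOCK_SIZE;
		}
	}

Decryption mirrors this: each ciphertext block is decrypted, then XORed
with the previous ciphertext block (kept in v3 above) to recover the
plaintext.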
diff --git a/arch/arm64/crypto/aesce-ctr.S b/arch/arm64/crypto/aesce-ctr.S
new file mode 100644
index 0000000..5b5f02f
--- /dev/null
+++ b/arch/arm64/crypto/aesce-ctr.S
@@ -0,0 +1,83 @@
+/*
+ * linux/arch/arm64/crypto/aesce-ctr.S - AES-CTR using ARMv8 crypto extensions
+ *
+ * Copyright (C) 2013 Linaro Ltd ard.biesheu...@linaro.org
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License version 2 as
+ * published by the Free Software Foundation.
+ */
+
+#include <linux/linkage.h>
+
+#include "aesce-macros.S"
+
+   .text
+   .arch   armv8-a+crypto
+
+   //