RE: [PATCH v5 2/4] siphash: add Nu{32,64} helpers

2016-12-16 Thread George Spelvin
Jason A. Donenfeld wrote:
> Isn't that equivalent to:
>   v0 = key[0];
>   v1 = key[1];
>   v2 = key[0] ^ (0x736f6d6570736575ULL ^ 0x646f72616e646f6dULL);
>   v3 = key[1] ^ (0x646f72616e646f6dULL ^ 0x7465646279746573ULL);

(That amounts to pre-XORing key[] with the first two constants, which,
if the constants are random in the first place, can be a no-op.)  Other
than the typo in the v2 line, yes.  If the key is non-public, then you
can XOR an arbitrary constant into both halves to slightly speed up the
startup.

(Nits: the v2 line should XOR in the third constant, 0x6c7967656e657261,
rather than the second; you don't need to parenthesize associative
operators like xor; and the "ull" suffix is redundant here.)
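
For concreteness, a sketch of that pre-XORed setup (not from the patch;
"prekey[]" is a made-up name for the key with the first two constants
already folded in at key-generation time):

	/* prekey[0] = key[0] ^ 0x736f6d6570736575; computed once, offline */
	/* prekey[1] = key[1] ^ 0x646f72616e646f6d; a no-op for a random key */
	v0 = prekey[0];
	v1 = prekey[1];
	v2 = prekey[0] ^ 0x736f6d6570736575 ^ 0x6c7967656e657261;
	v3 = prekey[1] ^ 0x646f72616e646f6d ^ 0x7465646279746573;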

> Those constants also look like ASCII strings.

They are.  The ASCII is "somepseudorandomlygeneratedbytes".
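
To see it, decode each 64-bit constant as eight big-endian bytes
(illustrative standalone C, not part of the patch):

	#include <stdio.h>
	#include <stdint.h>

	int main(void)
	{
		const uint64_t c[4] = {
			0x736f6d6570736575ULL, 0x646f72616e646f6dULL,
			0x6c7967656e657261ULL, 0x7465646279746573ULL,
		};

		for (int i = 0; i < 4; i++)
			for (int s = 56; s >= 0; s -= 8)
				putchar((int)((c[i] >> s) & 0xff));
		putchar('\n');	/* prints "somepseudorandomlygeneratedbytes" */
		return 0;
	}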

> What cryptographic analysis has been done on the values?

They're "nothing up my sleeve numbers".

They're arbitrary numbers, and almost any other values would do exactly
as well.  The main properties are:

1) They're different (particularly v0 != v2 and v1 != v3), and
2) Neither they, nor their xor, is rotationally symmetric like 0x…
   (Because SipHash is mostly rotationally symmetric, broken only by the
   interruption of the carry chain at the msbit, it helps slightly
   to break this up at the beginning.)

Those exact values only matter for portability.  If you don't need anyone
else to be able to compute matching outputs, then you could use any other
convenient constants (like the MD5 round constants).
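
(To illustrate that last point, and not as a recommendation: a private
variant could swap in any fixed values satisfying the two properties
above; only interoperability is lost.  "MY_PREAMBLE" is just a
placeholder name, and the constants below are the leading hex digits
of pi.)

	/* Non-portable sketch: same shape as the patch's PREAMBLE, but with
	 * arbitrary local constants.  Outputs will not match anyone else's
	 * SipHash. */
	#define MY_PREAMBLE(len) \
		u64 v0 = 0x243f6a8885a308d3ULL; \
		u64 v1 = 0x13198a2e03707344ULL; \
		u64 v2 = 0xa4093822299f31d0ULL; \
		u64 v3 = 0x082efa98ec4e6c89ULL; \
		u64 b = ((u64)len) << 56; \
		v3 ^= key[1]; \
		v2 ^= key[0]; \
		v1 ^= key[1]; \
		v0 ^= key[0];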


RE: [PATCH v5 2/4] siphash: add Nu{32,64} helpers

2016-12-16 Thread David Laight
From: Jason A. Donenfeld
> Sent: 15 December 2016 20:30
> These restore parity with the jhash interface by providing high
> performance helpers for common input sizes.
...
> +#define PREAMBLE(len) \
> + u64 v0 = 0x736f6d6570736575ULL; \
> + u64 v1 = 0x646f72616e646f6dULL; \
> + u64 v2 = 0x6c7967656e657261ULL; \
> + u64 v3 = 0x7465646279746573ULL; \
> + u64 b = ((u64)len) << 56; \
> + v3 ^= key[1]; \
> + v2 ^= key[0]; \
> + v1 ^= key[1]; \
> + v0 ^= key[0];

Isn't that equivalent to:
	v0 = key[0];
	v1 = key[1];
	v2 = key[0] ^ (0x736f6d6570736575ULL ^ 0x646f72616e646f6dULL);
	v3 = key[1] ^ (0x646f72616e646f6dULL ^ 0x7465646279746573ULL);

Those constants also look like ASCII strings.
What cryptographic analysis has been done on the values?

David



[PATCH v5 2/4] siphash: add Nu{32,64} helpers

2016-12-15 Thread Jason A. Donenfeld
These restore parity with the jhash interface by providing high
performance helpers for common input sizes.
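
An illustrative call site (hypothetical, not added by this patch) would
keep a random siphash_key_t and pass fixed-width fields straight in:

	#include <linux/cache.h>
	#include <linux/siphash.h>

	/* flow_key is filled once with get_random_bytes() at init time. */
	static siphash_key_t flow_key __read_mostly;

	static u64 flow_hash(u32 saddr, u32 daddr, u32 sport, u32 dport)
	{
		return siphash_4u32(saddr, daddr, sport, dport, flow_key);
	}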

Signed-off-by: Jason A. Donenfeld 
Cc: Tom Herbert 
---
 include/linux/siphash.h |  33 ++
 lib/siphash.c   | 157 +---
 lib/test_siphash.c  |  18 ++
 3 files changed, 172 insertions(+), 36 deletions(-)

diff --git a/include/linux/siphash.h b/include/linux/siphash.h
index 145cf5667078..6f5a08a0fc7e 100644
--- a/include/linux/siphash.h
+++ b/include/linux/siphash.h
@@ -29,4 +29,37 @@ static inline u64 siphash_unaligned(const void *data, size_t len,
 u64 siphash_unaligned(const void *data, size_t len, const siphash_key_t key);
 #endif
 
+u64 siphash_1u64(const u64 a, const siphash_key_t key);
+u64 siphash_2u64(const u64 a, const u64 b, const siphash_key_t key);
+u64 siphash_3u64(const u64 a, const u64 b, const u64 c,
+const siphash_key_t key);
+u64 siphash_4u64(const u64 a, const u64 b, const u64 c, const u64 d,
+const siphash_key_t key);
+
+static inline u64 siphash_2u32(const u32 a, const u32 b, const siphash_key_t key)
+{
+   return siphash_1u64((u64)b << 32 | a, key);
+}
+
+static inline u64 siphash_4u32(const u32 a, const u32 b, const u32 c, const u32 d,
+  const siphash_key_t key)
+{
+   return siphash_2u64((u64)b << 32 | a, (u64)d << 32 | c, key);
+}
+
+static inline u64 siphash_6u32(const u32 a, const u32 b, const u32 c, const u32 d,
+  const u32 e, const u32 f, const siphash_key_t key)
+{
+   return siphash_3u64((u64)b << 32 | a, (u64)d << 32 | c, (u64)f << 32 | e,
+   key);
+}
+
+static inline u64 siphash_8u32(const u32 a, const u32 b, const u32 c, const u32 d,
+  const u32 e, const u32 f, const u32 g, const u32 h,
+  const siphash_key_t key)
+{
+   return siphash_4u64((u64)b << 32 | a, (u64)d << 32 | c, (u64)f << 32 | e,
+   (u64)h << 32 | g, key);
+}
+
 #endif /* _LINUX_SIPHASH_H */
diff --git a/lib/siphash.c b/lib/siphash.c
index afc13cbb1b78..970c083ab06a 100644
--- a/lib/siphash.c
+++ b/lib/siphash.c
@@ -25,6 +25,29 @@
v2 += v1; v1 = rol64(v1, 17); v1 ^= v2; v2 = rol64(v2, 32); \
} while(0)
 
+#define PREAMBLE(len) \
+   u64 v0 = 0x736f6d6570736575ULL; \
+   u64 v1 = 0x646f72616e646f6dULL; \
+   u64 v2 = 0x6c7967656e657261ULL; \
+   u64 v3 = 0x7465646279746573ULL; \
+   u64 b = ((u64)len) << 56; \
+   v3 ^= key[1]; \
+   v2 ^= key[0]; \
+   v1 ^= key[1]; \
+   v0 ^= key[0];
+
+#define POSTAMBLE \
+   v3 ^= b; \
+   SIPROUND; \
+   SIPROUND; \
+   v0 ^= b; \
+   v2 ^= 0xff; \
+   SIPROUND; \
+   SIPROUND; \
+   SIPROUND; \
+   SIPROUND; \
+   return (v0 ^ v1) ^ (v2 ^ v3);
+
 /**
  * siphash - compute 64-bit siphash PRF value
  * @data: buffer to hash, must be aligned to SIPHASH_ALIGNMENT
@@ -33,18 +56,10 @@
  */
 u64 siphash(const void *data, size_t len, const siphash_key_t key)
 {
-   u64 v0 = 0x736f6d6570736575ULL;
-   u64 v1 = 0x646f72616e646f6dULL;
-   u64 v2 = 0x6c7967656e657261ULL;
-   u64 v3 = 0x7465646279746573ULL;
-   u64 b = ((u64)len) << 56;
-   u64 m;
const u8 *end = data + len - (len % sizeof(u64));
const u8 left = len & (sizeof(u64) - 1);
-   v3 ^= key[1];
-   v2 ^= key[0];
-   v1 ^= key[1];
-   v0 ^= key[0];
+   u64 m;
+   PREAMBLE(len)
for (; data != end; data += sizeof(u64)) {
m = le64_to_cpup(data);
v3 ^= m;
@@ -67,16 +82,7 @@ u64 siphash(const void *data, size_t len, const siphash_key_t key)
case 1: b |= end[0];
}
 #endif
-   v3 ^= b;
-   SIPROUND;
-   SIPROUND;
-   v0 ^= b;
-   v2 ^= 0xff;
-   SIPROUND;
-   SIPROUND;
-   SIPROUND;
-   SIPROUND;
-   return (v0 ^ v1) ^ (v2 ^ v3);
+   POSTAMBLE
 }
 EXPORT_SYMBOL(siphash);
 
@@ -89,18 +95,10 @@ EXPORT_SYMBOL(siphash);
  */
 u64 siphash_unaligned(const void *data, size_t len, const siphash_key_t key)
 {
-   u64 v0 = 0x736f6d6570736575ULL;
-   u64 v1 = 0x646f72616e646f6dULL;
-   u64 v2 = 0x6c7967656e657261ULL;
-   u64 v3 = 0x7465646279746573ULL;
-   u64 b = ((u64)len) << 56;
-   u64 m;
const u8 *end = data + len - (len % sizeof(u64));
const u8 left = len & (sizeof(u64) - 1);
-   v3 ^= key[1];
-   v2 ^= key[0];
-   v1 ^= key[1];
-   v0 ^= key[0];
+   u64 m;
+   PREAMBLE(len)
for (; data != end; data += sizeof(u64)) {
m = get_unaligned_le64(data);
v3 ^= m;
@@ -123,16 +121,103 @@ u64 siphash_unaligned(const void *data, size_t len, const siphash_key_t key)
case 1: b |= bytes[0];
}
 #endif
-   v3 ^= b;
+   POSTAMBLE
+}
+EXPORT_SYMBOL(siphash_unaligned);
+#endif
+
+/**
+ *