Introduce type-aware kmalloc-family helpers to replace the common
idioms for single, array, and flexible object allocations:

        ptr = kmalloc(sizeof(*ptr), gfp);
        ptr = kzalloc(sizeof(*ptr), gfp);
        ptr = kmalloc_array(count, sizeof(*ptr), gfp);
        ptr = kcalloc(count, sizeof(*ptr), gfp);
        ptr = kmalloc(struct_size(ptr, flex_member, count), gfp);

These become, respectively:

        kmalloc_obj(p, gfp);
        kzalloc_obj(p, gfp);
        kmalloc_obj(p, count, gfp);
        kzalloc_obj(p, count, gfp);
        kmalloc_obj(p, flex_member, count, gfp);

These each return the size of the allocation, so that other common
idioms can be converted easily as well. For example:

        info->size = struct_size(ptr, flex_member, count);
        ptr = kmalloc(info->size, gfp);

becomes:

        info->size = kmalloc_obj(ptr, flex_member, count, gfp);

Internal introspection of allocated type also becomes possible, allowing
for alignment-aware choices and future hardening work. For example,
adding __alignof(*ptr) as an argument to the internal allocators so that
appropriate/efficient alignment choices can be made, or being able to
correctly choose per-allocation offset randomization within a bucket
that does not break alignment requirements.

Additionally, once __builtin_set_counted_by() (or equivalent) is added
by GCC and Clang, it will be possible to automatically set the counted
member of a struct with a counted_by FAM, further eliminating open-coded
redundant initializations:

        info->size = struct_size(ptr, flex_member, count);
        ptr = kmalloc(info->size, gfp);
        ptr->flex_count = count;

becomes (i.e. unchanged from earlier example):

        info->size = kmalloc_obj(ptr, flex_member, count, gfp);

Replacing all existing simple code patterns via Coccinelle[1] shows what
could be replaced immediately (saving roughly 1,500 lines):

 7040 files changed, 14128 insertions(+), 15557 deletions(-)

Link: https://github.com/kees/kernel-tools/blob/trunk/coccinelle/examples/kmalloc_obj-assign-size.cocci [1]
Signed-off-by: Kees Cook <[email protected]>
---
Cc: Vlastimil Babka <[email protected]>
Cc: Christoph Lameter <[email protected]>
Cc: Pekka Enberg <[email protected]>
Cc: David Rientjes <[email protected]>
Cc: Joonsoo Kim <[email protected]>
Cc: Andrew Morton <[email protected]>
Cc: Roman Gushchin <[email protected]>
Cc: Hyeonggon Yoo <[email protected]>
Cc: Gustavo A. R. Silva <[email protected]>
Cc: Bill Wendling <[email protected]>
Cc: Justin Stitt <[email protected]>
Cc: Jann Horn <[email protected]>
Cc: Przemek Kitszel <[email protected]>
Cc: Marco Elver <[email protected]>
Cc: [email protected]
---
 include/linux/slab.h | 38 ++++++++++++++++++++++++++++++++++++++
 1 file changed, 38 insertions(+)

diff --git a/include/linux/slab.h b/include/linux/slab.h
index 7247e217e21b..3817554f2d51 100644
--- a/include/linux/slab.h
+++ b/include/linux/slab.h
@@ -665,6 +665,44 @@ static __always_inline __alloc_size(1) void *kmalloc_noprof(size_t size, gfp_t f
 }
 #define kmalloc(...)				alloc_hooks(kmalloc_noprof(__VA_ARGS__))
 
+#define __alloc_obj3(ALLOC, P, COUNT, FLAGS)                   \
+({                                                             \
+       size_t __obj_size = size_mul(sizeof(*P), COUNT);        \
+       void *__obj_ptr;                                        \
+       (P) = __obj_ptr = ALLOC(__obj_size, FLAGS);             \
+       if (!__obj_ptr)                                         \
+               __obj_size = 0;                                 \
+       __obj_size;                                             \
+})
+
+#define __alloc_obj2(ALLOC, P, FLAGS)  __alloc_obj3(ALLOC, P, 1, FLAGS)
+
+#define __alloc_obj4(ALLOC, P, FAM, COUNT, FLAGS)              \
+({                                                             \
+       size_t __obj_size = struct_size(P, FAM, COUNT);         \
+       void *__obj_ptr;                                        \
+       (P) = __obj_ptr = ALLOC(__obj_size, FLAGS);             \
+       if (!__obj_ptr)                                         \
+               __obj_size = 0;                                 \
+       __obj_size;                                             \
+})
+
+#define kmalloc_obj(...)                                       \
+       CONCATENATE(__alloc_obj,                                \
+                   COUNT_ARGS(__VA_ARGS__))(kmalloc, __VA_ARGS__)
+
+#define kzalloc_obj(...)                                       \
+       CONCATENATE(__alloc_obj,                                \
+                   COUNT_ARGS(__VA_ARGS__))(kzalloc, __VA_ARGS__)
+
+#define kvmalloc_obj(...)                                      \
+       CONCATENATE(__alloc_obj,                                \
+                   COUNT_ARGS(__VA_ARGS__))(kvmalloc, __VA_ARGS__)
+
+#define kvzalloc_obj(...)                                      \
+       CONCATENATE(__alloc_obj,                                \
+                   COUNT_ARGS(__VA_ARGS__))(kvzalloc, __VA_ARGS__)
+
 static __always_inline __alloc_size(1) void *kmalloc_node_noprof(size_t size, gfp_t flags, int node)
 {
        if (__builtin_constant_p(size) && size) {
-- 
2.34.1


Reply via email to