* Lai Jiangshan ([email protected]) wrote:
> Add MIN_ALLOC_ORDER/MIN_ALLOC_SIZE for allocation.
> 
> !!Note: current MIN_ALLOC_ORDER is still 0 for debugging.

Please state in the changelog that this patch improves cache locality:
with a minimum allocation size, the low-order dummy node levels share
one contiguous allocation instead of being scattered across many tiny
ones.

> 
> Signed-off-by: Lai Jiangshan <[email protected]>
> ---
>  rculfhash.c |   29 ++++++++++++++++++++---------
>  1 files changed, 20 insertions(+), 9 deletions(-)
> 
> diff --git a/rculfhash.c b/rculfhash.c
> index 3aba9bc..4ec59f9 100644
> --- a/rculfhash.c
> +++ b/rculfhash.c
> @@ -165,10 +165,13 @@
>  #define CHAIN_LEN_TARGET             1
>  #define CHAIN_LEN_RESIZE_THRESHOLD   3
>  
> +#define MIN_ALLOC_ORDER                      0
> +#define MIN_ALLOC_SIZE                       (1UL << MIN_ALLOC_ORDER)
> +
>  /*
>   * Define the minimum table size.
>   */
> -#define MIN_TABLE_SIZE                       1
> +#define MIN_TABLE_SIZE                       MIN_ALLOC_SIZE

Instead of using a #define for the alloc/table size, shouldn't we make
it a parameter to cds_lfht_new ? I think we could have:

   unsigned long init_size,
   unsigned long min_alloc_size,

which specify:

1) the initial table size, and
2) the smallest allocation size to use (see the sketch below).
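
A minimal sketch of the creation-time handling I have in mind
(illustrative only: the other cds_lfht_new parameters are elided, the
is_power_of_two() helper and the ht->min_alloc_order field are
hypothetical; get_count_order_ulong() already exists in rculfhash.c):

   #include <assert.h>

   /* Hypothetical helper: reject sizes that are not a power of two. */
   static int is_power_of_two(unsigned long x)
   {
           return x && !(x & (x - 1));
   }

   /*
    * Inside cds_lfht_new(..., unsigned long init_size,
    *                     unsigned long min_alloc_size, ...):
    */
   assert(is_power_of_two(min_alloc_size));
   if (init_size < min_alloc_size)
           init_size = min_alloc_size;   /* never start below the minimum */
   ht->min_alloc_order = get_count_order_ulong(min_alloc_size);

That way the minimum is chosen per-table at runtime rather than fixed
at compile time.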

>  
>  #if (CAA_BITS_PER_LONG == 32)
>  #define MAX_TABLE_ORDER                      32
> @@ -1051,7 +1054,7 @@ void init_table_populate_partition(struct cds_lfht *ht, unsigned long i,
>  {
>       unsigned long j;
>  
> -     assert(i);
> +     assert(i > MIN_ALLOC_ORDER);
>       ht->cds_lfht_rcu_read_lock();
>       for (j = start; j < start + len; j++) {
>               struct cds_lfht_node *new_node =
> @@ -1090,7 +1093,7 @@ void init_table(struct cds_lfht *ht,
>       dbg_printf("init table: first_order %lu end_order %lu\n",
>                  first_order, first_order + len_order);
>       end_order = first_order + len_order;
> -     assert(first_order > 0);
> +     assert(first_order > MIN_ALLOC_ORDER);
>       for (i = first_order; i < end_order; i++) {
>               unsigned long len;
>  
> @@ -1153,7 +1156,7 @@ void remove_table_partition(struct cds_lfht *ht, unsigned long i,
>  {
>       unsigned long j;
>  
> -     assert(i);
> +     assert(i > MIN_ALLOC_ORDER);
>       ht->cds_lfht_rcu_read_lock();
>       for (j = start; j < start + len; j++) {
>               struct cds_lfht_node *fini_node =
> @@ -1192,7 +1195,7 @@ void fini_table(struct cds_lfht *ht,
>       dbg_printf("fini table: first_order %lu end_order %lu\n",
>                  first_order, first_order + len_order);
>       end_order = first_order + len_order;
> -     assert(first_order > 0);
> +     assert(first_order > MIN_ALLOC_ORDER);
>       for (i = end_order - 1; i >= first_order; i--) {
>               unsigned long len;
>  
> @@ -1243,7 +1246,7 @@ void cds_lfht_create_dummy(struct cds_lfht *ht, unsigned long size)
>       struct _cds_lfht_node *prev, *node;
>       unsigned long order, len, i, j;
>  
> -     ht->t.tbl[0] = calloc(1, sizeof(struct _cds_lfht_node));
> +     ht->t.tbl[0] = calloc(1, MIN_ALLOC_SIZE * sizeof(struct _cds_lfht_node));
>       assert(ht->t.tbl[0]);
>  
>       dbg_printf("create dummy: order %lu index %lu hash %lu\n", 0, 0, 0);
> @@ -1252,8 +1255,12 @@ void cds_lfht_create_dummy(struct cds_lfht *ht, unsigned long size)
>  
>       for (order = 1; order < get_count_order_ulong(size) + 1; order++) {
>               len = 1UL << (order - 1);
> -             ht->t.tbl[order] = calloc(1, len * sizeof(struct _cds_lfht_node));
> -             assert(ht->t.tbl[order]);
> +             if (order <= MIN_ALLOC_ORDER) {
> +                     ht->t.tbl[order] = (void *)(ht->t.tbl[0]->nodes + len);

Why do we need the (void *) cast ? Is there any way we could preserve
typing ?
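
For instance (a sketch, assuming the tbl[] entries are
struct rcu_level * and that nodes[] is the first member of
struct rcu_level, so the alias is layout-safe), the cast could name
the actual type:

	if (order <= MIN_ALLOC_ORDER) {
		/* Alias into the node array preallocated in tbl[0]. */
		ht->t.tbl[order] = (struct rcu_level *)
			(ht->t.tbl[0]->nodes + len);
	}

That keeps the compiler able to flag a mismatch if the tbl[] element
type ever changes.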

> +             } else {
> +                     ht->t.tbl[order] = calloc(1, len * sizeof(struct _cds_lfht_node));
> +                     assert(ht->t.tbl[order]);
> +             }
>  
>               i = 0;
>               prev = ht->t.tbl[i]->nodes;
> @@ -1538,7 +1545,11 @@ int cds_lfht_delete_dummy(struct cds_lfht *ht)
>                               bit_reverse_ulong(ht->t.tbl[order]->nodes[i].reverse_hash));
>                       assert(is_dummy(ht->t.tbl[order]->nodes[i].next));
>               }
> -             poison_free(ht->t.tbl[order]);
> +
> +             if (order == MIN_ALLOC_ORDER)
> +                     poison_free(ht->t.tbl[0]);
> +             else if (order > MIN_ALLOC_ORDER)
> +                     poison_free(ht->t.tbl[order]);
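
One more nit: a comment here would help, since the correctness of this
hinges on the layout set up in cds_lfht_create_dummy. A sketch of that
layout (illustrative, using MIN_ALLOC_ORDER == 2, MIN_ALLOC_SIZE == 4):

	tbl[0] -> calloc of 4 nodes: nodes[0..3]
	tbl[1] -> &tbl[0]->nodes[1]   (alias, 1 node)
	tbl[2] -> &tbl[0]->nodes[2]   (alias, 2 nodes)
	tbl[3] -> separate calloc of 4 nodes

tbl[1..MIN_ALLOC_ORDER] are interior pointers into tbl[0]'s block, so
they must never be passed to free(); instead tbl[0] is released exactly
once, when the loop reaches order == MIN_ALLOC_ORDER.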

The patch makes sense. Please resend it with the next patch round,
with the changes noted above incorporated,

Thanks!

Mathieu

>       }
>       return 0;
>  }
> -- 
> 1.7.4.4
> 

-- 
Mathieu Desnoyers
Operating System Efficiency R&D Consultant
EfficiOS Inc.
http://www.efficios.com
