Author: Nicolas Truessel <ntrues...@njsm.de>
Branch: quad-color-gc
Changeset: r87128:4fa6b4f42a48
Date: 2016-09-15 23:10 +0200
http://bitbucket.org/pypy/pypy/changeset/4fa6b4f42a48/

Log:    Probably last optimization import from qcgc repo
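        In short, the patch merges the former gray_stack and shadow_stack
        helpers into a single object_stack, moves the bump-allocator state
        into the global _qcgc_bump_allocator struct, and turns qcgc_allocate
        into an inline fast path in qcgc.h that only calls
        _qcgc_allocate_slowpath / _qcgc_allocate_large when the bump pointer
        cannot serve the request. A simplified sketch of that fast path
        follows (it mirrors the inline qcgc_allocate added below; blocktype
        bookkeeping, zeroing and logging are omitted, and the helper name
        allocate_sketch is illustrative only):

        #include "qcgc.h"   /* declarations added in this changeset */

        /* Sketch of the new allocation fast path, for illustration only. */
        static object_t *allocate_sketch(size_t size) {
                size_t cells = bytes_to_cells(size);   /* size in 16-byte cells */

                if (UNLIKELY(size >= 1 << QCGC_LARGE_ALLOC_THRESHOLD_EXP)) {
                        return _qcgc_allocate_large(size);   /* huge-block path */
                }

                cell_t *new_bump_ptr = _qcgc_bump_allocator.ptr + cells;
                if (new_bump_ptr > _qcgc_bump_allocator.end) {
                        /* bump block exhausted: renew it, possibly incmark/collect */
                        return _qcgc_allocate_slowpath(size);
                }

                /* fast path: hand out the current bump pointer and advance it */
                object_t *result = (object_t *) _qcgc_bump_allocator.ptr;
                _qcgc_bump_allocator.ptr = new_bump_ptr;
                result->flags = QCGC_GRAY_FLAG;
                return result;
        }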

diff --git a/rpython/rtyper/tool/rffi_platform.py b/rpython/rtyper/tool/rffi_platform.py
--- a/rpython/rtyper/tool/rffi_platform.py
+++ b/rpython/rtyper/tool/rffi_platform.py
@@ -899,8 +899,8 @@
             separate_module_sources = [separate_source],  # XXX
             separate_module_files = [os.path.join(library_dir, f) for f in
                 ['qcgc.c', 'src/allocator.c', 'src/arena.c', 'src/bag.c',
-                 'src/collector.c', 'src/event_logger.c', 'src/gray_stack.c',
-                 'src/hugeblocktable.c', 'src/shadow_stack.c',
+                 'src/collector.c', 'src/event_logger.c',
+                 'src/hugeblocktable.c', 'src/object_stack.c',
                  'src/signal_handler.c', 'src/weakref.c']],
             )
     return eci
diff --git a/rpython/translator/c/src/qcgc/config.h b/rpython/translator/c/src/qcgc/config.h
--- a/rpython/translator/c/src/qcgc/config.h
+++ b/rpython/translator/c/src/qcgc/config.h
@@ -10,7 +10,7 @@
  */
 #define EVENT_LOG 1                                                    // Enable event log
 #define LOGFILE "./qcgc_events.log"                    // Default logfile
-#define LOG_ALLOCATION 1                                       // Enable allocation log
+#define LOG_ALLOCATION 0                                       // Enable allocation log
 #define LOG_DUMP_FREELIST_STATS 1                      // Dump freelist stats
 
 #define QCGC_SHADOWSTACK_SIZE 163840           // Total shadowstack size
diff --git a/rpython/translator/c/src/qcgc/qcgc.c b/rpython/translator/c/src/qcgc/qcgc.c
--- a/rpython/translator/c/src/qcgc/qcgc.c
+++ b/rpython/translator/c/src/qcgc/qcgc.c
@@ -25,17 +25,14 @@
 QCGC_STATIC QCGC_INLINE void initialize_shadowstack(void);
 QCGC_STATIC QCGC_INLINE void destroy_shadowstack(void);
 
-QCGC_STATIC object_t *bump_allocate(size_t size);
-
 void qcgc_initialize(void) {
        initialize_shadowstack();
-       qcgc_state.prebuilt_objects = qcgc_shadow_stack_create(16); // XXX
+       qcgc_state.prebuilt_objects = qcgc_object_stack_create(16); // XXX
        qcgc_state.weakrefs = qcgc_weakref_bag_create(16); // XXX
-       qcgc_state.gp_gray_stack = qcgc_gray_stack_create(16); // XXX
+       qcgc_state.gp_gray_stack = qcgc_object_stack_create(16); // XXX
        qcgc_state.gray_stack_size = 0;
        qcgc_state.phase = GC_PAUSE;
        qcgc_state.cells_since_incmark = 0;
-       qcgc_state.cells_since_collect = 0;
        qcgc_state.incmark_since_sweep = 0;
        qcgc_state.free_cells = 0;
        qcgc_state.largest_free_block = 0;
@@ -61,12 +58,103 @@
        free(qcgc_state.gp_gray_stack);
 }
 
-object_t *qcgc_allocate(size_t size) {
-#if LOG_ALLOCATION
+object_t *_qcgc_allocate_large(size_t size) {
+#if CHECKED
+       assert(size >= 1<<QCGC_LARGE_ALLOC_THRESHOLD_EXP);
+#endif
+       if (UNLIKELY(qcgc_state.cells_since_incmark >
+                               qcgc_state.incmark_threshold)) {
+               if (qcgc_state.incmark_since_sweep == qcgc_state.incmark_to_sweep) {
+                       qcgc_collect();
+               } else {
+                       qcgc_incmark();
+                       qcgc_state.incmark_since_sweep++;
+               }
+       }
+
+       // FIXME: aligned_alloc requires size to be a multiple of the alignment
+       object_t *result = aligned_alloc(QCGC_ARENA_SIZE, size);
+#if QCGC_INIT_ZERO
+       memset(result, 0, size);
+#endif
+       qcgc_hbtable_insert(result);
+       result->flags = QCGC_GRAY_FLAG;
+
+       qcgc_state.cells_since_incmark += bytes_to_cells(size);
+
+       return result;
+}
+
+object_t *_qcgc_allocate_slowpath(size_t size) {
+       bool use_fit_allocator = _qcgc_bump_allocator.ptr == NULL;
        size_t cells = bytes_to_cells(size);
-       qcgc_event_logger_log(EVENT_ALLOCATE, sizeof(size_t),
-                       (uint8_t *) &cells);
+
+       if (UNLIKELY(qcgc_state.cells_since_incmark >
+                               qcgc_state.incmark_threshold)) {
+               if (qcgc_state.incmark_since_sweep == qcgc_state.incmark_to_sweep) {
+                       qcgc_reset_bump_ptr();
+                       qcgc_collect();
+                       use_fit_allocator = false; // Try using bump allocator again
+               } else {
+                       qcgc_incmark();
+                       qcgc_state.incmark_since_sweep++;
+               }
+       }
+
+       object_t *result = NULL;
+       if (!use_fit_allocator) {
+               qcgc_bump_allocator_renew_block(false);
+
+               qcgc_state.cells_since_incmark += _qcgc_bump_allocator.end -
+                       _qcgc_bump_allocator.ptr;
+
+               cell_t *new_bump_ptr = _qcgc_bump_allocator.ptr + cells;
+               if (_qcgc_bump_allocator.ptr != NULL &&
+                               new_bump_ptr <= _qcgc_bump_allocator.end) {
+                       // Bump allocate
+                       qcgc_arena_set_blocktype(qcgc_arena_addr(_qcgc_bump_allocator.ptr),
+                                       qcgc_arena_cell_index(_qcgc_bump_allocator.ptr),
+                                       BLOCK_WHITE);
+
+                       result = (object_t *) _qcgc_bump_allocator.ptr;
+                       _qcgc_bump_allocator.ptr = new_bump_ptr;
+
+#if QCGC_INIT_ZERO
+                       memset(result, 0, cells * sizeof(cell_t));
 #endif
+
+                       result->flags = QCGC_GRAY_FLAG;
+                       return result;
+               }
+       }
+
+       // Fit allocate
+       if (result != NULL) {
+               qcgc_state.cells_since_incmark += bytes_to_cells(size);
+               return result;
+       }
+       qcgc_bump_allocator_renew_block(true);
+       qcgc_state.cells_since_incmark +=
+               _qcgc_bump_allocator.end - _qcgc_bump_allocator.ptr;
+
+       cell_t *new_bump_ptr = _qcgc_bump_allocator.ptr + cells;
+       qcgc_arena_set_blocktype(qcgc_arena_addr(_qcgc_bump_allocator.ptr),
+                       qcgc_arena_cell_index(_qcgc_bump_allocator.ptr),
+                       BLOCK_WHITE);
+
+       result = (object_t *) _qcgc_bump_allocator.ptr;
+       _qcgc_bump_allocator.ptr = new_bump_ptr;
+
+#if QCGC_INIT_ZERO
+       memset(result, 0, cells * sizeof(cell_t));
+#endif
+
+       result->flags = QCGC_GRAY_FLAG;
+       return result;
+}
+
+/*
+object_t *_qcgc_allocate_slowpath(size_t size) {
        object_t *result;
 
        if (UNLIKELY(qcgc_state.cells_since_incmark >
@@ -79,27 +167,20 @@
                }
        }
 
-       if (LIKELY(size <= 1<<QCGC_LARGE_ALLOC_THRESHOLD_EXP)) {
-               // Use bump / fit allocator
-               if (qcgc_allocator_state.use_bump_allocator) {
+       // Use bump / fit allocator
+       if (_qcgc_bump_allocator.ptr != NULL) {
+               result = bump_allocate(size);
+       } else {
+               result = qcgc_fit_allocate(size);
+
+               // Fallback to bump allocator
+               if (result == NULL) {
                        result = bump_allocate(size);
-               } else {
-                       result = qcgc_fit_allocate(size);
-
-                       // Fallback to bump allocator
-                       if (result == NULL) {
-                               result = bump_allocate(size);
-                       }
                }
-               qcgc_state.free_cells -= bytes_to_cells(size);
-       } else {
-               // Use huge block allocator
-               result = qcgc_large_allocate(size);
        }
-       qcgc_state.cells_since_incmark += bytes_to_cells(size);
-       qcgc_state.cells_since_collect += bytes_to_cells(size);
        return result;
 }
+*/
 
 void qcgc_collect(void) {
        qcgc_mark();
@@ -121,7 +202,7 @@
        if (((object->flags & QCGC_PREBUILT_OBJECT) != 0) &&
                        ((object->flags & QCGC_PREBUILT_REGISTERED) == 0)) {
                object->flags |= QCGC_PREBUILT_REGISTERED;
-               qcgc_state.prebuilt_objects = qcgc_shadow_stack_push(
+               qcgc_state.prebuilt_objects = qcgc_object_stack_push(
                                qcgc_state.prebuilt_objects, object);
        }
 
@@ -137,13 +218,13 @@
                // NOTE: No mark test here, as prebuilt objects are always reachable
                // Push prebuilt object to general purpose gray stack
                qcgc_state.gray_stack_size++;
-               qcgc_state.gp_gray_stack = qcgc_gray_stack_push(
+               qcgc_state.gp_gray_stack = qcgc_object_stack_push(
                                qcgc_state.gp_gray_stack, object);
        } else if ((object_t *) qcgc_arena_addr((cell_t *) object) == object) {
                if (qcgc_hbtable_is_marked(object)) {
                        // Push huge block to general purpose gray stack
                        qcgc_state.gray_stack_size++;
-                       qcgc_state.gp_gray_stack = qcgc_gray_stack_push(
+                       qcgc_state.gp_gray_stack = qcgc_object_stack_push(
                                        qcgc_state.gp_gray_stack, object);
                }
        } else {
@@ -152,7 +233,7 @@
                        // This was black before, push it to gray stack again
                        arena_t *arena = qcgc_arena_addr((cell_t *) object);
                        qcgc_state.gray_stack_size++;
-                       arena->gray_stack = qcgc_gray_stack_push(
+                       arena->gray_stack = qcgc_object_stack_push(
                                        arena->gray_stack, object);
                }
        }
@@ -188,21 +269,13 @@
        assert(stack != NULL);
        mprotect(_trap_page_addr(stack), 4096, PROT_NONE);
 
-       qcgc_shadowstack.top = stack;
-       qcgc_shadowstack.base = stack;
+       _qcgc_shadowstack.top = stack;
+       _qcgc_shadowstack.base = stack;
 }
 
 QCGC_STATIC void destroy_shadowstack(void) {
-       mprotect(_trap_page_addr(qcgc_shadowstack.base), 4096, PROT_READ |
+       mprotect(_trap_page_addr(_qcgc_shadowstack.base), 4096, PROT_READ |
                                PROT_WRITE);
 
-       free(qcgc_shadowstack.base);
+       free(_qcgc_shadowstack.base);
 }
-
-QCGC_STATIC object_t *bump_allocate(size_t size) {
-       if (UNLIKELY(qcgc_allocator_state.bump_state.remaining_cells <
-                       bytes_to_cells(size))) {
-               qcgc_bump_allocator_renew_block();
-       }
-       return qcgc_bump_allocate(size);
-}
diff --git a/rpython/translator/c/src/qcgc/qcgc.h b/rpython/translator/c/src/qcgc/qcgc.h
--- a/rpython/translator/c/src/qcgc/qcgc.h
+++ b/rpython/translator/c/src/qcgc/qcgc.h
@@ -2,32 +2,180 @@
  * @file       qcgc.h
  */
 
-#pragma once
+#ifndef __QCGC_H
+#define __QCGC_H
 
 #include "config.h"
 
 #include <assert.h>
 #include <stddef.h>
 #include <stdint.h>
+#include <string.h>
+
+#include "src/event_logger.h"
+
+/*******************************************************************************
+ * Types and global state                                                      *
+ ******************************************************************************/
 
 /**
  * Object Layout.
  */
+typedef struct object_s {
+       uint32_t flags;
+} object_t;
+
 #define QCGC_GRAY_FLAG (1<<0)
 #define QCGC_PREBUILT_OBJECT (1<<1)
 #define QCGC_PREBUILT_REGISTERED (1<<2)
 
-typedef struct object_s {
-       uint32_t flags;
-} object_t;
-
 /**
  * Shadow stack
  */
 struct qcgc_shadowstack {
        object_t **top;
        object_t **base;
-} qcgc_shadowstack;
+} _qcgc_shadowstack;
+
+/**
+ * The smallest unit of memory that can be addressed and allocated.
+ */
+typedef uint8_t cell_t[16];
+
+/**
+ * Bump allocator
+ */
+struct qcgc_bump_allocator {
+       cell_t *ptr;
+       cell_t *end;
+} _qcgc_bump_allocator;
+
+/**
+ * Object stack
+ */
+typedef struct object_stack_s {
+       size_t count;
+       size_t size;
+       object_t *items[];
+} object_stack_t;
+
+/**
+ * Arena
+ */
+
+#define QCGC_ARENA_SIZE (1<<QCGC_ARENA_SIZE_EXP)
+
+#define QCGC_ARENA_BITMAP_SIZE (1<<(QCGC_ARENA_SIZE_EXP - 7)) // 1 / 128
+#define QCGC_ARENA_CELLS_COUNT (1<<(QCGC_ARENA_SIZE_EXP - 4))
+
+#define QCGC_ARENA_FIRST_CELL_INDEX (1<<(QCGC_ARENA_SIZE_EXP - 10))
+
+typedef union {
+       struct {
+               union {
+                       object_stack_t *gray_stack;
+                       uint8_t block_bitmap[QCGC_ARENA_BITMAP_SIZE];
+               };
+               uint8_t mark_bitmap[QCGC_ARENA_BITMAP_SIZE];
+       };
+       cell_t cells[QCGC_ARENA_CELLS_COUNT];
+} arena_t;
+
+typedef enum blocktype {
+       BLOCK_EXTENT,
+       BLOCK_FREE,
+       BLOCK_WHITE,
+       BLOCK_BLACK,
+} blocktype_t;
+
+/*******************************************************************************
+ * Internal functions                                                          *
+ ******************************************************************************/
+
+/**
+ * Allocate large block. May trigger garbage collection.
+ *
+ * @param      size    Object size in bytes
+ * @return     Pointer to memory region large enough to hold size bytes or NULL in
+ *                     case of errors
+ */
+object_t *_qcgc_allocate_large(size_t size);
+
+/**
+ * Allocator slowpath. May trigger garbage collection.
+ *
+ * @param      size    Object size in bytes
+ * @return     Pointer to memory region large enough to hold size bytes or NULL in
+ *                     case of errors
+ */
+object_t *_qcgc_allocate_slowpath(size_t size);
+
+/**
+ * Turns bytes to cells.
+ */
+QCGC_STATIC QCGC_INLINE size_t bytes_to_cells(size_t bytes) {
+       return (bytes + sizeof(cell_t) - 1) / sizeof(cell_t);
+}
+
+/**
+ * Arena pointer for given cell.
+ *
+ * @param      ptr             Pointer to cell for which you want to know the corresponding
+ *                                     arena
+ * @return     The arena the pointer belongs to
+ */
+QCGC_STATIC QCGC_INLINE arena_t *qcgc_arena_addr(cell_t *ptr) {
+       return (arena_t *)((intptr_t) ptr & ~(QCGC_ARENA_SIZE - 1));
+}
+
+/**
+ * Index of cell in arena.
+ *
+ * @param      ptr             Pointer to cell for which you want to know the cell index
+ * @return     Index of the cell to which ptr points to
+ */
+QCGC_STATIC QCGC_INLINE size_t qcgc_arena_cell_index(cell_t *ptr) {
+       return (size_t)((intptr_t) ptr & (QCGC_ARENA_SIZE - 1)) >> 4;
+}
+
+/**
+ * Set blocktype.
+ *
+ * @param      ptr             Pointer to cell for which you want to set the blocktype
+ * @param      type    Blocktype that should be set
+ */
+QCGC_STATIC QCGC_INLINE void qcgc_arena_set_blocktype(arena_t *arena,
+               size_t index, blocktype_t type) {
+#if CHECKED
+       assert(arena != NULL);
+       assert(index >= QCGC_ARENA_FIRST_CELL_INDEX);
+       assert(index < QCGC_ARENA_CELLS_COUNT);
+#endif
+       size_t byte = index / 8;
+       uint8_t mask = 0x1 << (index % 8);
+       switch(type) {
+               case BLOCK_EXTENT:
+                       arena->block_bitmap[byte] &= ~mask;
+                       arena->mark_bitmap[byte] &= ~mask;
+                       break;
+               case BLOCK_FREE:
+                       arena->block_bitmap[byte] &= ~mask;
+                       arena->mark_bitmap[byte] |= mask;
+                       break;
+               case BLOCK_WHITE:
+                       arena->block_bitmap[byte] |= mask;
+                       arena->mark_bitmap[byte] &= ~mask;
+                       break;
+               case BLOCK_BLACK:
+                       arena->block_bitmap[byte] |= mask;
+                       arena->mark_bitmap[byte] |= mask;
+                       break;
+       }
+}
+
+/*******************************************************************************
+ * Public functions                                                            *
+ ******************************************************************************/
 
 /**
  * Initialize the garbage collector.
@@ -46,7 +194,41 @@
  * @return     Pointer to memory region large enough to hold size bytes or NULL in
  *                     case of errors
  */
-object_t *qcgc_allocate(size_t size);
+QCGC_STATIC QCGC_INLINE object_t *qcgc_allocate(size_t size) {
+#if CHECKED
+       assert(size > 0);
+#endif
+       size_t cells = bytes_to_cells(size);
+
+#if LOG_ALLOCATION
+       qcgc_event_logger_log(EVENT_ALLOCATE, sizeof(size_t),
+                       (uint8_t *) &cells);
+#endif
+       if (UNLIKELY(size >= 1<<QCGC_LARGE_ALLOC_THRESHOLD_EXP)) {
+               return _qcgc_allocate_large(size);
+       }
+
+       cell_t *new_bump_ptr = _qcgc_bump_allocator.ptr + cells;
+       // XXX: UNLIKELY?
+       if (new_bump_ptr > _qcgc_bump_allocator.end) {
+               return _qcgc_allocate_slowpath(size);
+       }
+
+       qcgc_arena_set_blocktype(qcgc_arena_addr(_qcgc_bump_allocator.ptr),
+                       qcgc_arena_cell_index(_qcgc_bump_allocator.ptr),
+                       BLOCK_WHITE);
+
+       object_t *result = (object_t *) _qcgc_bump_allocator.ptr;
+       _qcgc_bump_allocator.ptr = new_bump_ptr;
+
+
+#if QCGC_INIT_ZERO
+       memset(result, 0, cells * sizeof(cell_t));
+#endif
+
+       result->flags = QCGC_GRAY_FLAG;
+       return result;
+}
 
 /**
  * Push root object.
@@ -54,8 +236,8 @@
  * @param      object  The root object
  */
 QCGC_STATIC QCGC_INLINE void qcgc_push_root(object_t *object) {
-       *qcgc_shadowstack.top = object;
-       qcgc_shadowstack.top++;
+       *_qcgc_shadowstack.top = object;
+       _qcgc_shadowstack.top++;
 }
 
 /**
@@ -64,8 +246,8 @@
  * @param      count   Number of objects to pop
  */
 QCGC_STATIC QCGC_INLINE void qcgc_pop_root(size_t count) {
-       qcgc_shadowstack.top -= count;
-       assert(qcgc_shadowstack.base <= qcgc_shadowstack.top);
+       _qcgc_shadowstack.top -= count;
+       assert(_qcgc_shadowstack.base <= _qcgc_shadowstack.top);
 }
 
 /**
@@ -100,3 +282,5 @@
  * @param      visit   The function to be called on the referenced objects
  */
 extern void qcgc_trace_cb(object_t *object, void (*visit)(object_t *object));
+
+#endif
diff --git a/rpython/translator/c/src/qcgc/src/allocator.c b/rpython/translator/c/src/qcgc/src/allocator.c
--- a/rpython/translator/c/src/qcgc/src/allocator.c
+++ b/rpython/translator/c/src/qcgc/src/allocator.c
@@ -20,12 +20,6 @@
                qcgc_arena_bag_create(QCGC_ARENA_BAG_INIT_SIZE);
        qcgc_allocator_state.free_arenas = qcgc_arena_bag_create(4); // XXX
 
-       qcgc_allocator_state.use_bump_allocator = true;
-
-       // Bump Allocator
-       qcgc_allocator_state.bump_state.bump_ptr = NULL;
-       qcgc_allocator_state.bump_state.remaining_cells = 0;
-
        // Fit Allocator
        for (size_t i = 0; i < QCGC_SMALL_FREE_LISTS; i++) {
                qcgc_allocator_state.fit_state.small_free_list[i] =
@@ -36,6 +30,10 @@
                qcgc_allocator_state.fit_state.large_free_list[i] =
                        qcgc_exp_free_list_create(QCGC_LARGE_FREE_LIST_INIT_SIZE);
        }
+
+       _qcgc_bump_allocator.ptr = NULL;
+       _qcgc_bump_allocator.end = NULL;
+       qcgc_bump_allocator_renew_block(true);
 }
 
 void qcgc_allocator_destroy(void) {
@@ -67,31 +65,20 @@
  * Bump Allocator                                                              *
  ******************************************************************************/
 
-void qcgc_bump_allocator_renew_block(void) {
+void qcgc_bump_allocator_renew_block(bool force_arena) {
 #if CHECKED
-       if (qcgc_allocator_state.bump_state.remaining_cells > 0) {
-               for (size_t i = 1; i < qcgc_allocator_state.bump_state.remaining_cells;
-                               i++) {
-                       assert(qcgc_arena_get_blocktype(qcgc_arena_addr(
-                                                       qcgc_allocator_state.bump_state.bump_ptr + i),
-                                               qcgc_arena_cell_index(
-                                                       qcgc_allocator_state.bump_state.bump_ptr + i))
+       if (_qcgc_bump_allocator.end > _qcgc_bump_allocator.ptr) {
+               for (cell_t *it = _qcgc_bump_allocator.ptr + 1;
+                               it < _qcgc_bump_allocator.end; it++) {
+                       assert(qcgc_arena_get_blocktype(qcgc_arena_addr(it),
+                                               qcgc_arena_cell_index(it))
                                        == BLOCK_EXTENT);
                }
        }
 #endif
-       // Add remaining memory to fit allocator
-       if (qcgc_allocator_state.bump_state.remaining_cells > 0) {
-               qcgc_arena_set_blocktype(
-                               qcgc_arena_addr(qcgc_allocator_state.bump_state.bump_ptr),
-                               qcgc_arena_cell_index(
-                                       qcgc_allocator_state.bump_state.bump_ptr),
-                               BLOCK_FREE);
-               qcgc_fit_allocator_add(qcgc_allocator_state.bump_state.bump_ptr,
-                               qcgc_allocator_state.bump_state.remaining_cells);
-       }
+       qcgc_reset_bump_ptr();
 
-       // Try finding some huge block from fit allocator
+       // Always use a huge block if there is one
        exp_free_list_t *free_list = qcgc_allocator_state.fit_state.
                large_free_list[QCGC_LARGE_FREE_LISTS - 1];
 
@@ -100,33 +87,45 @@
                bump_allocator_assign(free_list->items[0].ptr,
                                free_list->items[0].size);
                free_list = qcgc_exp_free_list_remove_index(free_list, 0);
+               qcgc_allocator_state.fit_state.
+                       large_free_list[QCGC_LARGE_FREE_LISTS - 1] = free_list;
+               qcgc_state.free_cells -=
+                       _qcgc_bump_allocator.end - _qcgc_bump_allocator.ptr;
        } else {
-               // Grab a new arena
-               arena_t *arena = qcgc_arena_create();
-               qcgc_state.free_cells += QCGC_ARENA_CELLS_COUNT -
-                       QCGC_ARENA_FIRST_CELL_INDEX;
-               bump_allocator_assign(&(arena->cells[QCGC_ARENA_FIRST_CELL_INDEX]),
-                               QCGC_ARENA_CELLS_COUNT - QCGC_ARENA_FIRST_CELL_INDEX);
-               qcgc_allocator_state.arenas =
-                       qcgc_arena_bag_add(qcgc_allocator_state.arenas, arena);
+               if (qcgc_allocator_state.free_arenas->count > 0) {
+                       // Reuse arena
+                       arena_t *arena = qcgc_allocator_state.free_arenas->items[0];
+                       qcgc_allocator_state.free_arenas = qcgc_arena_bag_remove_index(
+                                       qcgc_allocator_state.free_arenas, 0);
+                       bump_allocator_assign(&(arena->cells[QCGC_ARENA_FIRST_CELL_INDEX]),
+                                       QCGC_ARENA_CELLS_COUNT - QCGC_ARENA_FIRST_CELL_INDEX);
+                       qcgc_allocator_state.arenas =
+                               qcgc_arena_bag_add(qcgc_allocator_state.arenas, arena);
+               } else {
+                       // FIXME: Nifty decision making whether to allocate new arena
+                       bool new_arena = false;
+                       if (force_arena || new_arena) {
+                               arena_t *arena = qcgc_arena_create();
+                               bump_allocator_assign(&(arena->cells[QCGC_ARENA_FIRST_CELL_INDEX]),
+                                               QCGC_ARENA_CELLS_COUNT - QCGC_ARENA_FIRST_CELL_INDEX);
+                               qcgc_allocator_state.arenas =
+                                       qcgc_arena_bag_add(qcgc_allocator_state.arenas, arena);
+                       }
+               }
        }
-
-       qcgc_allocator_state.fit_state.
-               large_free_list[QCGC_LARGE_FREE_LISTS - 1] = free_list;
 #if CHECKED
-       assert(qcgc_allocator_state.bump_state.bump_ptr != NULL);
-       assert(qcgc_arena_get_blocktype(
-                               qcgc_arena_addr( qcgc_allocator_state.bump_state.bump_ptr),
-                               qcgc_arena_cell_index(
-                                       qcgc_allocator_state.bump_state.bump_ptr))
-                       == BLOCK_FREE);
-       for (size_t i = 1; i < qcgc_allocator_state.bump_state.remaining_cells;
-                       i++) {
-               assert(qcgc_arena_get_blocktype(qcgc_arena_addr(
-                                               qcgc_allocator_state.bump_state.bump_ptr + i),
+       assert(!force_arena || _qcgc_bump_allocator.ptr != NULL);
+       if (_qcgc_bump_allocator.ptr != NULL) {
+               assert(qcgc_arena_get_blocktype(
+                                       qcgc_arena_addr( _qcgc_bump_allocator.ptr),
                                        qcgc_arena_cell_index(
-                                               qcgc_allocator_state.bump_state.bump_ptr + i))
-                               == BLOCK_EXTENT);
+                                               _qcgc_bump_allocator.ptr))
+                               == BLOCK_FREE);
+               for (cell_t *it = _qcgc_bump_allocator.ptr + 1;
+                               it < _qcgc_bump_allocator.end; it++) {
+                       assert(qcgc_arena_get_blocktype(qcgc_arena_addr(it),
+                                               qcgc_arena_cell_index(it)) == BLOCK_EXTENT);
+               }
        }
 #endif
 }
@@ -140,8 +139,8 @@
                                        qcgc_arena_cell_index(ptr + i)) == BLOCK_EXTENT);
        }
 #endif
-       qcgc_allocator_state.bump_state.bump_ptr = ptr;
-       qcgc_allocator_state.bump_state.remaining_cells = cells;
+       _qcgc_bump_allocator.ptr = ptr;
+       _qcgc_bump_allocator.end = ptr + cells;
 }
 
 /*******************************************************************************
@@ -173,6 +172,7 @@
        memset(result, 0, cells * sizeof(cell_t));
 #endif
 
+       qcgc_state.free_cells -= cells;
        result->flags = QCGC_GRAY_FLAG;
        return result;
 }
@@ -204,6 +204,7 @@
                                        qcgc_allocator_state.fit_state.large_free_list[index],
                                        (struct exp_free_list_item_s) {ptr, cells});
        }
+       qcgc_state.free_cells += cells;
 }
 
 QCGC_STATIC cell_t *fit_allocator_small_first_fit(size_t index, size_t cells) {
@@ -227,6 +228,7 @@
                                qcgc_arena_set_blocktype(qcgc_arena_addr(result + cells),
                                                qcgc_arena_cell_index(result + cells), BLOCK_FREE);
                                qcgc_fit_allocator_add(result + cells, list_cell_size - cells);
+                               qcgc_state.free_cells -= list_cell_size - cells;
                        }
                        return result;
                }
@@ -274,6 +276,7 @@
                        qcgc_arena_set_blocktype(qcgc_arena_addr(result + cells),
                                        qcgc_arena_cell_index(result + cells), BLOCK_FREE);
                        qcgc_fit_allocator_add(result + cells, best_fit_cells - cells);
+                       qcgc_state.free_cells -= best_fit_cells - cells;
                }
        } else {
                // No best fit, go for first fit
@@ -301,6 +304,7 @@
                                qcgc_arena_set_blocktype(qcgc_arena_addr(item.ptr + cells),
                                                qcgc_arena_cell_index(item.ptr + cells), BLOCK_FREE);
                                qcgc_fit_allocator_add(item.ptr + cells, item.size - cells);
+                               qcgc_state.free_cells -= item.size - cells;
                        }
                        return item.ptr;
                }
diff --git a/rpython/translator/c/src/qcgc/src/allocator.h b/rpython/translator/c/src/qcgc/src/allocator.h
--- a/rpython/translator/c/src/qcgc/src/allocator.h
+++ b/rpython/translator/c/src/qcgc/src/allocator.h
@@ -41,15 +41,10 @@
 struct qcgc_allocator_state {
        arena_bag_t *arenas;
        arena_bag_t *free_arenas;
-       struct bump_state {
-               cell_t *bump_ptr;
-               size_t remaining_cells;
-       } bump_state;
        struct fit_state {
                linear_free_list_t *small_free_list[QCGC_SMALL_FREE_LISTS];
                exp_free_list_t *large_free_list[QCGC_LARGE_FREE_LISTS];
        } fit_state;
-       bool use_bump_allocator;
 } qcgc_allocator_state;
 
 /**
@@ -77,7 +72,6 @@
  */
 void qcgc_fit_allocator_empty_lists(void);
 
-
 /**
  * Add memory to free lists
  *
@@ -86,58 +80,26 @@
  */
 void qcgc_fit_allocator_add(cell_t *ptr, size_t cells);
 
-QCGC_STATIC QCGC_INLINE size_t bytes_to_cells(size_t bytes) {
-       return (bytes + sizeof(cell_t) - 1) / sizeof(cell_t);
+/**
+ * Reset bump pointer
+ */
+QCGC_STATIC QCGC_INLINE void qcgc_reset_bump_ptr(void) {
+       if (_qcgc_bump_allocator.end > _qcgc_bump_allocator.ptr) {
+               qcgc_arena_set_blocktype(
+                               qcgc_arena_addr(_qcgc_bump_allocator.ptr),
+                               qcgc_arena_cell_index(
+                                       _qcgc_bump_allocator.ptr),
+                               BLOCK_FREE);
+               qcgc_fit_allocator_add(_qcgc_bump_allocator.ptr,
+                               _qcgc_bump_allocator.end - _qcgc_bump_allocator.ptr);
+       }
+       _qcgc_bump_allocator.ptr = NULL;
+       _qcgc_bump_allocator.end = NULL;
 }
 
 /**
  * Find a new block for the bump allocator
+ *
+ * @param      force_arena     Force generation of new arena if no block is found
  */
-void qcgc_bump_allocator_renew_block(void);
-
-/**
- * Allocate new memory region using bump allocator.
- * Bump allocator must have enough space for desired bytes
- * (client is responsible, use qcgc_bump_allocator_renew_block)
- *
- * @param      bytes   Desired size of the memory region in bytes
- * @return     Pointer to memory large enough to hold size bytes, NULL in case of
- *                     errors, already zero initialized if QCGC_INIT_ZERO is set
- */
-QCGC_STATIC QCGC_INLINE object_t *qcgc_bump_allocate(size_t bytes) {
-       size_t cells = bytes_to_cells(bytes);
-
-       cell_t *mem = qcgc_allocator_state.bump_state.bump_ptr;
-
-       qcgc_arena_set_blocktype(qcgc_arena_addr(mem), qcgc_arena_cell_index(mem),
-                       BLOCK_WHITE);
-
-       qcgc_allocator_state.bump_state.bump_ptr += cells;
-       qcgc_allocator_state.bump_state.remaining_cells -= cells;
-
-       object_t *result = (object_t *) mem;
-
-#if QCGC_INIT_ZERO
-       memset(result, 0, cells * sizeof(cell_t));
-#endif
-
-       result->flags = QCGC_GRAY_FLAG;
-       return result;
-}
-
-/**
- * Allocate new memory region using huge block allocator
- *
- * @param      bytes   Desired size of the memory region in bytes
- * @return     Pointer to memory large enough to hold size bytes, NULL in case of
- *                     errors, already zero initialized if QCGC_INIT_ZERO is set
- */
-QCGC_STATIC QCGC_INLINE object_t *qcgc_large_allocate(size_t bytes) {
-       object_t *result = aligned_alloc(QCGC_ARENA_SIZE, bytes);
-#if QCGC_INIT_ZERO
-       memset(result, 0, bytes);
-#endif
-       qcgc_hbtable_insert(result);
-       result->flags = QCGC_GRAY_FLAG;
-       return result;
-}
+void qcgc_bump_allocator_renew_block(bool force_arena);
diff --git a/rpython/translator/c/src/qcgc/src/arena.c b/rpython/translator/c/src/qcgc/src/arena.c
--- a/rpython/translator/c/src/qcgc/src/arena.c
+++ b/rpython/translator/c/src/qcgc/src/arena.c
@@ -12,6 +12,7 @@
 #include "allocator.h"
 #include "event_logger.h"
 #include "gc_state.h"
+#include "object_stack.h"
 
 arena_t *qcgc_arena_create(void) {
        qcgc_event_logger_log(EVENT_NEW_ARENA, 0, NULL);
@@ -45,7 +46,7 @@
        result->mark_bitmap[QCGC_ARENA_FIRST_CELL_INDEX / 8] = 1;
 
        // Create gray stack
-       result->gray_stack = qcgc_gray_stack_create(QCGC_GRAY_STACK_INIT_SIZE);
+       result->gray_stack = qcgc_object_stack_create(QCGC_GRAY_STACK_INIT_SIZE);
        return result;
 }
 
@@ -94,7 +95,7 @@
 #if CHECKED
        assert(arena != NULL);
        assert(qcgc_arena_is_coalesced(arena));
-       assert(qcgc_arena_addr(qcgc_allocator_state.bump_state.bump_ptr) == arena);
+       assert(qcgc_arena_addr(_qcgc_bump_allocator.ptr) == arena);
 #endif
        // Ignore free cell / largest block counting here, as blocks are not
        // registered in free lists as well
@@ -121,7 +122,7 @@
        assert(arena != NULL);
        assert(qcgc_arena_is_coalesced(arena));
 #endif
-       if (qcgc_arena_addr(qcgc_allocator_state.bump_state.bump_ptr) == arena) {
+       if (qcgc_arena_addr(_qcgc_bump_allocator.ptr) == arena) {
                return qcgc_arena_pseudo_sweep(arena);
        }
 
@@ -176,7 +177,6 @@
                                                memset(arena->cells + last_free_cell, 0,
                                                                sizeof(cell_t) * (cell - last_free_cell));
 #endif
-                                               qcgc_state.free_cells += cell - last_free_cell;
                                                qcgc_state.largest_free_block = MAX(
                                                                qcgc_state.largest_free_block,
                                                                cell - last_free_cell);
@@ -195,7 +195,6 @@
                memset(arena->cells + last_free_cell, 0,
                                sizeof(cell_t) * (QCGC_ARENA_CELLS_COUNT - last_free_cell));
 #endif
-               qcgc_state.free_cells += QCGC_ARENA_CELLS_COUNT - last_free_cell;
                qcgc_state.largest_free_block = MAX(
                                qcgc_state.largest_free_block,
                                QCGC_ARENA_CELLS_COUNT - last_free_cell);
diff --git a/rpython/translator/c/src/qcgc/src/arena.h b/rpython/translator/c/src/qcgc/src/arena.h
--- a/rpython/translator/c/src/qcgc/src/arena.h
+++ b/rpython/translator/c/src/qcgc/src/arena.h
@@ -9,51 +9,6 @@
 #include <stdbool.h>
 #include <sys/types.h>
 
-#include "gray_stack.h"
-
-#define QCGC_ARENA_SIZE (1<<QCGC_ARENA_SIZE_EXP)
-
-#define QCGC_ARENA_BITMAP_SIZE (1<<(QCGC_ARENA_SIZE_EXP - 7)) // 1 / 128
-#define QCGC_ARENA_CELLS_COUNT (1<<(QCGC_ARENA_SIZE_EXP - 4))
-
-#define QCGC_ARENA_FIRST_CELL_INDEX (1<<(QCGC_ARENA_SIZE_EXP - 10))
-
-/**
- * @typedef cell_t
- * The smallest unit of memory that can be addressed and allocated in arenas.
- */
-typedef uint8_t cell_t[16];
-
-/**
- * @typedef arena_t
- * Arena object
- */
-typedef union {
-       struct {
-               union {
-                       gray_stack_t *gray_stack;
-                       uint8_t block_bitmap[QCGC_ARENA_BITMAP_SIZE];
-               };
-               uint8_t mark_bitmap[QCGC_ARENA_BITMAP_SIZE];
-       };
-       cell_t cells[QCGC_ARENA_CELLS_COUNT];
-} arena_t;
-
-/**
- * @typedef blocktype_t
- * Blocktypes:
- * - BLOCK_EXTENT      Extension of previous block
- * - BLOCK_FREE                Free block
- * - BLOCK_WHITE       Allocated block, marked white
- * - BLOCK_BLACK       Allocated block, marked black
- */
-typedef enum blocktype {
-       BLOCK_EXTENT,
-       BLOCK_FREE,
-       BLOCK_WHITE,
-       BLOCK_BLACK,
-} blocktype_t;
-
 /**
  * Create a new arena.
  *
@@ -107,27 +62,6 @@
  ******************************************************************************/
 
 /**
- * Arena pointer for given cell.
- *
- * @param      ptr             Pointer to cell for which you want to know the corresponding
- *                                     arena
- * @return     The arena the pointer belongs to
- */
-QCGC_STATIC QCGC_INLINE arena_t *qcgc_arena_addr(cell_t *ptr) {
-       return (arena_t *)((intptr_t) ptr & ~(QCGC_ARENA_SIZE - 1));
-}
-
-/**
- * Index of cell in arena.
- *
- * @param      ptr             Pointer to cell for which you want to know the cell index
- * @return     Index of the cell to which ptr points to
- */
-QCGC_STATIC QCGC_INLINE size_t qcgc_arena_cell_index(cell_t *ptr) {
-       return (size_t)((intptr_t) ptr & (QCGC_ARENA_SIZE - 1)) >> 4;
-}
-
-/**
  * Get blocktype.
  *
  * @param      arena           Arena in which to perform the lookup
@@ -162,41 +96,6 @@
        }
 }
 
-/**
- * Set blocktype.
- *
- * @param      ptr             Pointer to cell for which you want to set the blocktype
- * @param      type    Blocktype that should be set
- */
-QCGC_STATIC QCGC_INLINE void qcgc_arena_set_blocktype(arena_t *arena,
-               size_t index, blocktype_t type) {
-#if CHECKED
-       assert(arena != NULL);
-       assert(index >= QCGC_ARENA_FIRST_CELL_INDEX);
-       assert(index < QCGC_ARENA_CELLS_COUNT);
-#endif
-       size_t byte = index / 8;
-       uint8_t mask = 0x1 << (index % 8);
-       switch(type) {
-               case BLOCK_EXTENT:
-                       arena->block_bitmap[byte] &= ~mask;
-                       arena->mark_bitmap[byte] &= ~mask;
-                       break;
-               case BLOCK_FREE:
-                       arena->block_bitmap[byte] &= ~mask;
-                       arena->mark_bitmap[byte] |= mask;
-                       break;
-               case BLOCK_WHITE:
-                       arena->block_bitmap[byte] |= mask;
-                       arena->mark_bitmap[byte] &= ~mask;
-                       break;
-               case BLOCK_BLACK:
-                       arena->block_bitmap[byte] |= mask;
-                       arena->mark_bitmap[byte] |= mask;
-                       break;
-       }
-}
-
 
/*******************************************************************************
  * Debug functions                                                             *
  ******************************************************************************/
diff --git a/rpython/translator/c/src/qcgc/src/collector.c b/rpython/translator/c/src/qcgc/src/collector.c
--- a/rpython/translator/c/src/qcgc/src/collector.c
+++ b/rpython/translator/c/src/qcgc/src/collector.c
@@ -12,16 +12,19 @@
 QCGC_STATIC void mark_setup(bool incremental);
 QCGC_STATIC void mark_cleanup(bool incremental);
 
+QCGC_STATIC void check_free_cells(void);
+QCGC_STATIC void check_largest_free_block(void);
+
 void qcgc_mark(void) {
        mark_setup(false);
 
        while (qcgc_state.gray_stack_size > 0) {
                // General purpose gray stack (prebuilt objects and huge blocks)
 
-               while (qcgc_state.gp_gray_stack->index > 0) {
-                       object_t *top = qcgc_gray_stack_top(qcgc_state.gp_gray_stack);
+               while (qcgc_state.gp_gray_stack->count > 0) {
+                       object_t *top = qcgc_object_stack_top(qcgc_state.gp_gray_stack);
                        qcgc_state.gray_stack_size--;
-                       qcgc_state.gp_gray_stack = qcgc_gray_stack_pop(
+                       qcgc_state.gp_gray_stack = qcgc_object_stack_pop(
                                        qcgc_state.gp_gray_stack);
                        qcgc_pop_object(top);
                }
@@ -30,10 +33,10 @@
                for (size_t i = 0; i < qcgc_allocator_state.arenas->count; i++) {
                        arena_t *arena = qcgc_allocator_state.arenas->items[i];
 
-                       while (arena->gray_stack->index > 0) {
-                               object_t *top = qcgc_gray_stack_top(arena->gray_stack);
+                       while (arena->gray_stack->count > 0) {
+                               object_t *top = qcgc_object_stack_top(arena->gray_stack);
                                qcgc_state.gray_stack_size--;
-                               arena->gray_stack = qcgc_gray_stack_pop(arena->gray_stack);
+                               arena->gray_stack = qcgc_object_stack_pop(arena->gray_stack);
                                qcgc_pop_object(top);
                        }
                }
@@ -50,13 +53,13 @@
        mark_setup(true);
 
        // General purpose gray stack (prebuilt objects and huge blocks)
-       size_t to_process = MAX(qcgc_state.gp_gray_stack->index / 2,
+       size_t to_process = MAX(qcgc_state.gp_gray_stack->count / 2,
                        QCGC_INC_MARK_MIN);
 
-       while (to_process > 0 && qcgc_state.gp_gray_stack->index > 0) {
-               object_t *top = qcgc_gray_stack_top(qcgc_state.gp_gray_stack);
+       while (to_process > 0 && qcgc_state.gp_gray_stack->count > 0) {
+               object_t *top = qcgc_object_stack_top(qcgc_state.gp_gray_stack);
                qcgc_state.gray_stack_size--;
-               qcgc_state.gp_gray_stack = qcgc_gray_stack_pop(
+               qcgc_state.gp_gray_stack = qcgc_object_stack_pop(
                                qcgc_state.gp_gray_stack);
                qcgc_pop_object(top);
                to_process--;
@@ -65,12 +68,12 @@
        // Arena gray stacks
        for (size_t i = 0; i < qcgc_allocator_state.arenas->count; i++) {
                arena_t *arena = qcgc_allocator_state.arenas->items[i];
-               to_process = MAX(arena->gray_stack->index / 2, QCGC_INC_MARK_MIN);
+               to_process = MAX(arena->gray_stack->count / 2, QCGC_INC_MARK_MIN);
 
-               while (to_process > 0 && arena->gray_stack->index > 0) {
-                       object_t *top = qcgc_gray_stack_top(arena->gray_stack);
+               while (to_process > 0 && arena->gray_stack->count > 0) {
+                       object_t *top = qcgc_object_stack_top(arena->gray_stack);
                        qcgc_state.gray_stack_size--;
-                       arena->gray_stack = qcgc_gray_stack_pop(arena->gray_stack);
+                       arena->gray_stack = qcgc_object_stack_pop(arena->gray_stack);
                        qcgc_pop_object(top);
                        to_process--;
                }
@@ -104,7 +107,7 @@
                size_t count = qcgc_state.prebuilt_objects->count;
                for (size_t i = 0; i < count; i++) {
                        qcgc_state.gray_stack_size++;
-                       qcgc_state.gp_gray_stack = qcgc_gray_stack_push(
+                       qcgc_state.gp_gray_stack = qcgc_object_stack_push(
                                        qcgc_state.gp_gray_stack,
                                        qcgc_state.prebuilt_objects->items[i]);
                }
@@ -113,8 +116,8 @@
        qcgc_state.phase = GC_MARK;
 
        // Always push all roots to make shadowstack pushes faster
-       for (object_t **it = qcgc_shadowstack.base;
-               it < qcgc_shadowstack.top;
+       for (object_t **it = _qcgc_shadowstack.base;
+               it < _qcgc_shadowstack.top;
                it++) {
                qcgc_push_object(*it);
        }
@@ -163,7 +166,7 @@
                                // Did mark it / was white before
                                object->flags |= QCGC_GRAY_FLAG;
                                qcgc_state.gray_stack_size++;
-                               qcgc_state.gp_gray_stack = qcgc_gray_stack_push(
+                               qcgc_state.gp_gray_stack = qcgc_object_stack_push(
                                                qcgc_state.gp_gray_stack, object);
                        }
                        return;
@@ -176,7 +179,7 @@
                        object->flags |= QCGC_GRAY_FLAG;
                        qcgc_arena_set_blocktype(arena, index, BLOCK_BLACK);
                        qcgc_state.gray_stack_size++;
-                       arena->gray_stack = qcgc_gray_stack_push(arena->gray_stack, object);
+                       arena->gray_stack = qcgc_object_stack_push(arena->gray_stack, object);
                }
        }
 }
@@ -184,6 +187,7 @@
 void qcgc_sweep(void) {
 #if CHECKED
        assert(qcgc_state.phase == GC_COLLECT);
+       check_free_cells();
 #endif
        {
                struct log_info_s {
@@ -225,38 +229,11 @@
        // Fragmentation = 1 - (largest block / total free space)
        // Use bump allocator when fragmentation < 50%
 #if CHECKED
-       size_t free_cells = 0;
-       size_t largest_free_block = 0;
-               for (size_t i = 0; i < QCGC_SMALL_FREE_LISTS; i++) {
-                       if (qcgc_allocator_state.fit_state.small_free_list[i]->count > 0) {
-                               largest_free_block = i + 1;
-                       }
-                       free_cells += qcgc_allocator_state.fit_state.small_free_list[i]
-                               ->count * (i + 1);
-               }
-               for (size_t i = 0; i < QCGC_LARGE_FREE_LISTS; i++) {
-                       for (size_t j = 0; j < qcgc_allocator_state.fit_state.
-                                       large_free_list[i]->count; j++) {
-                               free_cells += qcgc_allocator_state.fit_state.large_free_list[i]
-                                       ->items[j].size;
-                               largest_free_block = MAX(largest_free_block,
-                                       qcgc_allocator_state.fit_state.large_free_list[i]
-                                       ->items[j].size);
-                       }
-               }
-       assert(free_cells == qcgc_state.free_cells);
-       assert(largest_free_block == qcgc_state.largest_free_block);
-       assert(free_cells <= qcgc_allocator_state.arenas->count * (QCGC_ARENA_CELLS_COUNT - QCGC_ARENA_FIRST_CELL_INDEX));
-       assert(qcgc_allocator_state.arenas->count *
-                       (QCGC_ARENA_CELLS_COUNT - QCGC_ARENA_FIRST_CELL_INDEX) 
>=
-                       free_cells);
+       check_free_cells();
+       check_largest_free_block();
 #endif
-       qcgc_allocator_state.use_bump_allocator = qcgc_state.free_cells <
-               2 * qcgc_state.largest_free_block;
-
        update_weakrefs();
 
-       qcgc_state.free_cells += qcgc_allocator_state.bump_state.remaining_cells;
        {
                struct log_info_s {
                        size_t arenas;
@@ -296,3 +273,40 @@
 #endif
 }
 
+void check_free_cells(void) {
+       size_t free_cells = 0;
+               for (size_t i = 0; i < QCGC_SMALL_FREE_LISTS; i++) {
+                       free_cells += qcgc_allocator_state.fit_state.small_free_list[i]
+                               ->count * (i + 1);
+               }
+               for (size_t i = 0; i < QCGC_LARGE_FREE_LISTS; i++) {
+                       for (size_t j = 0; j < qcgc_allocator_state.fit_state.
+                                       large_free_list[i]->count; j++) {
+                               free_cells += qcgc_allocator_state.fit_state.large_free_list[i]
+                                       ->items[j].size;
+                       }
+               }
+       assert(free_cells == qcgc_state.free_cells);
+       assert(free_cells <= qcgc_allocator_state.arenas->count * (QCGC_ARENA_CELLS_COUNT - QCGC_ARENA_FIRST_CELL_INDEX));
+       assert(qcgc_allocator_state.arenas->count *
+                       (QCGC_ARENA_CELLS_COUNT - QCGC_ARENA_FIRST_CELL_INDEX) >=
+                       free_cells);
+}
+
+void check_largest_free_block(void) {
+       size_t largest_free_block = 0;
+               for (size_t i = 0; i < QCGC_SMALL_FREE_LISTS; i++) {
+                       if (qcgc_allocator_state.fit_state.small_free_list[i]->count > 0) {
+                               largest_free_block = i + 1;
+                       }
+               }
+               for (size_t i = 0; i < QCGC_LARGE_FREE_LISTS; i++) {
+                       for (size_t j = 0; j < qcgc_allocator_state.fit_state.
+                                       large_free_list[i]->count; j++) {
+                               largest_free_block = MAX(largest_free_block,
+                                       qcgc_allocator_state.fit_state.large_free_list[i]
+                                       ->items[j].size);
+                       }
+               }
+       assert(largest_free_block == qcgc_state.largest_free_block);
+}
diff --git a/rpython/translator/c/src/qcgc/src/gc_state.h b/rpython/translator/c/src/qcgc/src/gc_state.h
--- a/rpython/translator/c/src/qcgc/src/gc_state.h
+++ b/rpython/translator/c/src/qcgc/src/gc_state.h
@@ -5,8 +5,7 @@
 #include <stddef.h>
 
 #include "bag.h"
-#include "gray_stack.h"
-#include "shadow_stack.h"
+#include "object_stack.h"
 
 /**
  * @typedef gc_state_t
@@ -27,14 +26,13 @@
  * Global state of the garbage collector
  */
 struct qcgc_state {
-       shadow_stack_t *prebuilt_objects;
+       object_stack_t *prebuilt_objects;
        weakref_bag_t *weakrefs;
-       gray_stack_t *gp_gray_stack;
+       object_stack_t *gp_gray_stack;
        size_t gray_stack_size;
        gc_phase_t phase;
 
        size_t cells_since_incmark;
-       size_t cells_since_collect;
        size_t incmark_since_sweep;
        size_t incmark_threshold;
        size_t incmark_to_sweep;
diff --git a/rpython/translator/c/src/qcgc/src/gray_stack.c b/rpython/translator/c/src/qcgc/src/gray_stack.c
deleted file mode 100644
--- a/rpython/translator/c/src/qcgc/src/gray_stack.c
+++ /dev/null
@@ -1,28 +0,0 @@
-#include "gray_stack.h"
-
-#include <stdlib.h>
-
-QCGC_STATIC size_t gray_stack_size(size_t size);
-
-gray_stack_t *qcgc_gray_stack_create(size_t size) {
-       gray_stack_t *result = (gray_stack_t *) malloc(gray_stack_size(size));
-       result->size = size;
-       result->index = 0;
-       return result;
-}
-
-QCGC_STATIC size_t gray_stack_size(size_t size) {
-       return (sizeof(gray_stack_t) + size * sizeof(object_t *));
-}
-
-gray_stack_t *gray_stack_grow(gray_stack_t *stack) {
-       stack = (gray_stack_t *) realloc(stack, gray_stack_size(stack->size * 2));
-       stack->size *= 2;
-       return stack;
-}
-
-gray_stack_t *gray_stack_shrink(gray_stack_t *stack) {
-       stack = (gray_stack_t *) realloc(stack, gray_stack_size(stack->size / 2));
-       stack->size /= 2;
-       return stack;
-}
diff --git a/rpython/translator/c/src/qcgc/src/gray_stack.h b/rpython/translator/c/src/qcgc/src/gray_stack.h
deleted file mode 100644
--- a/rpython/translator/c/src/qcgc/src/gray_stack.h
+++ /dev/null
@@ -1,46 +0,0 @@
-#pragma once
-
-#include "../qcgc.h"
-
-typedef struct gray_stack_s {
-       size_t index;
-       size_t size;
-       object_t *items[];
-} gray_stack_t;
-
-__attribute__ ((warn_unused_result))
-gray_stack_t *qcgc_gray_stack_create(size_t size);
-
-__attribute__ ((warn_unused_result))
-gray_stack_t *gray_stack_grow(gray_stack_t *stack);
-
-__attribute__ ((warn_unused_result))
-gray_stack_t *gray_stack_shrink(gray_stack_t *stack);
-
-__attribute__ ((warn_unused_result))
-QCGC_STATIC QCGC_INLINE gray_stack_t *qcgc_gray_stack_push(
-               gray_stack_t *stack, object_t *item) {
-       if (stack->size == stack->index) {
-               stack = gray_stack_grow(stack);
-       }
-       stack->items[stack->index] = item;
-       stack->index++;
-       return stack;
-}
-
-QCGC_STATIC QCGC_INLINE object_t *qcgc_gray_stack_top(gray_stack_t *stack) {
-#if CHECKED
-       assert(stack->index != 0);
-#endif
-       return stack->items[stack->index - 1];
-}
-
-__attribute__ ((warn_unused_result))
-QCGC_STATIC QCGC_INLINE gray_stack_t *qcgc_gray_stack_pop(gray_stack_t *stack) {
-       // TODO: Add lower bound for size (config.h)
-       if (stack->index < stack->size / 4) {
-               stack = gray_stack_shrink(stack);
-       }
-       stack->index--;
-       return stack;
-}
diff --git a/rpython/translator/c/src/qcgc/src/hugeblocktable.h b/rpython/translator/c/src/qcgc/src/hugeblocktable.h
--- a/rpython/translator/c/src/qcgc/src/hugeblocktable.h
+++ b/rpython/translator/c/src/qcgc/src/hugeblocktable.h
@@ -5,7 +5,6 @@
 #include <stdbool.h>
 
 #include "bag.h"
-#include "gray_stack.h"
 
 // Choosing a prime number, hoping for good results
 #define QCGC_HBTABLE_BUCKETS 61
diff --git a/rpython/translator/c/src/qcgc/src/object_stack.c b/rpython/translator/c/src/qcgc/src/object_stack.c
new file mode 100644
--- /dev/null
+++ b/rpython/translator/c/src/qcgc/src/object_stack.c
@@ -0,0 +1,30 @@
+#include "object_stack.h"
+
+#include <stdlib.h>
+
+QCGC_STATIC QCGC_INLINE size_t object_stack_size(size_t size);
+
+object_stack_t *qcgc_object_stack_create(size_t size) {
+       object_stack_t *result = (object_stack_t *) malloc(object_stack_size(size));
+       result->size = size;
+       result->count = 0;
+       return result;
+}
+
+QCGC_STATIC size_t object_stack_size(size_t size) {
+       return (sizeof(object_stack_t) + size * sizeof(object_t *));
+}
+
+object_stack_t *object_stack_grow(object_stack_t *stack) {
+       stack = (object_stack_t *) realloc(stack,
+                       object_stack_size(stack->size * 2));
+       stack->size *= 2;
+       return stack;
+}
+
+object_stack_t *object_stack_shrink(object_stack_t *stack) {
+       stack = (object_stack_t *) realloc(stack,
+                       object_stack_size(stack->size / 2));
+       stack->size /= 2;
+       return stack;
+}
diff --git a/rpython/translator/c/src/qcgc/src/object_stack.h b/rpython/translator/c/src/qcgc/src/object_stack.h
new file mode 100644
--- /dev/null
+++ b/rpython/translator/c/src/qcgc/src/object_stack.h
@@ -0,0 +1,41 @@
+#pragma once
+
+#include "../qcgc.h"
+
+__attribute__ ((warn_unused_result))
+object_stack_t *qcgc_object_stack_create(size_t size);
+
+__attribute__ ((warn_unused_result))
+object_stack_t *object_stack_grow(object_stack_t *stack);
+
+__attribute__ ((warn_unused_result))
+object_stack_t *object_stack_shrink(object_stack_t *stack);
+
+__attribute__ ((warn_unused_result))
+QCGC_STATIC QCGC_INLINE object_stack_t *qcgc_object_stack_push(
+               object_stack_t *stack, object_t *item) {
+       if (stack->size == stack->count) {
+               stack = object_stack_grow(stack);
+       }
+       stack->items[stack->count] = item;
+       stack->count++;
+       return stack;
+}
+
+QCGC_STATIC QCGC_INLINE object_t *qcgc_object_stack_top(object_stack_t *stack) {
+#if CHECKED
+       assert(stack->count != 0);
+#endif
+       return stack->items[stack->count - 1];
+}
+
+__attribute__ ((warn_unused_result))
+QCGC_STATIC QCGC_INLINE object_stack_t *qcgc_object_stack_pop(
+               object_stack_t *stack) {
+       // TODO: Add lower bound for size (config.h)
+       if (stack->count < stack->size / 4) {
+               stack = object_stack_shrink(stack);
+       }
+       stack->count--;
+       return stack;
+}
diff --git a/rpython/translator/c/src/qcgc/src/shadow_stack.c b/rpython/translator/c/src/qcgc/src/shadow_stack.c
deleted file mode 100644
--- a/rpython/translator/c/src/qcgc/src/shadow_stack.c
+++ /dev/null
@@ -1,56 +0,0 @@
-#include "shadow_stack.h"
-
-#include <assert.h>
-#include <stdlib.h>
-
-QCGC_STATIC size_t shadow_stack_size(size_t size);
-QCGC_STATIC shadow_stack_t *shadow_stack_grow(shadow_stack_t *stack);
-QCGC_STATIC shadow_stack_t *shadow_stack_shrink(shadow_stack_t *stack);
-
-shadow_stack_t *qcgc_shadow_stack_create(size_t size) {
-       shadow_stack_t *result = (shadow_stack_t *) malloc(shadow_stack_size(size));
-       result->size = size;
-       result->count = 0;
-       return result;
-}
-
-shadow_stack_t *qcgc_shadow_stack_push(shadow_stack_t *stack, object_t *item) {
-       if (stack->size == stack->count) {
-               stack = shadow_stack_grow(stack);
-       }
-       stack->items[stack->count] = item;
-       stack->count++;
-       return stack;
-}
-
-object_t *qcgc_shadow_stack_top(shadow_stack_t *stack) {
-#if CHECKED
-       assert(stack->count != 0);
-#endif
-       return stack->items[stack->count - 1];
-}
-
-shadow_stack_t *qcgc_shadow_stack_pop(shadow_stack_t *stack) {
-       // TODO: Add lower bound for size (config.h)
-       if (stack->count < stack->size / 4) {
-               stack = shadow_stack_shrink(stack);
-       }
-       stack->count--;
-       return stack;
-}
-
-QCGC_STATIC size_t shadow_stack_size(size_t size) {
-       return (sizeof(shadow_stack_t) + size * sizeof(object_t *));
-}
-
-QCGC_STATIC shadow_stack_t *shadow_stack_grow(shadow_stack_t *stack) {
-       stack = (shadow_stack_t *) realloc(stack, shadow_stack_size(stack->size * 2));
-       stack->size *= 2;
-       return stack;
-}
-
-QCGC_STATIC shadow_stack_t *shadow_stack_shrink(shadow_stack_t *stack) {
-       stack = (shadow_stack_t *) realloc(stack, shadow_stack_size(stack->size / 2));
-       stack->size /= 2;
-       return stack;
-}
diff --git a/rpython/translator/c/src/qcgc/src/shadow_stack.h b/rpython/translator/c/src/qcgc/src/shadow_stack.h
deleted file mode 100644
--- a/rpython/translator/c/src/qcgc/src/shadow_stack.h
+++ /dev/null
@@ -1,20 +0,0 @@
-#pragma once
-
-#include "../qcgc.h"
-
-typedef struct shadow_stack_s {
-       size_t count;
-       size_t size;
-       object_t *items[];
-} shadow_stack_t;
-
-__attribute__ ((warn_unused_result))
-shadow_stack_t *qcgc_shadow_stack_create(size_t size);
-
-__attribute__ ((warn_unused_result))
-shadow_stack_t *qcgc_shadow_stack_push(shadow_stack_t *stack, object_t *item);
-
-object_t *qcgc_shadow_stack_top(shadow_stack_t *stack);
-
-__attribute__ ((warn_unused_result))
-shadow_stack_t *qcgc_shadow_stack_pop(shadow_stack_t *stack);
diff --git a/rpython/translator/c/src/qcgc/src/signal_handler.c b/rpython/translator/c/src/qcgc/src/signal_handler.c
--- a/rpython/translator/c/src/qcgc/src/signal_handler.c
+++ b/rpython/translator/c/src/qcgc/src/signal_handler.c
@@ -43,7 +43,7 @@
 }
 
 QCGC_STATIC bool is_stack_overflow(void *addr) {
-       void *shadow_stack_end = (void *)(qcgc_shadowstack.base +
+       void *shadow_stack_end = (void *)(_qcgc_shadowstack.base +
                QCGC_SHADOWSTACK_SIZE);
        return (addr >= shadow_stack_end && addr < shadow_stack_end + 8192);
 }
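
For context, the object_stack that replaces both gray_stack and shadow_stack is
a resizable array whose push and pop may realloc the backing storage, so callers
must always store the returned pointer, as the collector code above does. A
hypothetical caller (illustration only, not part of the patch):

        #include "src/object_stack.h"   /* added in this changeset */

        /* Hypothetical usage sketch: push one object, then drain the stack. */
        static object_stack_t *drain_sketch(object_stack_t *stack, object_t *obj) {
                stack = qcgc_object_stack_push(stack, obj);     /* push may realloc */

                while (stack->count > 0) {
                        object_t *top = qcgc_object_stack_top(stack);
                        stack = qcgc_object_stack_pop(stack);   /* pop may shrink */
                        (void) top;                             /* ... process top ... */
                }
                return stack;
        }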