Author: Nicolas Truessel <[email protected]>
Branch: quad-color-gc
Changeset: r87100:b4a1d5e8166e
Date: 2016-09-13 21:11 +0200
http://bitbucket.org/pypy/pypy/changeset/b4a1d5e8166e/
Log: Performance improvements
diff --git a/rpython/memory/gc/base.py b/rpython/memory/gc/base.py
--- a/rpython/memory/gc/base.py
+++ b/rpython/memory/gc/base.py
@@ -1,6 +1,7 @@
from rpython.rtyper.lltypesystem import lltype, llmemory, llarena, rffi
from rpython.rtyper.lltypesystem.lloperation import llop
from rpython.rlib.debug import ll_assert
+from rpython.rlib.objectmodel import func_with_new_name
from rpython.memory.gcheader import GCHeaderBuilder
from rpython.memory.support import DEFAULT_CHUNK_SIZE
from rpython.memory.support import get_address_stack, get_address_deque
@@ -234,6 +235,11 @@
i += 1
trace._annspecialcase_ = 'specialize:arg(2)'
+ tracei = func_with_new_name(trace, 'tracei')
+ tracei._annspecialcase_ = 'specialize:arg(2)'
+ tracei._always_inline_ = True
+
+
def _trace_slow_path(self, obj, callback, arg):
typeid = self.get_type_id(obj)
if self.has_gcptr_in_varsize(typeid):
diff --git a/rpython/memory/gc/qcgc.py b/rpython/memory/gc/qcgc.py
--- a/rpython/memory/gc/qcgc.py
+++ b/rpython/memory/gc/qcgc.py
@@ -4,6 +4,7 @@
from rpython.rtyper.lltypesystem.lloperation import llop
from rpython.rlib.debug import ll_assert
from rpython.rlib.rarithmetic import ovfcheck
+from rpython.rlib.objectmodel import specialize
QCGC_HAS_HASH = 0x100 # Upper half of flags for clients, lower half is reserved
QCGC_PREBUILT_OBJECT = 0x2 # XXX: exploits knowledge about qcgc library
@@ -13,6 +14,10 @@
moving_gc = False
needs_write_barrier = True
malloc_zero_filled = False
+
+ inline_simple_malloc = True
+ inline_simple_malloc_varsize = True
+
prebuilt_gc_objects_are_static_roots = True # XXX: ?
can_usually_pin_objects = False
object_minimal_size = 0
@@ -82,6 +87,7 @@
# XXX: Not supported
pass
+ @specialize.arg(2)
def id_or_identityhash(self, gcobj, is_hash):
obj = llmemory.cast_ptr_to_adr(gcobj)
hdr = self.header(obj)
@@ -92,6 +98,7 @@
if is_hash:
i = mangle_hash(i)
return i
+ id_or_identityhash._always_inline_ = True
def id(self, gcobj):
return self.id_or_identityhash(gcobj, False)
diff --git a/rpython/memory/gctransform/qcgcframework.py
b/rpython/memory/gctransform/qcgcframework.py
--- a/rpython/memory/gctransform/qcgcframework.py
+++ b/rpython/memory/gctransform/qcgcframework.py
@@ -19,16 +19,15 @@
#
s_gcref = SomePtr(llmemory.GCREF)
- #self.malloc_weakref_ptr = self._getfn(
- # GCClass.malloc_weakref.im_func,
- # [s_gc, s_typeid16, annmodel.SomeInteger(nonneg=True),
- # s_gcref], s_gcref)
- #
+
def invokecallback(root, visit_fn):
ref = llmemory.cast_adr_to_ptr(root, rffi.VOIDPP)
visit_fn(llmemory.cast_ptr_to_adr(ref[0]))
def pypy_trace_cb(obj, visit_fn):
- gc.trace(obj, invokecallback, visit_fn)
+ typeid = gc.get_type_id(obj)
+ if not gc.has_gcptr(typeid):
+ return
+ gc.tracei(obj, invokecallback, visit_fn)
pypy_trace_cb.c_name = "pypy_trace_cb"
self.autoregister_ptrs.append(
getfn(pypy_trace_cb,
@@ -57,8 +56,8 @@
return livevars
def pop_roots(self, hop, livevars):
- for _ in livevars: # Does not move, so no writing back
- hop.genop("qcgc_pop_root", [])
+ c_count = rmodel.inputconst(lltype.Signed, len(livevars))
+ hop.genop("qcgc_pop_roots", [c_count])
def gct_gc_fq_register(self, hop):
pass
diff --git a/rpython/rtyper/tool/rffi_platform.py
b/rpython/rtyper/tool/rffi_platform.py
--- a/rpython/rtyper/tool/rffi_platform.py
+++ b/rpython/rtyper/tool/rffi_platform.py
@@ -888,17 +888,20 @@
extern void pypy_trace_cb(void *, void (*)(void *));
- void qcgc_trace_cb(object_t *object, void (*visit)(object_t *object)) {
+ inline void qcgc_trace_cb(object_t *object, void (*visit)(object_t *object)) {
pypy_trace_cb((void *) object, (void (*)(void *)) visit);
}
"""
eci = ExternalCompilationInfo(
- include_dirs = [library_dir],
+ include_dirs = [library_dir, os.path.join(library_dir, 'src')],
includes = ["qcgc.h"],
separate_module_sources = [separate_source], # XXX
separate_module_files = [os.path.join(library_dir, f) for f in
- ["qcgc.c"]],
+ ['qcgc.c', 'src/allocator.c', 'src/arena.c', 'src/bag.c',
+ 'src/collector.c', 'src/event_logger.c', 'src/gray_stack.c',
+ 'src/hugeblocktable.c', 'src/shadow_stack.c',
+ 'src/signal_handler.c', 'src/weakref.c']],
)
return eci
diff --git a/rpython/translator/c/funcgen.py b/rpython/translator/c/funcgen.py
--- a/rpython/translator/c/funcgen.py
+++ b/rpython/translator/c/funcgen.py
@@ -948,8 +948,9 @@
obj = self.expr(op.args[0])
return 'qcgc_push_root((object_t *) %s);' % (obj,)
- def OP_QCGC_POP_ROOT(self, op):
- return 'qcgc_pop_root();'
+ def OP_QCGC_POP_ROOTS(self, op):
+ count = self.expr(op.args[0])
+ return 'qcgc_pop_root((size_t) %s);' % (count,)
def OP_QCGC_ALLOCATE(self, op):
# XXX: SET typeid
diff --git a/rpython/translator/c/src/qcgc/config.h
b/rpython/translator/c/src/qcgc/config.h
new file mode 100644
--- /dev/null
+++ b/rpython/translator/c/src/qcgc/config.h
@@ -0,0 +1,58 @@
+#pragma once
+
+#define CHECKED 0 // Enable runtime sanity checks
+#define DEBUG_ZERO_ON_SWEEP 0 // Zero memory on sweep (debug only)
+
+#define QCGC_INIT_ZERO 0 // Init new objects with zero bytes
+
+/**
+ * Event logger
+ */
+#define EVENT_LOG 1 //
Enable event log
+#define LOGFILE "./qcgc_events.log" // Default logfile
+#define LOG_ALLOCATION 0 // Enable
allocation log (warning:
+
// significant performance impact)
+
+#define QCGC_SHADOWSTACK_SIZE 163840 // Total shadowstack size
+#define QCGC_ARENA_BAG_INIT_SIZE 16 // Initial size of the
arena bag
+#define QCGC_ARENA_SIZE_EXP 20 // Between 16 (64kB)
and 20 (1MB)
+#define QCGC_LARGE_ALLOC_THRESHOLD_EXP 14 // Less than QCGC_ARENA_SIZE_EXP
+#define QCGC_MARK_LIST_SEGMENT_SIZE 64 // TODO: Tune for performance
+#define QCGC_GRAY_STACK_INIT_SIZE 128 // TODO: Tune for performance
+#define QCGC_INC_MARK_MIN 64 // TODO: Tune for
performance
+
+/**
+ * Fit allocator
+ */
+#define QCGC_LARGE_FREE_LIST_FIRST_EXP 5 // First exponent of large free
list
+#define QCGC_LARGE_FREE_LIST_INIT_SIZE 4 // Initial size for large free
lists
+#define QCGC_SMALL_FREE_LIST_INIT_SIZE 16 // Initial size for small free
lists
+
+/**
+ * Auto Mark/Collect
+ */
+#define QCGC_MAJOR_COLLECTION_THRESHOLD (5 * (1<<QCGC_ARENA_SIZE_EXP))
+#define QCGC_INCMARK_THRESHOLD (1<<QCGC_ARENA_SIZE_EXP)
+
+/**
+ * DO NOT MODIFY BELOW HERE
+ */
+
+#if QCGC_LARGE_ALLOC_THRESHOLD_EXP >= QCGC_ARENA_SIZE_EXP
+#error "Inconsistent configuration. Huge block threshold must be smaller " \
+ "than the arena size."
+#endif
+
+#ifdef TESTING
+#define QCGC_STATIC
+#define QCGC_INLINE
+#else
+#define QCGC_STATIC static
+#define QCGC_INLINE inline __attribute__((always_inline))
+#endif
+
+#define MAX(a,b) (((a)>(b))?(a):(b))
+#define MIN(a,b) (((a)<(b))?(a):(b))
+#define UNUSED(x) (void)(x)
+#define LIKELY(x) __builtin_expect((x), 1)
+#define UNLIKELY(x) __builtin_expect((x), 0)
diff --git a/rpython/translator/c/src/qcgc/qcgc.c
b/rpython/translator/c/src/qcgc/qcgc.c
--- a/rpython/translator/c/src/qcgc/qcgc.c
+++ b/rpython/translator/c/src/qcgc/qcgc.c
@@ -1,26 +1,199 @@
-#include "src/config.h"
+#include "qcgc.h"
+
+#include <assert.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/mman.h>
+
#include "src/allocator.h"
-#include "src/arena.h"
-#include "src/bag.h"
#include "src/collector.h"
-#include "src/core.h"
#include "src/event_logger.h"
#include "src/gc_state.h"
-#include "src/gray_stack.h"
#include "src/hugeblocktable.h"
-#include "src/object.h"
-#include "src/shadow_stack.h"
#include "src/signal_handler.h"
-#include "src/weakref.h"
-#include "src/allocator.c"
-#include "src/arena.c"
-#include "src/bag.c"
-#include "src/collector.c"
-#include "src/core.c"
-#include "src/event_logger.c"
-#include "src/gray_stack.c"
-#include "src/hugeblocktable.c"
-#include "src/shadow_stack.c"
-#include "src/signal_handler.c"
-#include "src/weakref.c"
+#define env_or_fallback(var, env_name, fallback) do { \
+ char *env_val = getenv(env_name);
\
+ if (env_val != NULL) {
\
+ if (1 != sscanf(env_val, "%zu", &var)) {
\
+ var = fallback;
\
+ }
\
+ } else {
\
+ var = fallback;
\
+ }
\
+} while(0)
+
+QCGC_STATIC QCGC_INLINE void initialize_shadowstack(void);
+QCGC_STATIC QCGC_INLINE void destroy_shadowstack(void);
+
+void qcgc_initialize(void) {
+ initialize_shadowstack();
+ qcgc_state.prebuilt_objects = qcgc_shadow_stack_create(16); // XXX
+ qcgc_state.weakrefs = qcgc_weakref_bag_create(16); // XXX
+ qcgc_state.gp_gray_stack = qcgc_gray_stack_create(16); // XXX
+ qcgc_state.gray_stack_size = 0;
+ qcgc_state.phase = GC_PAUSE;
+ qcgc_state.bytes_since_collection = 0;
+ qcgc_state.bytes_since_incmark = 0;
+ qcgc_state.free_cells = 0;
+ qcgc_state.largest_free_block = 0;
+ qcgc_allocator_initialize();
+ qcgc_hbtable_initialize();
+ qcgc_event_logger_initialize();
+
+ env_or_fallback(qcgc_state.major_collection_threshold,
+ "QCGC_MAJOR_COLLECTION",
QCGC_MAJOR_COLLECTION_THRESHOLD);
+ env_or_fallback(qcgc_state.incmark_threshold,
+ "QCGC_INCMARK", QCGC_INCMARK_THRESHOLD);
+
+ setup_signal_handler();
+}
+
+void qcgc_destroy(void) {
+ qcgc_event_logger_destroy();
+ qcgc_hbtable_destroy();
+ qcgc_allocator_destroy();
+ destroy_shadowstack();
+ free(qcgc_state.prebuilt_objects);
+ free(qcgc_state.weakrefs);
+ free(qcgc_state.gp_gray_stack);
+}
+
+object_t *qcgc_allocate(size_t size) {
+#if LOG_ALLOCATION
+ qcgc_event_logger_log(EVENT_ALLOCATE_START, sizeof(size_t),
+ (uint8_t *) &size);
+#endif
+ object_t *result;
+
+ if (qcgc_state.bytes_since_collection >
+ qcgc_state.major_collection_threshold) {
+ qcgc_collect();
+ }
+ if (qcgc_state.bytes_since_incmark > qcgc_state.incmark_threshold) {
+ qcgc_mark(true);
+ }
+
+ if (size <= 1<<QCGC_LARGE_ALLOC_THRESHOLD_EXP) {
+ // Use bump / fit allocator
+ //if (qcgc_allocator_state.use_bump_allocator) {
+ if (true) {
+ result = qcgc_bump_allocate(size);
+ } else {
+ result = qcgc_fit_allocate(size);
+
+ // Fallback to bump allocator
+ if (result == NULL) {
+ result = qcgc_bump_allocate(size);
+ }
+ }
+ } else {
+ // Use huge block allocator
+ result = qcgc_large_allocate(size);
+ }
+
+ // XXX: Should we use cells instead of bytes?
+ qcgc_state.bytes_since_collection += size;
+ qcgc_state.bytes_since_incmark += size;
+
+
+#if LOG_ALLOCATION
+ qcgc_event_logger_log(EVENT_ALLOCATE_DONE, sizeof(object_t *),
+ (uint8_t *) &result);
+#endif
+ return result;
+}
+
+void qcgc_collect(void) {
+ qcgc_mark(false);
+ qcgc_sweep();
+ qcgc_state.bytes_since_collection = 0;
+}
+
+void qcgc_write(object_t *object) {
+#if CHECKED
+ assert(object != NULL);
+#endif
+ if ((object->flags & QCGC_GRAY_FLAG) != 0) {
+ // Already gray, skip
+ return;
+ }
+ object->flags |= QCGC_GRAY_FLAG;
+
+ // Register prebuilt object if necessary
+ if (((object->flags & QCGC_PREBUILT_OBJECT) != 0) &&
+ ((object->flags & QCGC_PREBUILT_REGISTERED) == 0)) {
+ object->flags |= QCGC_PREBUILT_REGISTERED;
+ qcgc_state.prebuilt_objects = qcgc_shadow_stack_push(
+ qcgc_state.prebuilt_objects, object);
+ }
+
+ if (qcgc_state.phase == GC_PAUSE) {
+ return; // We are done
+ }
+
+ // Triggered barrier, we must not collect now
+ qcgc_state.phase = GC_MARK;
+
+ // Test reachability of object and push if necessary
+ if ((object->flags & QCGC_PREBUILT_OBJECT) != 0) {
+ // NOTE: No mark test here, as prebuilt objects are always
reachable
+ // Push prebuilt object to general purpose gray stack
+ qcgc_state.gp_gray_stack = qcgc_gray_stack_push(
+ qcgc_state.gp_gray_stack, object);
+ } else if ((object_t *) qcgc_arena_addr((cell_t *) object) == object) {
+ if (qcgc_hbtable_is_marked(object)) {
+ // Push huge block to general purpose gray stack
+ qcgc_state.gp_gray_stack = qcgc_gray_stack_push(
+ qcgc_state.gp_gray_stack, object);
+ }
+ } else {
+ if (qcgc_arena_get_blocktype(qcgc_arena_addr((cell_t *) object),
+ qcgc_arena_cell_index((cell_t *)
object)) == BLOCK_BLACK) {
+ // This was black before, push it to gray stack again
+ arena_t *arena = qcgc_arena_addr((cell_t *) object);
+ arena->gray_stack = qcgc_gray_stack_push(
+ arena->gray_stack, object);
+ }
+ }
+}
+
+void qcgc_register_weakref(object_t *weakrefobj, object_t **target) {
+#if CHECKED
+ assert((weakrefobj->flags & QCGC_PREBUILT_OBJECT) == 0);
+ assert((object_t *) qcgc_arena_addr((cell_t *) weakrefobj) !=
weakrefobj);
+#endif
+ // NOTE: At this point, the target must point to a pointer to a valid
+ // object. We don't register any weakrefs to prebuilt objects as they
+ // are always valid.
+ if (((*target)->flags & QCGC_PREBUILT_OBJECT) == 0) {
+ qcgc_state.weakrefs = qcgc_weakref_bag_add(qcgc_state.weakrefs,
+ (struct weakref_bag_item_s) {
+ .weakrefobj = weakrefobj,
+ .target = target});
+ }
+}
+
+QCGC_STATIC QCGC_INLINE void *_trap_page_addr(object_t **shadow_stack) {
+ object_t **shadow_stack_end = shadow_stack + QCGC_SHADOWSTACK_SIZE;
+ char *in_trap_page = (((char *)shadow_stack_end) + 4095);
+ void *rounded_trap_page = (void *)(((uintptr_t)in_trap_page) & (~4095));
+ return rounded_trap_page;
+}
+
+QCGC_STATIC QCGC_INLINE void initialize_shadowstack(void) {
+ size_t stack_size = QCGC_SHADOWSTACK_SIZE * sizeof(object_t *);
+ // allocate stack + size for alignment + trap page
+ object_t **stack = (object_t **) malloc(stack_size + 8192);
+ assert(stack != NULL);
+ mprotect(_trap_page_addr(stack), 4096, PROT_NONE);
+
+ qcgc_shadowstack.top = stack;
+ qcgc_shadowstack.base = stack;
+}
+
+QCGC_STATIC QCGC_INLINE void destroy_shadowstack(void) {
+ mprotect(_trap_page_addr(qcgc_shadowstack.base), 4096, PROT_READ |
+ PROT_WRITE);
+
+ free(qcgc_shadowstack.base);
+}
diff --git a/rpython/translator/c/src/qcgc/qcgc.h
b/rpython/translator/c/src/qcgc/qcgc.h
--- a/rpython/translator/c/src/qcgc/qcgc.h
+++ b/rpython/translator/c/src/qcgc/qcgc.h
@@ -4,9 +4,30 @@
#pragma once
+#include "config.h"
+
+#include <assert.h>
#include <stddef.h>
+#include <stdint.h>
-#include "src/object.h"
+/**
+ * Object Layout.
+ */
+#define QCGC_GRAY_FLAG (1<<0)
+#define QCGC_PREBUILT_OBJECT (1<<1)
+#define QCGC_PREBUILT_REGISTERED (1<<2)
+
+typedef struct object_s {
+ uint32_t flags;
+} object_t;
+
+/**
+ * Shadow stack
+ */
+struct qcgc_shadowstack {
+ object_t **top;
+ object_t **base;
+} qcgc_shadowstack;
/**
* Initialize the garbage collector.
@@ -32,12 +53,20 @@
*
* @param object The root object
*/
-void qcgc_push_root(object_t *object);
+QCGC_STATIC QCGC_INLINE void qcgc_push_root(object_t *object) {
+ *qcgc_shadowstack.top = object;
+ qcgc_shadowstack.top++;
+}
/**
- * Pop root object.
+ * Pop root objects.
+ *
+ * @param count Number of objects to pop
*/
-void qcgc_pop_root(void);
+QCGC_STATIC QCGC_INLINE void qcgc_pop_root(size_t count) {
+ qcgc_shadowstack.top -= count;
+ assert(qcgc_shadowstack.base <= qcgc_shadowstack.top);
+}
/**
* Write barrier. Has to be called whenever a reference to another object is
diff --git a/rpython/translator/c/src/qcgc/src/allocator.c
b/rpython/translator/c/src/qcgc/src/allocator.c
--- a/rpython/translator/c/src/qcgc/src/allocator.c
+++ b/rpython/translator/c/src/qcgc/src/allocator.c
@@ -22,9 +22,6 @@
QCGC_STATIC cell_t *fit_allocator_large_fit(size_t index, size_t cells);
QCGC_STATIC cell_t *fit_allocator_large_first_fit(size_t index, size_t cells);
-QCGC_STATIC bool valid_block(cell_t *ptr, size_t cells);
-QCGC_STATIC void free_list_consistency_check(void);
-
void qcgc_allocator_initialize(void) {
qcgc_allocator_state.arenas =
qcgc_arena_bag_create(QCGC_ARENA_BAG_INIT_SIZE);
@@ -74,19 +71,6 @@
}
void qcgc_fit_allocator_add(cell_t *ptr, size_t cells) {
-#if CHECKED
- if (cells > 0) {
- assert((((object_t *)ptr)->flags & QCGC_PREBUILT_OBJECT) == 0);
- assert((cell_t *) qcgc_arena_addr(ptr) != ptr);
- assert(qcgc_arena_get_blocktype(ptr) == BLOCK_FREE);
- if (qcgc_arena_addr(ptr) == qcgc_arena_addr(ptr + cells)) {
- assert(qcgc_arena_get_blocktype(ptr + cells) !=
BLOCK_EXTENT);
- }
- for (size_t i = 1; i < cells; i++) {
- assert(qcgc_arena_get_blocktype(ptr + i) ==
BLOCK_EXTENT);
- }
- }
-#endif
if (cells > 0) {
if (is_small(cells)) {
size_t index = small_index(cells);
@@ -113,9 +97,12 @@
assert(bytes <= 1<<QCGC_LARGE_ALLOC_THRESHOLD_EXP);
#endif
size_t cells = bytes_to_cells(bytes);
- if (cells > qcgc_allocator_state.bump_state.remaining_cells) {
- if (qcgc_allocator_state.bump_state.remaining_cells > 0) {
-
qcgc_arena_set_blocktype(qcgc_allocator_state.bump_state.bump_ptr,
+ if (UNLIKELY(cells > qcgc_allocator_state.bump_state.remaining_cells)) {
+ if (LIKELY(qcgc_allocator_state.bump_state.remaining_cells >
0)) {
+ qcgc_arena_set_blocktype(
+
qcgc_arena_addr(qcgc_allocator_state.bump_state.bump_ptr),
+ qcgc_arena_cell_index(
+
qcgc_allocator_state.bump_state.bump_ptr),
BLOCK_FREE);
}
bump_allocator_renew_block();
@@ -123,7 +110,8 @@
cell_t *mem = qcgc_allocator_state.bump_state.bump_ptr;
bump_allocator_advance(cells);
- qcgc_arena_set_blocktype(mem, BLOCK_WHITE);
+ qcgc_arena_set_blocktype(qcgc_arena_addr(mem),
qcgc_arena_cell_index(mem),
+ BLOCK_WHITE);
/*
if (qcgc_allocator_state.bump_state.remaining_cells > 0) {
qcgc_arena_set_blocktype(qcgc_allocator_state.bump_state.bump_ptr,
@@ -143,8 +131,10 @@
if (qcgc_allocator_state.bump_state.remaining_cells > 0) {
for (size_t i = 1; i <
qcgc_allocator_state.bump_state.remaining_cells;
i++) {
- assert(qcgc_arena_get_blocktype(
-
qcgc_allocator_state.bump_state.bump_ptr + i)
+ assert(qcgc_arena_get_blocktype(qcgc_arena_addr(
+
qcgc_allocator_state.bump_state.bump_ptr + i),
+ qcgc_arena_cell_index(
+
qcgc_allocator_state.bump_state.bump_ptr + i))
== BLOCK_EXTENT);
}
}
@@ -156,11 +146,16 @@
#if CHECKED
if (qcgc_allocator_state.bump_state.remaining_cells > 0) {
assert(qcgc_arena_get_blocktype(
-
qcgc_allocator_state.bump_state.bump_ptr) == BLOCK_FREE);
+ qcgc_arena_addr(
qcgc_allocator_state.bump_state.bump_ptr),
+ qcgc_arena_cell_index(
+
qcgc_allocator_state.bump_state.bump_ptr))
+ == BLOCK_FREE);
for (size_t i = 1; i <
qcgc_allocator_state.bump_state.remaining_cells;
i++) {
- assert(qcgc_arena_get_blocktype(
-
qcgc_allocator_state.bump_state.bump_ptr + i)
+ assert(qcgc_arena_get_blocktype(qcgc_arena_addr(
+
qcgc_allocator_state.bump_state.bump_ptr + i),
+ qcgc_arena_cell_index(
+
qcgc_allocator_state.bump_state.bump_ptr + i))
== BLOCK_EXTENT);
}
}
@@ -191,12 +186,17 @@
large_free_list[QCGC_LARGE_FREE_LISTS - 1] = free_list;
#if CHECKED
assert(qcgc_allocator_state.bump_state.bump_ptr != NULL);
-
assert(qcgc_arena_get_blocktype(qcgc_allocator_state.bump_state.bump_ptr) ==
- BLOCK_FREE);
+ assert(qcgc_arena_get_blocktype(
+ qcgc_arena_addr(
qcgc_allocator_state.bump_state.bump_ptr),
+ qcgc_arena_cell_index(
+
qcgc_allocator_state.bump_state.bump_ptr))
+ == BLOCK_FREE);
for (size_t i = 1; i < qcgc_allocator_state.bump_state.remaining_cells;
i++) {
- assert(qcgc_arena_get_blocktype(
-
qcgc_allocator_state.bump_state.bump_ptr + i)
+ assert(qcgc_arena_get_blocktype(qcgc_arena_addr(
+
qcgc_allocator_state.bump_state.bump_ptr + i),
+ qcgc_arena_cell_index(
+
qcgc_allocator_state.bump_state.bump_ptr + i))
== BLOCK_EXTENT);
}
#endif
@@ -204,9 +204,11 @@
QCGC_STATIC void bump_allocator_assign(cell_t *ptr, size_t cells) {
#if CHECKED
- assert(qcgc_arena_get_blocktype(ptr) == BLOCK_FREE);
+ assert(qcgc_arena_get_blocktype(qcgc_arena_addr(ptr),
+ qcgc_arena_cell_index(ptr)) == BLOCK_FREE);
for (size_t i = 1; i < cells; i++) {
- assert(qcgc_arena_get_blocktype(ptr + i) == BLOCK_EXTENT);
+ assert(qcgc_arena_get_blocktype(qcgc_arena_addr(ptr + i),
+ qcgc_arena_cell_index(ptr + i)) ==
BLOCK_EXTENT);
}
#endif
qcgc_allocator_state.bump_state.bump_ptr = ptr;
@@ -289,9 +291,11 @@
qcgc_linear_free_list_remove_index(
qcgc_allocator_state.fit_state.small_free_list[index],
0);
- qcgc_arena_set_blocktype(result, BLOCK_WHITE);
+ qcgc_arena_set_blocktype(qcgc_arena_addr(result),
+ qcgc_arena_cell_index(result),
BLOCK_WHITE);
if (list_cell_size - cells > 0) {
- qcgc_arena_set_blocktype(result + cells,
BLOCK_FREE);
+ qcgc_arena_set_blocktype(qcgc_arena_addr(result
+ cells),
+ qcgc_arena_cell_index(result +
cells), BLOCK_FREE);
qcgc_fit_allocator_add(result + cells,
list_cell_size - cells);
}
return result;
@@ -334,9 +338,11 @@
qcgc_allocator_state.fit_state.large_free_list[index] =
qcgc_exp_free_list_remove_index(qcgc_allocator_state.fit_state.
large_free_list[index], best_fit_index);
- qcgc_arena_set_blocktype(result, BLOCK_WHITE);
+ qcgc_arena_set_blocktype(qcgc_arena_addr(result),
+ qcgc_arena_cell_index(result), BLOCK_WHITE);
if (best_fit_cells - cells > 0) {
- qcgc_arena_set_blocktype(result + cells, BLOCK_FREE);
+ qcgc_arena_set_blocktype(qcgc_arena_addr(result +
cells),
+ qcgc_arena_cell_index(result + cells),
BLOCK_FREE);
qcgc_fit_allocator_add(result + cells, best_fit_cells -
cells);
}
} else {
@@ -360,9 +366,11 @@
0);
qcgc_arena_mark_allocated(item.ptr, cells);
- qcgc_arena_set_blocktype(item.ptr, BLOCK_WHITE);
+ qcgc_arena_set_blocktype(qcgc_arena_addr(item.ptr),
+ qcgc_arena_cell_index(item.ptr),
BLOCK_WHITE);
if (item.size - cells > 0) {
- qcgc_arena_set_blocktype(item.ptr + cells,
BLOCK_FREE);
+
qcgc_arena_set_blocktype(qcgc_arena_addr(item.ptr + cells),
+ qcgc_arena_cell_index(item.ptr
+ cells), BLOCK_FREE);
qcgc_fit_allocator_add(item.ptr + cells,
item.size - cells);
}
return item.ptr;
@@ -404,41 +412,3 @@
#endif
return index + 1;
}
-
-QCGC_STATIC bool valid_block(cell_t *ptr, size_t cells) {
-#if CHECKED
- assert(ptr != NULL);
- assert(cells > 0);
-#endif
- return (qcgc_arena_get_blocktype(ptr) == BLOCK_FREE && (
- ((qcgc_arena_addr(ptr + cells)) == (arena_t *)
(ptr + cells)) ||
- qcgc_arena_get_blocktype(ptr + cells) !=
BLOCK_EXTENT));
-}
-
-QCGC_STATIC void free_list_consistency_check(void) {
- for (size_t i = 0; i < QCGC_SMALL_FREE_LISTS; i++) {
- linear_free_list_t *free_list =
- qcgc_allocator_state.fit_state.small_free_list[i];
- for (size_t j = 0; j < free_list->count; j++) {
- cell_t *item = free_list->items[j];
- if (qcgc_arena_get_blocktype(item) == BLOCK_FREE) {
- for (size_t s = 1; s < small_index_to_cells(i);
s++) {
- assert(qcgc_arena_get_blocktype(item +
s) == BLOCK_EXTENT);
- }
- }
- }
- }
-
- for (size_t i = 0; i < QCGC_LARGE_FREE_LISTS; i++) {
- exp_free_list_t *free_list =
- qcgc_allocator_state.fit_state.large_free_list[i];
- for (size_t j = 0; j < free_list->count; j++) {
- struct exp_free_list_item_s item = free_list->items[j];
- if (qcgc_arena_get_blocktype(item.ptr) == BLOCK_FREE) {
- for (size_t s = 1; s < item.size; s++) {
-
assert(qcgc_arena_get_blocktype(item.ptr + s) == BLOCK_EXTENT);
- }
- }
- }
- }
-}
diff --git a/rpython/translator/c/src/qcgc/src/allocator.h
b/rpython/translator/c/src/qcgc/src/allocator.h
--- a/rpython/translator/c/src/qcgc/src/allocator.h
+++ b/rpython/translator/c/src/qcgc/src/allocator.h
@@ -1,12 +1,9 @@
#pragma once
-#include "config.h"
-
-#include <stddef.h>
+#include "../qcgc.h"
#include "arena.h"
#include "bag.h"
-#include "object.h"
/**
* Free lists:
diff --git a/rpython/translator/c/src/qcgc/src/arena.c
b/rpython/translator/c/src/qcgc/src/arena.c
--- a/rpython/translator/c/src/qcgc/src/arena.c
+++ b/rpython/translator/c/src/qcgc/src/arena.c
@@ -13,12 +13,6 @@
#include "event_logger.h"
#include "gc_state.h"
-/**
- * Internal functions
- */
-QCGC_STATIC QCGC_INLINE blocktype_t get_blocktype(arena_t *arena, size_t
index);
-QCGC_STATIC QCGC_INLINE void set_blocktype(arena_t *arena, size_t index,
blocktype_t type);
-
arena_t *qcgc_arena_create(void) {
qcgc_event_logger_log(EVENT_NEW_ARENA, 0, NULL);
@@ -48,7 +42,7 @@
}
// Init bitmaps: One large free block
- qcgc_arena_set_bitmap_entry(result->mark_bitmap,
QCGC_ARENA_FIRST_CELL_INDEX, 1);
+ result->mark_bitmap[QCGC_ARENA_FIRST_CELL_INDEX / 8] = 1 << (QCGC_ARENA_FIRST_CELL_INDEX % 8);
// Create gray stack
result->gray_stack = qcgc_gray_stack_create(QCGC_GRAY_STACK_INIT_SIZE);
@@ -63,121 +57,36 @@
munmap((void *) arena, QCGC_ARENA_SIZE);
}
-arena_t *qcgc_arena_addr(cell_t *ptr) {
- return (arena_t *)((intptr_t) ptr & ~(QCGC_ARENA_SIZE - 1));
-}
-
-size_t qcgc_arena_cell_index(cell_t *ptr) {
- return (size_t)((intptr_t) ptr & (QCGC_ARENA_SIZE - 1)) >> 4;
-}
-
-bool qcgc_arena_get_bitmap_entry(uint8_t *bitmap, size_t index) {
-#if CHECKED
- assert(bitmap != NULL);
-#endif
- size_t byte = index / 8;
- uint8_t mask = 0x01 << (index % 8);
- return ((bitmap[byte] & mask) == mask);
-}
-
-void qcgc_arena_set_bitmap_entry(uint8_t *bitmap, size_t index, uint8_t value)
{
-#if CHECKED
- assert(bitmap != NULL);
-#endif
- size_t byte = index / 8;
- uint8_t bit = (index % 8);
- bitmap[byte] &= ~(1 << bit);
- bitmap[byte] = (bitmap[byte] & ~(1 << bit)) | (value << bit);
-}
-
-QCGC_STATIC QCGC_INLINE blocktype_t get_blocktype(arena_t *arena, size_t
index) {
-#if CHECKED
- assert(arena != NULL);
-#endif
- uint8_t block_bit = qcgc_arena_get_bitmap_entry(arena->block_bitmap,
index);
- uint8_t mark_bit = qcgc_arena_get_bitmap_entry(arena->mark_bitmap,
index);
-
- if (block_bit) {
- if (mark_bit) {
- return BLOCK_BLACK;
- } else {
- return BLOCK_WHITE;
- }
- } else {
- if (mark_bit) {
- return BLOCK_FREE;
- } else {
- return BLOCK_EXTENT;
- }
- }
-}
-
-QCGC_STATIC QCGC_INLINE blocktype_t qcgc_arena_get_blocktype(cell_t *ptr) {
- size_t index = qcgc_arena_cell_index(ptr);
- arena_t *arena = qcgc_arena_addr(ptr);
-
- return get_blocktype(arena, index);
-}
-
-QCGC_STATIC QCGC_INLINE void set_blocktype(arena_t *arena, size_t index,
blocktype_t type) {
-#if CHECKED
- assert(arena != NULL);
-#endif
- switch(type) {
- case BLOCK_EXTENT:
- qcgc_arena_set_bitmap_entry(arena->block_bitmap, index,
0);
- qcgc_arena_set_bitmap_entry(arena->mark_bitmap, index,
0);
- break;
- case BLOCK_FREE:
- qcgc_arena_set_bitmap_entry(arena->block_bitmap, index,
0);
- qcgc_arena_set_bitmap_entry(arena->mark_bitmap, index,
1);
- break;
- case BLOCK_WHITE:
- qcgc_arena_set_bitmap_entry(arena->block_bitmap, index,
1);
- qcgc_arena_set_bitmap_entry(arena->mark_bitmap, index,
0);
- break;
- case BLOCK_BLACK:
- qcgc_arena_set_bitmap_entry(arena->mark_bitmap, index,
1);
- qcgc_arena_set_bitmap_entry(arena->block_bitmap, index,
1);
- break;
- }
-}
-
-QCGC_STATIC QCGC_INLINE void qcgc_arena_set_blocktype(cell_t *ptr,
- blocktype_t type) {
- size_t index = qcgc_arena_cell_index(ptr);
- arena_t *arena = qcgc_arena_addr(ptr);
- set_blocktype(arena, index, type);
-}
-
void qcgc_arena_mark_allocated(cell_t *ptr, size_t cells) {
size_t index = qcgc_arena_cell_index(ptr);
arena_t *arena = qcgc_arena_addr(ptr);
#if CHECKED
- assert(get_blocktype(arena, index) == BLOCK_FREE);
+ assert(qcgc_arena_get_blocktype(arena, index) == BLOCK_FREE);
for (size_t i = 1; i < cells; i++) {
- assert(get_blocktype(arena, index + i) == BLOCK_EXTENT);
+ assert(qcgc_arena_get_blocktype(arena, index + i) ==
BLOCK_EXTENT);
}
#endif
- set_blocktype(arena, index, BLOCK_WHITE);
+ qcgc_arena_set_blocktype(arena, index, BLOCK_WHITE);
size_t index_of_next_block = index + cells;
if (index_of_next_block < QCGC_ARENA_CELLS_COUNT &&
- get_blocktype(arena, index_of_next_block) ==
BLOCK_EXTENT) {
- set_blocktype(arena, index_of_next_block, BLOCK_FREE);
+ qcgc_arena_get_blocktype(arena, index_of_next_block) ==
+ BLOCK_EXTENT) {
+ qcgc_arena_set_blocktype(arena, index_of_next_block,
BLOCK_FREE);
}
#if CHECKED
- assert(get_blocktype(arena, index) == BLOCK_WHITE);
+ assert(qcgc_arena_get_blocktype(arena, index) == BLOCK_WHITE);
for (size_t i = 1; i < cells; i++) {
- assert(get_blocktype(arena, index + i) == BLOCK_EXTENT);
+ assert(qcgc_arena_get_blocktype(arena, index + i) ==
BLOCK_EXTENT);
}
if (index_of_next_block < QCGC_ARENA_CELLS_COUNT) {
- assert(get_blocktype(arena, index + cells) != BLOCK_EXTENT);
+ assert(qcgc_arena_get_blocktype(arena, index + cells) !=
BLOCK_EXTENT);
}
#endif
}
void qcgc_arena_mark_free(cell_t *ptr) {
- qcgc_arena_set_blocktype(ptr, BLOCK_FREE);
+ qcgc_arena_set_blocktype(qcgc_arena_addr(ptr),
qcgc_arena_cell_index(ptr),
+ BLOCK_FREE);
// No coalescing, collector will do this
}
@@ -192,9 +101,9 @@
for (size_t cell = QCGC_ARENA_FIRST_CELL_INDEX;
cell < QCGC_ARENA_CELLS_COUNT;
cell++) {
- switch (get_blocktype(arena, cell)) {
+ switch (qcgc_arena_get_blocktype(arena, cell)) {
case BLOCK_BLACK:
- set_blocktype(arena, cell, BLOCK_WHITE);
+ qcgc_arena_set_blocktype(arena, cell,
BLOCK_WHITE);
case BLOCK_FREE: // Fall through
case BLOCK_EXTENT: // Fall through
case BLOCK_WHITE: // Fall through
@@ -228,36 +137,52 @@
arena->block_bitmap[i] = new_block;
- for (size_t j = 0; j < 8; j++) {
- size_t cell = i * 8 + j;
- uint8_t mask = 1 << j;
- if ((new_mark & mask) == mask) {
- if (last_free_cell != 0) {
- // Coalesce
- new_mark &= ~mask;
- } else {
- last_free_cell = cell;
+ if (new_block == new_mark) {
+ // Both are 0
+ continue;
+ }
+
+ if (!new_block) {
+ // Only entries in the mark bitmap
+ if (last_free_cell != 0) {
+ // Coalesce
+ new_mark = 0;
+ } else {
+ uint8_t first = __builtin_ctz(new_mark);
+ new_mark = 1 << first;
+ last_free_cell = i * 8 + first;
+ }
+ } else {
+ for (size_t j = 0; j < 8; j++) {
+ size_t cell = i * 8 + j;
+ uint8_t m = (new_mark >> j) & 0x1;
+ uint8_t b = (new_block >> j) & 0x1;
+ uint8_t mask = 1 << j;
+ if (m) {
+ // Free
+ if (last_free_cell != 0) {
+ // Coalesce
+ new_mark &= ~mask;
+ } else {
+ last_free_cell = cell;
+ }
+ } else if (b) {
+ // White
+ free = false;
+ if (last_free_cell != 0) {
+
qcgc_fit_allocator_add(arena->cells + last_free_cell,
+ cell -
last_free_cell);
+#if DEBUG_ZERO_ON_SWEEP
+ memset(arena->cells +
last_free_cell, 0,
+ sizeof(cell_t)
* (cell - last_free_cell));
+#endif
+ qcgc_state.free_cells += cell -
last_free_cell;
+ qcgc_state.largest_free_block =
MAX(
+
qcgc_state.largest_free_block,
+ cell -
last_free_cell);
+ last_free_cell = 0;
+ }
}
- } else if ((new_block & mask) == mask) {
- free = false;
- if (last_free_cell != 0) {
- // Force update to satisfy precondition
- arena->mark_bitmap[i] = new_mark;
- qcgc_fit_allocator_add(arena->cells +
last_free_cell,
- cell - last_free_cell);
-#if DEBUG_ZERO_ON_SWEEP
- memset(arena->cells + last_free_cell, 0,
- sizeof(cell_t) * (cell
- last_free_cell));
-#endif
- qcgc_state.free_cells += cell -
last_free_cell;
- qcgc_state.largest_free_block = MAX(
-
qcgc_state.largest_free_block,
- cell - last_free_cell);
- last_free_cell = 0;
- }
- // White
- } else {
- // Extent
}
}
arena->mark_bitmap[i] = new_mark;
@@ -289,7 +214,7 @@
for (size_t cell = QCGC_ARENA_FIRST_CELL_INDEX;
cell < QCGC_ARENA_CELLS_COUNT;
cell++) {
- switch (qcgc_arena_get_blocktype((void *) &arena->cells[cell]))
{
+ switch (qcgc_arena_get_blocktype(arena, cell)) {
case BLOCK_WHITE: // Fall through
case BLOCK_BLACK:
return false;
@@ -310,7 +235,7 @@
for (size_t cell = QCGC_ARENA_FIRST_CELL_INDEX;
cell < QCGC_ARENA_CELLS_COUNT;
cell++) {
- switch (qcgc_arena_get_blocktype((void *) &arena->cells[cell]))
{
+ switch (qcgc_arena_get_blocktype(arena, cell)) {
case BLOCK_WHITE: // Fall through
case BLOCK_BLACK:
prev_was_free = false;
@@ -339,7 +264,7 @@
for (size_t cell = QCGC_ARENA_FIRST_CELL_INDEX;
cell < QCGC_ARENA_CELLS_COUNT;
cell++) {
- switch (qcgc_arena_get_blocktype((void *) &arena->cells[cell]))
{
+ switch (qcgc_arena_get_blocktype(arena, cell)) {
case BLOCK_WHITE: // Fall through
case BLOCK_BLACK:
case BLOCK_EXTENT:
@@ -361,7 +286,7 @@
for (size_t cell = QCGC_ARENA_FIRST_CELL_INDEX;
cell < QCGC_ARENA_CELLS_COUNT;
cell++) {
- switch (qcgc_arena_get_blocktype((void *) &arena->cells[cell]))
{
+ switch (qcgc_arena_get_blocktype(arena, cell)) {
case BLOCK_BLACK: // Fall through
case BLOCK_EXTENT:
case BLOCK_FREE:
@@ -383,7 +308,7 @@
for (size_t cell = QCGC_ARENA_FIRST_CELL_INDEX;
cell < QCGC_ARENA_CELLS_COUNT;
cell++) {
- switch (qcgc_arena_get_blocktype((void *) &arena->cells[cell]))
{
+ switch (qcgc_arena_get_blocktype(arena, cell)) {
case BLOCK_WHITE: // Fall through
case BLOCK_FREE:
case BLOCK_EXTENT:
diff --git a/rpython/translator/c/src/qcgc/src/arena.h
b/rpython/translator/c/src/qcgc/src/arena.h
--- a/rpython/translator/c/src/qcgc/src/arena.h
+++ b/rpython/translator/c/src/qcgc/src/arena.h
@@ -4,10 +4,9 @@
#pragma once
-#include "config.h"
+#include "../qcgc.h"
#include <stdbool.h>
-#include <stdint.h>
#include <sys/types.h>
#include "gray_stack.h"
@@ -70,66 +69,17 @@
void qcgc_arena_destroy(arena_t *arena);
/**
- * Arena pointer for given cell.
- *
- * @param ptr Pointer to cell for which you want to know the
corresponding
- * arena
- * @return The arena the pointer belongs to
- */
-arena_t *qcgc_arena_addr(cell_t *ptr);
-
-/**
- * Index of cell in arena.
- *
- * @param ptr Pointer to cell for which you want to know the
cell index
- * @return Index of the cell to which ptr points to
- */
-size_t qcgc_arena_cell_index(cell_t *ptr);
-
-/**
- * Get bitmap value for given bitmap and cell index.
- *
- * @param bitmap Bitmap
- * @param index Index of cell
- * @return true if bitmap entry is set, false otherwise
- */
-bool qcgc_arena_get_bitmap_entry(uint8_t *bitmap, size_t index);
-
-/**
- * Set bitmap value for given bitmap and cell index.
- *
- * @param bitmap Bitmap
- * @param index Index of cell
- * @param value 1 to set entry, 0 to reset entry
- */
-void qcgc_arena_set_bitmap_entry(uint8_t *bitmap, size_t index, uint8_t value);
-
-/**
- * Get blocktype.
- *
- * @param ptr Pointer to cell for which you want to know the
blocktype
- * @return Blocktype
- */
-QCGC_STATIC QCGC_INLINE blocktype_t qcgc_arena_get_blocktype(cell_t *ptr);
-
-/**
- * Set blocktype.
- *
- * @param ptr Pointer to cell for which you want to set the
blocktype
- * @param type Blocktype that should be set
- */
-QCGC_STATIC QCGC_INLINE void qcgc_arena_set_blocktype(cell_t *ptr, blocktype_t
type);
-
-/**
* Mark ptr as allocated area with given size.
+ * DEPRECATED
*
* @param ptr Pointer to first cell of area
* @param cells Size in cells
*/
-QCGC_STATIC void qcgc_arena_mark_allocated(cell_t *ptr, size_t cells);
+void qcgc_arena_mark_allocated(cell_t *ptr, size_t cells);
/**
* Mark cell ptr point to as free (no coalescing).
+ * DEPRECATED
*
* @param ptr Pointer to cell that should be marked as free
*/
@@ -153,6 +103,101 @@
/*******************************************************************************
+ * Inline functions
+
******************************************************************************/
+
+/**
+ * Arena pointer for given cell.
+ *
+ * @param ptr Pointer to cell for which you want to know the
corresponding
+ * arena
+ * @return The arena the pointer belongs to
+ */
+QCGC_STATIC QCGC_INLINE arena_t *qcgc_arena_addr(cell_t *ptr) {
+ return (arena_t *)((intptr_t) ptr & ~(QCGC_ARENA_SIZE - 1));
+}
+
+/**
+ * Index of cell in arena.
+ *
+ * @param ptr Pointer to cell for which you want to know the
cell index
+ * @return Index of the cell to which ptr points to
+ */
+QCGC_STATIC QCGC_INLINE size_t qcgc_arena_cell_index(cell_t *ptr) {
+ return (size_t)((intptr_t) ptr & (QCGC_ARENA_SIZE - 1)) >> 4;
+}
+
+/**
+ * Get blocktype.
+ *
+ * @param arena Arena in which to perform the lookup
+ * @param index Cell index of the block to look up
+ * @return Blocktype
+ */
+QCGC_STATIC QCGC_INLINE blocktype_t qcgc_arena_get_blocktype(arena_t *arena,
+ size_t index) {
+#if CHECKED
+ assert(arena != NULL);
+ assert(index >= QCGC_ARENA_FIRST_CELL_INDEX);
+ assert(index < QCGC_ARENA_CELLS_COUNT);
+#endif
+ // Read bitmap entry
+ size_t byte = index / 8;
+ uint8_t mask = 0x01 << (index % 8);
+ bool block_bit = ((arena->block_bitmap[byte] & mask) == mask);
+ bool mark_bit = ((arena->mark_bitmap[byte] & mask) == mask);
+
+ if (block_bit) {
+ if (mark_bit) {
+ return BLOCK_BLACK;
+ } else {
+ return BLOCK_WHITE;
+ }
+ } else {
+ if (mark_bit) {
+ return BLOCK_FREE;
+ } else {
+ return BLOCK_EXTENT;
+ }
+ }
+}
+
+/**
+ * Set blocktype.
+ *
+ * @param ptr Pointer to cell for which you want to set the
blocktype
+ * @param type Blocktype that should be set
+ */
+QCGC_STATIC QCGC_INLINE void qcgc_arena_set_blocktype(arena_t *arena,
+ size_t index, blocktype_t type) {
+#if CHECKED
+ assert(arena != NULL);
+ assert(index >= QCGC_ARENA_FIRST_CELL_INDEX);
+ assert(index < QCGC_ARENA_CELLS_COUNT);
+#endif
+ size_t byte = index / 8;
+ uint8_t mask = 0x1 << (index % 8);
+ switch(type) {
+ case BLOCK_EXTENT:
+ arena->block_bitmap[byte] &= ~mask;
+ arena->mark_bitmap[byte] &= ~mask;
+ break;
+ case BLOCK_FREE:
+ arena->block_bitmap[byte] &= ~mask;
+ arena->mark_bitmap[byte] |= mask;
+ break;
+ case BLOCK_WHITE:
+ arena->block_bitmap[byte] |= mask;
+ arena->mark_bitmap[byte] &= ~mask;
+ break;
+ case BLOCK_BLACK:
+ arena->block_bitmap[byte] |= mask;
+ arena->mark_bitmap[byte] |= mask;
+ break;
+ }
+}
+
+/*******************************************************************************
* Debug functions
*
******************************************************************************/
diff --git a/rpython/translator/c/src/qcgc/src/bag.h
b/rpython/translator/c/src/qcgc/src/bag.h
--- a/rpython/translator/c/src/qcgc/src/bag.h
+++ b/rpython/translator/c/src/qcgc/src/bag.h
@@ -1,8 +1,7 @@
#pragma once
-#include "config.h"
+#include "../qcgc.h"
-#include <stddef.h>
#include <stdlib.h>
#include "arena.h"
diff --git a/rpython/translator/c/src/qcgc/src/collector.c
b/rpython/translator/c/src/qcgc/src/collector.c
--- a/rpython/translator/c/src/qcgc/src/collector.c
+++ b/rpython/translator/c/src/qcgc/src/collector.c
@@ -1,8 +1,5 @@
#include "collector.h"
-#include <assert.h>
-#include <stddef.h>
-
#include "arena.h"
#include "allocator.h"
#include "gc_state.h"
@@ -13,7 +10,7 @@
QCGC_STATIC QCGC_INLINE void qcgc_pop_object(object_t *object);
QCGC_STATIC QCGC_INLINE void qcgc_push_object(object_t *object);
-QCGC_STATIC void qcgc_mark(bool incremental) {
+void qcgc_mark(bool incremental) {
{
struct log_info_s {
bool incremental;
@@ -42,8 +39,8 @@
qcgc_state.phase = GC_MARK;
// Always push all roots to make shadowstack pushes faster
- for (object_t **it = qcgc_state.shadow_stack_base;
- it < qcgc_state.shadow_stack;
+ for (object_t **it = qcgc_shadowstack.base;
+ it < qcgc_shadowstack.top;
it++) {
qcgc_push_object(*it);
}
@@ -110,7 +107,8 @@
(object->flags & QCGC_GRAY_FLAG) == QCGC_GRAY_FLAG);
if (((object->flags & QCGC_PREBUILT_OBJECT) == 0) &&
((object_t *) qcgc_arena_addr((cell_t *) object) != object)) {
- assert(qcgc_arena_get_blocktype((cell_t *) object) ==
BLOCK_BLACK);
+ assert(qcgc_arena_get_blocktype(qcgc_arena_addr((cell_t *)
object),
+ qcgc_arena_cell_index((cell_t *)
object)) == BLOCK_BLACK);
}
#endif
object->flags &= ~QCGC_GRAY_FLAG;
@@ -122,22 +120,23 @@
assert(qcgc_state.phase == GC_MARK);
#endif
if (object != NULL) {
- if ((object_t *) qcgc_arena_addr((cell_t *) object) == object) {
+ arena_t *arena = qcgc_arena_addr((cell_t *) object);
+ if ((object_t *) arena == object) {
if (qcgc_hbtable_mark(object)) {
// Did mark it / was white before
object->flags |= QCGC_GRAY_FLAG;
qcgc_state.gp_gray_stack = qcgc_gray_stack_push(
qcgc_state.gp_gray_stack,
object);
}
- return; // Skip tests
+ return;
}
if ((object->flags & QCGC_PREBUILT_OBJECT) != 0) {
- return; // Prebuilt objects are always black, no
pushing here
+ return;
}
- if (qcgc_arena_get_blocktype((cell_t *) object) == BLOCK_WHITE)
{
+ size_t index = qcgc_arena_cell_index((cell_t *) object);
+ if (qcgc_arena_get_blocktype(arena, index) == BLOCK_WHITE) {
object->flags |= QCGC_GRAY_FLAG;
- qcgc_arena_set_blocktype((cell_t *) object,
BLOCK_BLACK);
- arena_t *arena = qcgc_arena_addr((cell_t *) object);
+ qcgc_arena_set_blocktype(arena, index, BLOCK_BLACK);
arena->gray_stack =
qcgc_gray_stack_push(arena->gray_stack, object);
}
}
diff --git a/rpython/translator/c/src/qcgc/src/collector.h
b/rpython/translator/c/src/qcgc/src/collector.h
--- a/rpython/translator/c/src/qcgc/src/collector.h
+++ b/rpython/translator/c/src/qcgc/src/collector.h
@@ -1,12 +1,8 @@
#pragma once
-#include "config.h"
+#include "../qcgc.h"
#include <stdbool.h>
-#include "object.h"
-
-QCGC_STATIC void qcgc_mark(bool incremental);
-QCGC_STATIC void qcgc_sweep(void);
-
-extern void qcgc_trace_cb(object_t *object, void (*visit)(object_t *object));
+void qcgc_mark(bool incremental);
+void qcgc_sweep(void);
diff --git a/rpython/translator/c/src/qcgc/src/config.h
b/rpython/translator/c/src/qcgc/src/config.h
deleted file mode 100644
--- a/rpython/translator/c/src/qcgc/src/config.h
+++ /dev/null
@@ -1,58 +0,0 @@
-#pragma once
-
-#define CHECKED 0 //
Enable runtime sanity checks
-#define DEBUG_ZERO_ON_SWEEP 0 // Zero memory on sweep
(debug only)
-
-#define QCGC_INIT_ZERO 0 // Init new
objects with zero bytes
-
-/**
- * Event logger
- */
-#define EVENT_LOG 1 //
Enable event log
-#define LOGFILE "./qcgc_events.log" // Default logfile
-#define LOG_ALLOCATION 0 // Enable
allocation log (warning:
-
// significant performance impact)
-
-#define QCGC_SHADOWSTACK_SIZE 163840 // Total shadowstack size
-#define QCGC_ARENA_BAG_INIT_SIZE 16 // Initial size of the
arena bag
-#define QCGC_ARENA_SIZE_EXP 20 // Between 16 (64kB)
and 20 (1MB)
-#define QCGC_LARGE_ALLOC_THRESHOLD_EXP 14 // Less than QCGC_ARENA_SIZE_EXP
-#define QCGC_MARK_LIST_SEGMENT_SIZE 64 // TODO: Tune for performance
-#define QCGC_GRAY_STACK_INIT_SIZE 128 // TODO: Tune for performance
-#define QCGC_INC_MARK_MIN 64 // TODO: Tune for
performance
-
-/**
- * Fit allocator
- */
-#define QCGC_LARGE_FREE_LIST_FIRST_EXP 5 // First exponent of large free
list
-#define QCGC_LARGE_FREE_LIST_INIT_SIZE 4 // Initial size for large free
lists
-#define QCGC_SMALL_FREE_LIST_INIT_SIZE 16 // Initial size for small free
lists
-
-/**
- * Auto Mark/Collect
- */
-#define QCGC_MAJOR_COLLECTION_THRESHOLD (5 * (1<<QCGC_ARENA_SIZE_EXP))
-#define QCGC_INCMARK_THRESHOLD (1<<QCGC_ARENA_SIZE_EXP)
-
-/**
- * DO NOT MODIFY BELOW HERE
- */
-
-#if QCGC_LARGE_ALLOC_THRESHOLD_EXP >= QCGC_ARENA_SIZE_EXP
-#error "Inconsistent configuration. Huge block threshold must be smaller " \
- "than the arena size."
-#endif
-
-#ifdef TESTING
-#define QCGC_STATIC
-#define QCGC_INLINE
-#else
-#define QCGC_STATIC static
-#define QCGC_INLINE inline __attribute((always_inline))
-#endif
-
-#define MAX(a,b) (((a)>(b))?(a):(b))
-#define MIN(a,b) (((a)<(b))?(a):(b))
-#define UNUSED(x) (void)(x)
-#define LIKELY(x) __builtin_expect((x), 1)
-#define UNLIKELY(x) __builtin_expect((x), 0)
diff --git a/rpython/translator/c/src/qcgc/src/core.c
b/rpython/translator/c/src/qcgc/src/core.c
--- a/rpython/translator/c/src/qcgc/src/core.c
+++ b/rpython/translator/c/src/qcgc/src/core.c
@@ -59,15 +59,6 @@
free(qcgc_state.gp_gray_stack);
}
-void qcgc_push_root(object_t *object) {
- *qcgc_state.shadow_stack = object;
- qcgc_state.shadow_stack++;
-}
-
-void qcgc_pop_root(void) {
- qcgc_state.shadow_stack--;
-}
-
object_t *qcgc_allocate(size_t size) {
#if LOG_ALLOCATION
qcgc_event_logger_log(EVENT_ALLOCATE_START, sizeof(size_t),
diff --git a/rpython/translator/c/src/qcgc/src/core.h
b/rpython/translator/c/src/qcgc/src/core.h
--- a/rpython/translator/c/src/qcgc/src/core.h
+++ b/rpython/translator/c/src/qcgc/src/core.h
@@ -9,8 +9,6 @@
void qcgc_initialize(void);
void qcgc_destroy(void);
-void qcgc_push_root(object_t *object);
-void qcgc_pop_root(void);
object_t *qcgc_allocate(size_t size);
void qcgc_collect(void);
void qcgc_write(object_t *object);
diff --git a/rpython/translator/c/src/qcgc/src/event_logger.h
b/rpython/translator/c/src/qcgc/src/event_logger.h
--- a/rpython/translator/c/src/qcgc/src/event_logger.h
+++ b/rpython/translator/c/src/qcgc/src/event_logger.h
@@ -1,6 +1,6 @@
#pragma once
-#include "config.h"
+#include "../config.h"
#include <stddef.h>
#include <stdint.h>
diff --git a/rpython/translator/c/src/qcgc/src/gc_state.h
b/rpython/translator/c/src/qcgc/src/gc_state.h
--- a/rpython/translator/c/src/qcgc/src/gc_state.h
+++ b/rpython/translator/c/src/qcgc/src/gc_state.h
@@ -1,5 +1,7 @@
#pragma once
+#include "../qcgc.h"
+
#include <stddef.h>
#include "bag.h"
@@ -25,8 +27,6 @@
* Global state of the garbage collector
*/
struct qcgc_state {
- object_t **shadow_stack;
- object_t **shadow_stack_base;
shadow_stack_t *prebuilt_objects;
weakref_bag_t *weakrefs;
gray_stack_t *gp_gray_stack;
@@ -43,6 +43,4 @@
size_t largest_free_block; // Size of the largest free block.
// (Free arenas
don't count as free blocks)
// Valid right
after sweep
-};
-
-QCGC_STATIC struct qcgc_state qcgc_state;
+} qcgc_state;
diff --git a/rpython/translator/c/src/qcgc/src/gray_stack.h
b/rpython/translator/c/src/qcgc/src/gray_stack.h
--- a/rpython/translator/c/src/qcgc/src/gray_stack.h
+++ b/rpython/translator/c/src/qcgc/src/gray_stack.h
@@ -1,10 +1,6 @@
#pragma once
-#include "config.h"
-
-#include <stddef.h>
-
-#include "object.h"
+#include "../qcgc.h"
typedef struct gray_stack_s {
size_t index;
diff --git a/rpython/translator/c/src/qcgc/src/hugeblocktable.c
b/rpython/translator/c/src/qcgc/src/hugeblocktable.c
--- a/rpython/translator/c/src/qcgc/src/hugeblocktable.c
+++ b/rpython/translator/c/src/qcgc/src/hugeblocktable.c
@@ -2,8 +2,6 @@
#include <assert.h>
-#include "gc_state.h"
-
QCGC_STATIC size_t bucket(object_t *object);
void qcgc_hbtable_initialize(void) {
diff --git a/rpython/translator/c/src/qcgc/src/hugeblocktable.h
b/rpython/translator/c/src/qcgc/src/hugeblocktable.h
--- a/rpython/translator/c/src/qcgc/src/hugeblocktable.h
+++ b/rpython/translator/c/src/qcgc/src/hugeblocktable.h
@@ -1,11 +1,10 @@
#pragma once
-#include "config.h"
+#include "../qcgc.h"
#include <stdbool.h>
#include "bag.h"
-#include "object.h"
#include "gray_stack.h"
// Choosing a prime number, hoping for good results
diff --git a/rpython/translator/c/src/qcgc/src/mark_list.c
b/rpython/translator/c/src/qcgc/src/mark_list.c
deleted file mode 100644
--- a/rpython/translator/c/src/qcgc/src/mark_list.c
+++ /dev/null
@@ -1,180 +0,0 @@
-#include "mark_list.h"
-
-#include <assert.h>
-
-#include <stdlib.h>
-#include <string.h>
-
-QCGC_STATIC mark_list_t *qcgc_mark_list_grow(mark_list_t *list);
-QCGC_STATIC void qcgc_mark_list_check_invariant(mark_list_t *list);
-
-mark_list_t *qcgc_mark_list_create(size_t initial_size) {
- size_t length = (initial_size + QCGC_MARK_LIST_SEGMENT_SIZE - 1) /
QCGC_MARK_LIST_SEGMENT_SIZE;
- length += (size_t) length == 0;
- mark_list_t *result = (mark_list_t *)
- malloc(sizeof(mark_list_t) + length * sizeof(object_t **));
- result->head = 0;
- result->tail = 0;
- result->length = length;
- result->insert_index = 0;
- result->count = 0;
- result->segments[result->head] = (object_t **)
- calloc(QCGC_MARK_LIST_SEGMENT_SIZE, sizeof(object_t *));
-#if CHECKED
- qcgc_mark_list_check_invariant(result);
-#endif
- return result;
-}
-
-void qcgc_mark_list_destroy(mark_list_t *list) {
-#if CHECKED
- qcgc_mark_list_check_invariant(list);
-#endif
-
- size_t i = list->head;
- while (i != list->tail) {
- free(list->segments[i]);
- i = (i + 1) % list->length;
- }
- free(list->segments[list->tail]);
- free(list);
-}
-
-mark_list_t *qcgc_mark_list_push(mark_list_t *list, object_t *object) {
-#if CHECKED
- assert(list != NULL);
- assert(object != NULL);
-
- qcgc_mark_list_check_invariant(list);
- size_t old_count = list->count;
-#endif
- if (list->insert_index >= QCGC_MARK_LIST_SEGMENT_SIZE) {
- if ((list->tail + 1) % list->length == list->head) {
- list = qcgc_mark_list_grow(list);
- }
- list->insert_index = 0;
- list->tail = (list->tail + 1) % list->length;
- list->segments[list->tail] = (object_t **)
- calloc(QCGC_MARK_LIST_SEGMENT_SIZE, sizeof(object_t *));
- }
- list->segments[list->tail][list->insert_index] = object;
- list->insert_index++;
- list->count++;
-#if CHECKED
- assert(list->count == old_count + 1);
- assert(list->segments[list->tail][list->insert_index - 1] == object);
- qcgc_mark_list_check_invariant(list);
-#endif
- return list;
-}
-
-mark_list_t *qcgc_mark_list_push_all(mark_list_t *list,
- object_t **objects, size_t count) {
-#if CHECKED
- assert(list != NULL);
- assert(objects != NULL);
-
- qcgc_mark_list_check_invariant(list);
-
- size_t old_count = list->count;
- for (size_t i = 0; i < count; i++) {
- assert(objects[i] != NULL);
- }
-#endif
- // FIXME: Optimize or remove
- for (size_t i = 0; i < count; i++) {
- list = qcgc_mark_list_push(list, objects[i]);
- }
-#if CHECKED
- assert(list->count == old_count + count);
- qcgc_mark_list_check_invariant(list);
-#endif
- return list;
-}
-
-object_t **qcgc_mark_list_get_head_segment(mark_list_t *list) {
-#if CHECKED
- assert(list != NULL);
- assert(list->segments[list->head] != NULL);
- qcgc_mark_list_check_invariant(list);
-#endif
- return list->segments[list->head];
-}
-
-mark_list_t *qcgc_mark_list_drop_head_segment(mark_list_t *list) {
-#if CHECKED
- assert(list != NULL);
- size_t old_head = list->head;
- size_t old_tail = list->tail;
- qcgc_mark_list_check_invariant(list);
-#endif
- if (list->head != list->tail) {
- free(list->segments[list->head]);
- list->segments[list->head] = NULL;
- list->head = (list->head + 1) % list->length;
- list->count -= QCGC_MARK_LIST_SEGMENT_SIZE;
- } else {
- memset(list->segments[list->head], 0,
- sizeof(object_t *) *
QCGC_MARK_LIST_SEGMENT_SIZE);
- list->insert_index = 0;
- list->count = 0;
- }
-#if CHECKED
- assert(old_tail == list->tail);
- if (old_head == old_tail) {
- assert(old_head == list->head);
- } else {
- assert((old_head + 1) % list->length == list->head);
- }
- qcgc_mark_list_check_invariant(list);
-#endif
- return list;
-}
-
-QCGC_STATIC mark_list_t *qcgc_mark_list_grow(mark_list_t *list) {
-#if CHECKED
- assert(list != NULL);
- size_t old_length = list->length;
- size_t old_tail = list->tail;
- qcgc_mark_list_check_invariant(list);
-#endif
- mark_list_t *new_list = (mark_list_t *) realloc(list,
- sizeof(mark_list_t) + 2 * list->length *
sizeof(object_t **));
- if (new_list->tail < new_list->head) {
- memcpy(new_list->segments + new_list->length,
- new_list->segments, (new_list->tail + 1) *
sizeof(object_t **));
- new_list->tail = new_list->length + new_list->tail;
- }
- new_list->length = 2 * new_list->length;
-#if CHECKED
- assert(new_list->length == 2 * old_length);
- if (old_tail < new_list->head) {
- assert(new_list->tail == old_tail + old_length);
- for (size_t i = 0; i < old_tail; i++) {
- assert(new_list->segments[i] == new_list->segments[i +
old_length]);
- }
- } else {
- assert(new_list->tail == old_tail);
- }
- qcgc_mark_list_check_invariant(new_list);
-#endif
- return new_list;
-}
-
-QCGC_STATIC void qcgc_mark_list_check_invariant(mark_list_t *list) {
- assert(list->head < list->length);
- assert(list->tail < list->length);
- assert(list->count == (list->tail - list->head + list->length) %
list->length * QCGC_MARK_LIST_SEGMENT_SIZE + list->insert_index);
- for (size_t i = 0; i < list->length; i++) {
- if ((list->head <= i && i <= list->tail) || (list->tail <
list->head &&
- (i <= list->tail || i >= list->head))) {
- for (size_t j = 0; j < QCGC_MARK_LIST_SEGMENT_SIZE;
j++) {
- if (i != list->tail || j < list->insert_index) {
- assert(list->segments[i][j] != NULL);
- } else {
- assert(list->segments[i][j] == NULL);
- }
- }
- }
- }
-}
diff --git a/rpython/translator/c/src/qcgc/src/mark_list.h
b/rpython/translator/c/src/qcgc/src/mark_list.h
deleted file mode 100644
--- a/rpython/translator/c/src/qcgc/src/mark_list.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * @file mark_list.h
- *
- * Object list for marking step
- */
-
-#pragma once
-
-#include "config.h"
-
-#include <stddef.h>
-
-#include "object.h"
-
-/**
- * Mark list - circular buffer.
- */
-typedef struct mark_list_s {
- size_t head;
- size_t tail;
- size_t length;
- size_t count;
- size_t insert_index;
- object_t **segments[];
-} mark_list_t;
-
-__attribute__ ((warn_unused_result))
-mark_list_t *qcgc_mark_list_create(size_t initial_size);
-void qcgc_mark_list_destroy(mark_list_t *list);
-
-__attribute__ ((warn_unused_result))
-mark_list_t *qcgc_mark_list_push(mark_list_t *list, object_t *object);
-
-__attribute__ ((warn_unused_result))
-mark_list_t *qcgc_mark_list_push_all(mark_list_t *list,
- object_t **objects, size_t count);
-
-object_t **qcgc_mark_list_get_head_segment(mark_list_t *list);
-
-__attribute__ ((warn_unused_result))
-mark_list_t *qcgc_mark_list_drop_head_segment(mark_list_t *list);
diff --git a/rpython/translator/c/src/qcgc/src/object.h
b/rpython/translator/c/src/qcgc/src/object.h
deleted file mode 100644
--- a/rpython/translator/c/src/qcgc/src/object.h
+++ /dev/null
@@ -1,17 +0,0 @@
-#pragma once
-
-
-#include "config.h"
-#include <stdint.h>
-
-/**
- * The lower half of flags is reserved for the library, the upper half for
- * clients
- */
-#define QCGC_GRAY_FLAG (1<<0)
-#define QCGC_PREBUILT_OBJECT (1<<1)
-#define QCGC_PREBUILT_REGISTERED (1<<2)
-
-typedef struct object_s {
- uint32_t flags;
-} object_t;
diff --git a/rpython/translator/c/src/qcgc/src/shadow_stack.h
b/rpython/translator/c/src/qcgc/src/shadow_stack.h
--- a/rpython/translator/c/src/qcgc/src/shadow_stack.h
+++ b/rpython/translator/c/src/qcgc/src/shadow_stack.h
@@ -1,10 +1,6 @@
#pragma once
-#include "config.h"
-
-#include <stddef.h>
-
-#include "object.h"
+#include "../qcgc.h"
typedef struct shadow_stack_s {
size_t count;
diff --git a/rpython/translator/c/src/qcgc/src/signal_handler.c
b/rpython/translator/c/src/qcgc/src/signal_handler.c
--- a/rpython/translator/c/src/qcgc/src/signal_handler.c
+++ b/rpython/translator/c/src/qcgc/src/signal_handler.c
@@ -7,7 +7,6 @@
#include "arena.h"
#include "allocator.h"
-#include "gc_state.h"
QCGC_STATIC void handle_error(int signo, siginfo_t *siginfo, void *context);
QCGC_STATIC bool is_stack_overflow(void *addr);
@@ -44,7 +43,7 @@
}
QCGC_STATIC bool is_stack_overflow(void *addr) {
- void *shadow_stack_end = (void *)(qcgc_state.shadow_stack_base +
+ void *shadow_stack_end = (void *)(qcgc_shadowstack.base +
QCGC_SHADOWSTACK_SIZE);
return (addr >= shadow_stack_end && addr < shadow_stack_end + 8192);
}
diff --git a/rpython/translator/c/src/qcgc/src/signal_handler.h
b/rpython/translator/c/src/qcgc/src/signal_handler.h
--- a/rpython/translator/c/src/qcgc/src/signal_handler.h
+++ b/rpython/translator/c/src/qcgc/src/signal_handler.h
@@ -1,5 +1,5 @@
#pragma once
-#include "config.h"
+#include "../qcgc.h"
void setup_signal_handler(void);
diff --git a/rpython/translator/c/src/qcgc/src/weakref.c
b/rpython/translator/c/src/qcgc/src/weakref.c
--- a/rpython/translator/c/src/qcgc/src/weakref.c
+++ b/rpython/translator/c/src/qcgc/src/weakref.c
@@ -2,33 +2,20 @@
#include <assert.h>
+#include "arena.h"
#include "bag.h"
#include "gc_state.h"
#include "hugeblocktable.h"
-void qcgc_register_weakref(object_t *weakrefobj, object_t **target) {
-#if CHECKED
- assert((weakrefobj->flags & QCGC_PREBUILT_OBJECT) == 0);
- assert((object_t *) qcgc_arena_addr((cell_t *) weakrefobj) !=
weakrefobj);
-#endif
- // NOTE: At this point, the target must point to a pointer to a valid
- // object. We don't register any weakrefs to prebuilt objects as they
- // are always valid.
- if (((*target)->flags & QCGC_PREBUILT_OBJECT) == 0) {
- qcgc_state.weakrefs = qcgc_weakref_bag_add(qcgc_state.weakrefs,
- (struct weakref_bag_item_s) {
- .weakrefobj = weakrefobj,
- .target = target});
- }
-}
-
-QCGC_STATIC void update_weakrefs(void) {
+void update_weakrefs(void) {
size_t i = 0;
while (i < qcgc_state.weakrefs->count) {
struct weakref_bag_item_s item = qcgc_state.weakrefs->items[i];
// Check whether weakref object itself was collected
// We know the weakref object is a normal object
- switch(qcgc_arena_get_blocktype((cell_t *) item.weakrefobj)) {
+ switch (qcgc_arena_get_blocktype(
+ qcgc_arena_addr((cell_t *)
item.weakrefobj),
+ qcgc_arena_cell_index((cell_t *)
item.weakrefobj))) {
case BLOCK_EXTENT: // Fall through
case BLOCK_FREE:
// Weakref itself was collected, forget it
@@ -57,7 +44,9 @@
}
} else {
// Normal object
- switch(qcgc_arena_get_blocktype((cell_t *) points_to)) {
+ switch (qcgc_arena_get_blocktype(
+ qcgc_arena_addr((cell_t *)
points_to),
+ qcgc_arena_cell_index((cell_t
*) points_to))) {
case BLOCK_BLACK: // Still valid
case BLOCK_WHITE:
i++;
diff --git a/rpython/translator/c/src/qcgc/src/weakref.h
b/rpython/translator/c/src/qcgc/src/weakref.h
--- a/rpython/translator/c/src/qcgc/src/weakref.h
+++ b/rpython/translator/c/src/qcgc/src/weakref.h
@@ -1,8 +1,5 @@
#pragma once
-#include "config.h"
+#include "../qcgc.h"
-#include "object.h"
-
-void qcgc_register_weakref(object_t *weakrefobj, object_t **target);
-QCGC_STATIC void update_weakrefs(void);
+void update_weakrefs(void);
diff --git a/rpython/translator/platform/linux.py
b/rpython/translator/platform/linux.py
--- a/rpython/translator/platform/linux.py
+++ b/rpython/translator/platform/linux.py
@@ -14,7 +14,7 @@
extra_libs = ('-lrt',)
cflags = tuple(
['-O3', '-pthread', '-fomit-frame-pointer',
- '-Wall', '-Wno-unused']
+ '-Wall', '-Wno-unused', '-g']
+ os.environ.get('CFLAGS', '').split())
standalone_only = ()
shared_only = ('-fPIC',)
_______________________________________________
pypy-commit mailing list
[email protected]
https://mail.python.org/mailman/listinfo/pypy-commit