Author: Nicolas Truessel <[email protected]>
Branch: quad-color-gc
Changeset: r86892:37cdb0f205ea
Date: 2016-09-05 21:15 +0200
http://bitbucket.org/pypy/pypy/changeset/37cdb0f205ea/
Log: Update qcgc codebase
diff --git a/rpython/memory/gctransform/qcgcframework.py
b/rpython/memory/gctransform/qcgcframework.py
--- a/rpython/memory/gctransform/qcgcframework.py
+++ b/rpython/memory/gctransform/qcgcframework.py
@@ -2,7 +2,8 @@
from rpython.rtyper.lltypesystem import lltype, llmemory, rffi
from rpython.rtyper import rmodel
from rpython.rtyper.lltypesystem.lloperation import llop
-from rpython.memory.gctransform.framework import (BaseFrameworkGCTransformer,
BaseRootWalker)
+from rpython.memory.gctransform.framework import (BaseFrameworkGCTransformer,
+ BaseRootWalker, TYPE_ID, WEAKREF, WEAKREFPTR)
VISIT_FPTR = lltype.Ptr(lltype.FuncType([llmemory.Address], lltype.Void))
@@ -34,6 +35,12 @@
[SomeAddress(), SomePtr(VISIT_FPTR)],
s_None))
+ #Compilation error when overriding, no idea why
+ #def finish_tables(self):
+ # BaseFrameworkGCTransformer.finish_tables(self)
+ #Makes test fail, works when translating pypy (but compiling still
fails)
+ #assert len(self.layoutbuilder.addresses_of_static_ptrs_in_nongc) == 2
+
def gc_header_for(self, obj, needs_hash=False):
hdr = self.gcdata.gc.gcheaderbuilder.header_of_object(obj)
if needs_hash:
@@ -77,6 +84,40 @@
# hop.genop("cast_adr_to_ptr", [v_adr],
# resultvar = hop.spaceop.result)
    def gct_weakref_create(self, hop):
        """Transform a ``weakref_create`` operation for the qcgc framework.

        Mallocs a fixed-size WEAKREF structure (with ``has_weakptr=True`` so
        the GC is aware of the embedded weak pointer), stores the address of
        the referenced instance into its ``weakptr`` field, and hands a
        WeakRefPtr back to the caller.
        """
        # Custom weakref creation as their registration is slightly different
        op = hop.spaceop

        # Type id and fixed size of the WEAKREF structure: both are needed
        # as arguments for the fixed-size malloc below.
        type_id = self.get_type_id(WEAKREF)

        c_type_id = rmodel.inputconst(TYPE_ID, type_id)
        info = self.layoutbuilder.get_info(type_id)
        c_size = rmodel.inputconst(lltype.Signed, info.fixedsize)
        malloc_ptr = self.malloc_fixedsize_ptr
        c_false = rmodel.inputconst(lltype.Bool, False)
        c_has_weakptr = rmodel.inputconst(lltype.Bool, True)
        args = [self.c_const_gc, c_type_id, c_size,
                c_false, c_false, c_has_weakptr]

        # push and pop the current live variables *including* the argument
        # to the weakref_create operation, which must be kept alive if the GC
        # needs to collect
        livevars = self.push_roots(hop, keep_current_args=True)
        v_result = hop.genop("direct_call", [malloc_ptr] + args,
                             resulttype=llmemory.GCREF)
        v_result = hop.genop("cast_opaque_ptr", [v_result],
                             resulttype=WEAKREFPTR)
        self.pop_roots(hop, livevars)
        #
        # Store the referenced instance's address into the fresh weakref.
        # NOTE(review): bare_setfield presumably skips the GC write barrier,
        # which looks safe only because the weakref was just allocated --
        # confirm against the qcgc write-barrier rules.
        v_instance, = op.args
        v_addr = hop.genop("cast_ptr_to_adr", [v_instance],
                           resulttype=llmemory.Address)
        hop.genop("bare_setfield",
                  [v_result, rmodel.inputconst(lltype.Void, "weakptr"),
                   v_addr])
        v_weakref = hop.genop("cast_ptr_to_weakrefptr", [v_result],
                              resulttype=llmemory.WeakRefPtr)
        hop.cast_result(v_weakref)
+
class QcgcRootWalker(BaseRootWalker):
    def walk_stack_roots(self, collect_stack_root, is_minor=False):
        """Not implemented for the qcgc root walker."""
        raise NotImplementedError
diff --git a/rpython/rtyper/lltypesystem/lloperation.py
b/rpython/rtyper/lltypesystem/lloperation.py
--- a/rpython/rtyper/lltypesystem/lloperation.py
+++ b/rpython/rtyper/lltypesystem/lloperation.py
@@ -515,10 +515,9 @@
# can malloc a GC object.
# __________ qcgc operations __________
- 'qcgc_allocate': LLOp(canmallocgc=True),
- 'qcgc_collect': LLOp(canmallocgc=True),
- 'qcgc_is_prebuilt': LLOp(),
- 'qcgc_write_barrier': LLOp(),
+ 'qcgc_allocate': LLOp(canmallocgc=True),
+ 'qcgc_collect': LLOp(canmallocgc=True),
+ 'qcgc_write_barrier': LLOp(),
# __________ weakrefs __________
diff --git a/rpython/translator/c/funcgen.py b/rpython/translator/c/funcgen.py
--- a/rpython/translator/c/funcgen.py
+++ b/rpython/translator/c/funcgen.py
@@ -960,12 +960,6 @@
def OP_QCGC_COLLECT(self, op):
return 'qcgc_collect();'
- def OP_QCGC_IS_PREBUILT(self, op):
- obj = self.expr(op.args[0])
- result = self.expr(op.result)
- return '%s = (((object_t *) %s)->flags & QCGC_PREBUILT_OBJECT) != 0;'
% (
- result, obj)
-
def OP_QCGC_WRITE_BARRIER(self, op):
obj = self.expr(op.args[0])
return 'qcgc_write(%s);' % (obj,)
diff --git a/rpython/translator/c/src/qcgc/bag.c
b/rpython/translator/c/src/qcgc/bag.c
--- a/rpython/translator/c/src/qcgc/bag.c
+++ b/rpython/translator/c/src/qcgc/bag.c
@@ -6,3 +6,4 @@
DEFINE_BAG(linear_free_list, cell_t *);
DEFINE_BAG(exp_free_list, struct exp_free_list_item_s);
DEFINE_BAG(hbbucket, struct hbtable_entry_s);
+DEFINE_BAG(weakref_bag, struct weakref_bag_item_s);
diff --git a/rpython/translator/c/src/qcgc/bag.h
b/rpython/translator/c/src/qcgc/bag.h
--- a/rpython/translator/c/src/qcgc/bag.h
+++ b/rpython/translator/c/src/qcgc/bag.h
@@ -92,7 +92,13 @@
bool mark_flag;
};
/*
 * One registered weakref: the weakref object itself plus the location of
 * its pointer to the referenced object (the slot is set to NULL once the
 * referenced object is collected).
 */
struct weakref_bag_item_s {
	object_t *weakrefobj;	/* the weakref object itself */
	object_t **target;	/* address of the weakref's reference slot */
};
+
DECLARE_BAG(arena_bag, arena_t *);
DECLARE_BAG(linear_free_list, cell_t *);
DECLARE_BAG(exp_free_list, struct exp_free_list_item_s);
DECLARE_BAG(hbbucket, struct hbtable_entry_s);
+DECLARE_BAG(weakref_bag, struct weakref_bag_item_s);
diff --git a/rpython/translator/c/src/qcgc/config.h
b/rpython/translator/c/src/qcgc/config.h
--- a/rpython/translator/c/src/qcgc/config.h
+++ b/rpython/translator/c/src/qcgc/config.h
@@ -1,15 +1,14 @@
#pragma once
-#define CHECKED 0 //
Enable runtime sanity checks
-
// warning: huge performance impact
-#define DEBUG_ZERO_ON_SWEEP 0 // Zero memory on sweep
(debug only)
+#define CHECKED 1 //
Enable runtime sanity checks
+#define DEBUG_ZERO_ON_SWEEP 1 // Zero memory on sweep
(debug only)
#define QCGC_INIT_ZERO 1 // Init new
objects with zero bytes
/**
* Event logger
*/
-#define EVENT_LOG 0 //
Enable event log
+#define EVENT_LOG 1 //
Enable event log
#define LOGFILE "./qcgc_events.log" // Default logfile
#define LOG_ALLOCATION 0 // Enable
allocation log (warning:
// significant performance impact)
@@ -31,6 +30,12 @@
#define QCGC_SMALL_FREE_LIST_INIT_SIZE 16 // Initial size for small free
lists
/**
+ * Auto Mark/Collect
+ */
+#define QCGC_MAJOR_COLLECTION_THRESHOLD (5 * (1<<QCGC_ARENA_SIZE_EXP))
+#define QCGC_INCMARK_THRESHOLD (1<<QCGC_ARENA_SIZE_EXP)
+
+/**
* DO NOT MODIFY BELOW HERE
*/
diff --git a/rpython/translator/c/src/qcgc/gc_state.h
b/rpython/translator/c/src/qcgc/gc_state.h
--- a/rpython/translator/c/src/qcgc/gc_state.h
+++ b/rpython/translator/c/src/qcgc/gc_state.h
@@ -2,6 +2,8 @@
#include <stddef.h>
+#include "bag.h"
+#include "gray_stack.h"
#include "shadow_stack.h"
/**
@@ -25,7 +27,10 @@
struct qcgc_state {
shadow_stack_t *shadow_stack;
shadow_stack_t *prebuilt_objects;
+ weakref_bag_t *weakrefs;
gray_stack_t *gp_gray_stack;
size_t gray_stack_size;
gc_phase_t phase;
+ size_t bytes_since_collection;
+ size_t bytes_since_incmark;
} qcgc_state;
diff --git a/rpython/translator/c/src/qcgc/hugeblocktable.c
b/rpython/translator/c/src/qcgc/hugeblocktable.c
--- a/rpython/translator/c/src/qcgc/hugeblocktable.c
+++ b/rpython/translator/c/src/qcgc/hugeblocktable.c
@@ -45,6 +45,17 @@
return false;
}
+bool qcgc_hbtable_has(object_t *object) {
+ hbbucket_t *b = qcgc_hbtable.bucket[bucket(object)];
+ size_t count = b->count;
+ for (size_t i = 0; i < count; i++) {
+ if (b->items[i].object == object) {
+ return true;
+ }
+ }
+ return false;
+}
+
bool qcgc_hbtable_is_marked(object_t *object) {
hbbucket_t *b = qcgc_hbtable.bucket[bucket(object)];
size_t count = b->count;
diff --git a/rpython/translator/c/src/qcgc/hugeblocktable.h
b/rpython/translator/c/src/qcgc/hugeblocktable.h
--- a/rpython/translator/c/src/qcgc/hugeblocktable.h
+++ b/rpython/translator/c/src/qcgc/hugeblocktable.h
@@ -20,5 +20,6 @@
void qcgc_hbtable_destroy(void);
void qcgc_hbtable_insert(object_t *object);
bool qcgc_hbtable_mark(object_t *object);
+bool qcgc_hbtable_has(object_t *object);
bool qcgc_hbtable_is_marked(object_t *object);
void qcgc_hbtable_sweep(void);
diff --git a/rpython/translator/c/src/qcgc/qcgc.c
b/rpython/translator/c/src/qcgc/qcgc.c
--- a/rpython/translator/c/src/qcgc/qcgc.c
+++ b/rpython/translator/c/src/qcgc/qcgc.c
@@ -10,20 +10,41 @@
#include "hugeblocktable.h"
#include "event_logger.h"
/*
 * env_or_fallback(var, env_name, fallback)
 *
 * Overwrite `var` with the value of environment variable `env_name`,
 * parsed as a size_t.  If the variable is set but does not parse, `var`
 * is reset to `fallback`; if it is unset, `var` keeps its current value
 * (callers initialize it with the compile-time default).
 *
 * Wrapped in the standard do { ... } while(0) idiom so it expands to a
 * single statement.  BUGFIX: the previous `while(0) { ... }` form was a
 * loop whose body never executes, so the environment variables were
 * silently ignored and the compile-time thresholds always used.
 */
#define env_or_fallback(var, env_name, fallback) do {			\
	char *env_val = getenv(env_name);				\
	if (env_val != NULL) {						\
		if (1 != sscanf(env_val, "%zu", &var)) {		\
			var = fallback;					\
		}							\
	}								\
} while(0)
+
void qcgc_mark(bool incremental);
void qcgc_pop_object(object_t *object);
void qcgc_push_object(object_t *object);
void qcgc_sweep(void);
+static size_t major_collection_threshold = QCGC_MAJOR_COLLECTION_THRESHOLD;
+static size_t incmark_threshold = QCGC_INCMARK_THRESHOLD;
+
+QCGC_STATIC void update_weakrefs(void);
+
void qcgc_initialize(void) {
qcgc_state.shadow_stack =
qcgc_shadow_stack_create(QCGC_SHADOWSTACK_SIZE);
- qcgc_state.prebuilt_objects = qcgc_shadow_stack_create(16); //XXX
+ qcgc_state.prebuilt_objects = qcgc_shadow_stack_create(16); // XXX
+ qcgc_state.weakrefs = qcgc_weakref_bag_create(16); // XXX
qcgc_state.gp_gray_stack = qcgc_gray_stack_create(16); // XXX
qcgc_state.gray_stack_size = 0;
qcgc_state.phase = GC_PAUSE;
+ qcgc_state.bytes_since_collection = 0;
+ qcgc_state.bytes_since_incmark = 0;
qcgc_allocator_initialize();
qcgc_hbtable_initialize();
qcgc_event_logger_initialize();
+
+ env_or_fallback(major_collection_threshold, "QCGC_MAJOR_COLLECTION",
+ QCGC_MAJOR_COLLECTION_THRESHOLD);
+ env_or_fallback(incmark_threshold, "QCGC_INCMARK",
QCGC_INCMARK_THRESHOLD);
}
void qcgc_destroy(void) {
@@ -32,6 +53,7 @@
qcgc_allocator_destroy();
free(qcgc_state.shadow_stack);
free(qcgc_state.prebuilt_objects);
+ free(qcgc_state.weakrefs);
free(qcgc_state.gp_gray_stack);
}
@@ -113,6 +135,14 @@
(uint8_t *) &size);
#endif
object_t *result;
+
+ if (qcgc_state.bytes_since_collection > major_collection_threshold) {
+ qcgc_collect();
+ }
+ if (qcgc_state.bytes_since_incmark > incmark_threshold) {
+ qcgc_mark(true);
+ }
+
if (size <= 1<<QCGC_LARGE_ALLOC_THRESHOLD_EXP) {
// Use bump / fit allocator
if (true) { // FIXME: Implement reasonable switch
@@ -130,6 +160,11 @@
result = qcgc_large_allocate(size);
}
+ // XXX: Should we use cells instead of bytes?
+ qcgc_state.bytes_since_collection += size;
+ qcgc_state.bytes_since_incmark += size;
+
+
#if LOG_ALLOCATION
qcgc_event_logger_log(EVENT_ALLOCATE_DONE, sizeof(object_t *),
(uint8_t *) &result);
@@ -165,11 +200,12 @@
}
void qcgc_mark(bool incremental) {
-#if CHECKED
- assert(qcgc_state.phase == GC_PAUSE || qcgc_state.phase == GC_MARK);
-#endif
+ if (qcgc_state.phase == GC_COLLECT) {
+ return; // Fast exit when there is nothing to mark
+ }
// FIXME: Log some more information
qcgc_event_logger_log(EVENT_MARK_START, 0, NULL);
+ qcgc_state.bytes_since_incmark = 0;
if (qcgc_state.phase == GC_PAUSE) {
qcgc_state.phase = GC_MARK;
@@ -329,9 +365,79 @@
qcgc_state.phase = GC_PAUSE;
qcgc_event_logger_log(EVENT_SWEEP_DONE, 0, NULL);
+ update_weakrefs();
}
void qcgc_collect(void) {
qcgc_mark(false);
qcgc_sweep();
+ qcgc_state.bytes_since_collection = 0;
}
+
/*
 * Register a weakref so that update_weakrefs() can clear its reference
 * slot when the referenced object dies.
 *
 * @param weakrefobj The weakref object itself.
 * @param target     Address of the weakref's reference slot; must currently
 *                   hold a pointer to a valid object.
 */
void qcgc_register_weakref(object_t *weakrefobj, object_t **target) {
#if CHECKED
	// The weakref object itself must be a regular heap object: neither
	// prebuilt nor huge (an object whose address equals its arena
	// address is a huge object) -- update_weakrefs() relies on this.
	assert((weakrefobj->flags & QCGC_PREBUILT_OBJECT) == 0);
	assert((object_t *) qcgc_arena_addr((cell_t *) weakrefobj) !=
			weakrefobj);
#endif
	// NOTE: At this point, the target must point to a pointer to a valid
	// object. We don't register any weakrefs to prebuilt objects as they
	// are always valid.
	if (((*target)->flags & QCGC_PREBUILT_OBJECT) == 0) {
		qcgc_state.weakrefs = qcgc_weakref_bag_add(qcgc_state.weakrefs,
				(struct weakref_bag_item_s) {
					.weakrefobj = weakrefobj,
					.target = target});
	}
}
+
/*
 * Walk all registered weakrefs after a sweep: forget entries whose weakref
 * object was itself collected, and clear (*target = NULL) entries whose
 * referenced object was collected.  Called at the end of qcgc_sweep().
 */
QCGC_STATIC void update_weakrefs(void) {
	size_t i = 0;
	while (i < qcgc_state.weakrefs->count) {
		struct weakref_bag_item_s item = qcgc_state.weakrefs->items[i];
		// Check whether weakref object itself was collected
		// We know the weakref object is a normal object
		// (qcgc_register_weakref asserts it is neither prebuilt nor
		// huge), so its arena block type decides its liveness.
		switch(qcgc_arena_get_blocktype((cell_t *) item.weakrefobj)) {
			case BLOCK_EXTENT: // Fall through
			case BLOCK_FREE:
				// Weakref itself was collected, forget it.
				// `i` is deliberately NOT incremented here:
				// remove_index presumably refills slot i --
				// confirm against the bag implementation.
				qcgc_state.weakrefs =
					qcgc_weakref_bag_remove_index(
						qcgc_state.weakrefs, i);
				continue;
			case BLOCK_BLACK:
			case BLOCK_WHITE:
				// Weakref object is still valid, continue
				break;
		}

		// Check whether the weakref target is still valid
		object_t *points_to = *item.target;
		if ((object_t *) qcgc_arena_addr((cell_t *) points_to) ==
				points_to) {
			// Huge object (it is its own arena address): valid
			// iff it is still in the huge-block table
			if (qcgc_hbtable_has(points_to)) {
				// Still valid
				i++;
			} else {
				// Invalid: clear the weak reference and
				// drop the entry
				*(item.target) = NULL;
				qcgc_state.weakrefs =
					qcgc_weakref_bag_remove_index(
						qcgc_state.weakrefs, i);
			}
		} else {
			// Normal object: liveness from its arena block type
			switch(qcgc_arena_get_blocktype((cell_t *) points_to)) {
				case BLOCK_BLACK: // Still valid
				case BLOCK_WHITE:
					i++;
					break;
				case BLOCK_EXTENT: // Fall through
				case BLOCK_FREE:
					// Invalid: clear the weak reference
					// and drop the entry
					*(item.target) = NULL;
					qcgc_state.weakrefs =
						qcgc_weakref_bag_remove_index(
							qcgc_state.weakrefs, i);
					break;
			}
		}
	}
}
diff --git a/rpython/translator/c/src/qcgc/qcgc.h
b/rpython/translator/c/src/qcgc/qcgc.h
--- a/rpython/translator/c/src/qcgc/qcgc.h
+++ b/rpython/translator/c/src/qcgc/qcgc.h
@@ -83,6 +83,15 @@
object_t *qcgc_shadowstack_pop(void);
/**
+ * Weakref registration
+ *
+ * @param weakrefobj Pointer to the weakref itself
+ * @param target Doublepointer to referenced object.
+ * The referenced object must be a
valid object.
+ */
+void qcgc_register_weakref(object_t *weakrefobj, object_t **target);
+
+/**
* Tracing function.
*
* This function traces an object, i.e. calls visit on every object referenced
diff --git a/rpython/translator/c/test/test_newgc.py
b/rpython/translator/c/test/test_newgc.py
--- a/rpython/translator/c/test/test_newgc.py
+++ b/rpython/translator/c/test/test_newgc.py
@@ -1280,6 +1280,9 @@
GC_CAN_MOVE = False
GC_CAN_SHRINK_ARRAY = False
removetypeptr = True
+
    def test_framework_static_roots(self):
        # Static-root handling is not implemented for the qcgc backend yet
        # (see the commented-out finish_tables override in qcgcframework.py).
        py.test.skip("not implemented")
_______________________________________________
pypy-commit mailing list
[email protected]
https://mail.python.org/mailman/listinfo/pypy-commit