Author: Remi Meier
Branch:
Changeset: r996:ae62acdb5d7c
Date: 2014-03-13 10:31 +0100
http://bitbucket.org/pypy/stmgc/changeset/ae62acdb5d7c/
Log: merge weakref support
diff --git a/c7/stm/core.c b/c7/stm/core.c
--- a/c7/stm/core.c
+++ b/c7/stm/core.c
@@ -180,6 +180,7 @@
}
assert(list_is_empty(STM_PSEGMENT->modified_old_objects));
+ assert(list_is_empty(STM_PSEGMENT->young_weakrefs));
assert(tree_is_cleared(STM_PSEGMENT->young_outside_nursery));
assert(tree_is_cleared(STM_PSEGMENT->nursery_objects_shadows));
assert(tree_is_cleared(STM_PSEGMENT->callbacks_on_abort));
@@ -483,6 +484,7 @@
/* reset these lists to NULL too on abort */
LIST_FREE(pseg->objects_pointing_to_nursery);
LIST_FREE(pseg->large_overflow_objects);
+ list_clear(pseg->young_weakrefs);
}
static void abort_with_mutex(void)
diff --git a/c7/stm/core.h b/c7/stm/core.h
--- a/c7/stm/core.h
+++ b/c7/stm/core.h
@@ -61,7 +61,7 @@
current transaction that have been flushed out of the nursery,
which occurs if the same transaction allocates too many objects.
*/
- GCFLAG_OVERFLOW_NUMBER_bit0 = 0x08 /* must be last */
+ GCFLAG_OVERFLOW_NUMBER_bit0 = 0x8 /* must be last */
};
@@ -105,6 +105,15 @@
next minor collection. */
struct tree_s *nursery_objects_shadows;
+ /* List of all young weakrefs to check in minor collections. These
+ are the only weakrefs that may point to young objects and never
+ contain NULL. */
+ struct list_s *young_weakrefs;
+
+ /* List of all old weakrefs to check in major collections. These
+ weakrefs never point to young objects and never contain NULL. */
+ struct list_s *old_weakrefs;
+
/* Tree of 'key->callback' associations from stm_call_on_abort() */
struct tree_s *callbacks_on_abort;
diff --git a/c7/stm/gcpage.c b/c7/stm/gcpage.c
--- a/c7/stm/gcpage.c
+++ b/c7/stm/gcpage.c
@@ -450,7 +450,11 @@
/* 'objects_pointing_to_nursery' should be empty, but isn't
necessarily because it also lists objects that have been
written to but don't actually point to the nursery. Clear
- it up and set GCFLAG_WRITE_BARRIER again on the objects. */
+ it up and set GCFLAG_WRITE_BARRIER again on the objects.
+ This is the case for transactions where
+ MINOR_NOTHING_TO_DO() == false
+       but which still performed write barriers on objects
+ */
lst = pseg->objects_pointing_to_nursery;
if (lst != NULL) {
LIST_FOREACH_R(lst, uintptr_t /*item*/,
@@ -537,6 +541,9 @@
mark_visit_from_roots();
LIST_FREE(mark_objects_to_trace);
+ /* weakrefs: */
+ stm_visit_old_weakrefs();
+
/* cleanup */
clean_up_segment_lists();
diff --git a/c7/stm/nursery.c b/c7/stm/nursery.c
--- a/c7/stm/nursery.c
+++ b/c7/stm/nursery.c
@@ -299,6 +299,9 @@
collect_oldrefs_to_nursery();
+ /* now all surviving nursery objects have been moved out */
+ stm_move_young_weakrefs();
+
throw_away_nursery(get_priv_segment(STM_SEGMENT->segment_num));
assert(MINOR_NOTHING_TO_DO(STM_PSEGMENT));
diff --git a/c7/stm/setup.c b/c7/stm/setup.c
--- a/c7/stm/setup.c
+++ b/c7/stm/setup.c
@@ -57,6 +57,8 @@
pr->objects_pointing_to_nursery = NULL;
pr->large_overflow_objects = NULL;
pr->modified_old_objects = list_create();
+ pr->young_weakrefs = list_create();
+ pr->old_weakrefs = list_create();
pr->young_outside_nursery = tree_create();
pr->nursery_objects_shadows = tree_create();
pr->callbacks_on_abort = tree_create();
@@ -95,6 +97,8 @@
assert(pr->objects_pointing_to_nursery == NULL);
assert(pr->large_overflow_objects == NULL);
list_free(pr->modified_old_objects);
+ list_free(pr->young_weakrefs);
+ list_free(pr->old_weakrefs);
tree_free(pr->young_outside_nursery);
tree_free(pr->nursery_objects_shadows);
tree_free(pr->callbacks_on_abort);
diff --git a/c7/stm/weakref.c b/c7/stm/weakref.c
new file mode 100644
--- /dev/null
+++ b/c7/stm/weakref.c
@@ -0,0 +1,138 @@
+#ifndef _STM_CORE_H_
+# error "must be compiled via stmgc.c"
+#endif
+
+#define WEAKREF_PTR(wr, sz) ((object_t * TLPREFIX *)(((stm_char *)(wr)) + (sz) - sizeof(void*)))
+
+object_t *stm_allocate_weakref(ssize_t size_rounded_up)
+{
+ OPT_ASSERT(size_rounded_up > sizeof(struct object_s));
+    OPT_ASSERT(size_rounded_up == 16); /* no reason for it to be anything else */
+
+ object_t *obj = stm_allocate(size_rounded_up);
+ assert(_is_in_nursery(obj)); /* because it's so small */
+
+ LIST_APPEND(STM_PSEGMENT->young_weakrefs, obj);
+ return obj;
+}
+
+
+static void _set_weakref_in_all_segments(object_t *weakref, object_t *value)
+{
+ ssize_t size = 16;
+
+ stm_char *point_to_loc = (stm_char*)WEAKREF_PTR(weakref, size);
+ if (flag_page_private[(uintptr_t)point_to_loc / 4096UL] == PRIVATE_PAGE) {
+ long i;
+ for (i = 0; i < NB_SEGMENTS; i++) {
+ char *base = get_segment_base(i); /* two different segments */
+
+            object_t ** ref_loc = (object_t **)REAL_ADDRESS(base, point_to_loc);
+ *ref_loc = value;
+ }
+ }
+ else {
+ *WEAKREF_PTR(weakref, size) = value;
+ }
+}
+
+/***** Minor collection *****/
+
+static void stm_move_young_weakrefs()
+{
+ /* The code relies on the fact that no weakref can be an old object
+ weakly pointing to a young object. Indeed, weakrefs are immutable
so they cannot point to an object that was created after them.
+ */
+ LIST_FOREACH_R(
+ STM_PSEGMENT->young_weakrefs,
+ object_t * /*item*/,
+ ({
+ /* weakrefs are so small, they always are in the nursery. Never
+ a young outside nursery object. */
+ assert(_is_in_nursery(item));
+ object_t *TLPREFIX *pforwarded_array = (object_t *TLPREFIX *)item;
+
+ /* the following checks are done like in nursery.c: */
+ if (!(item->stm_flags & GCFLAG_HAS_SHADOW)
+ || (pforwarded_array[0] != GCWORD_MOVED)) {
+ /* weakref dies */
+ continue;
+ }
+
+ item = pforwarded_array[1]; /* moved location */
+
+ assert(!_is_young(item));
+
+ ssize_t size = 16;
+ object_t *pointing_to = *WEAKREF_PTR(item, size);
+ assert(pointing_to != NULL);
+
+ if (_is_in_nursery(pointing_to)) {
+                object_t *TLPREFIX *pforwarded_array = (object_t *TLPREFIX *)pointing_to;
+ /* the following checks are done like in nursery.c: */
+ if (!(pointing_to->stm_flags & GCFLAG_HAS_SHADOW)
+ || (pforwarded_array[0] != GCWORD_MOVED)) {
+ /* pointing_to dies */
+ _set_weakref_in_all_segments(item, NULL);
+ continue; /* no need to remember in old_weakrefs */
+ }
+ else {
+ /* moved location */
+ _set_weakref_in_all_segments(item, pforwarded_array[1]);
+ }
+ }
+ else {
+ /* young outside nursery object or old object */
+ if (tree_contains(STM_PSEGMENT->young_outside_nursery,
+ (uintptr_t)pointing_to)) {
+                /* still in the tree -> wasn't seen by the minor collection,
+                   so it doesn't survive */
+ _set_weakref_in_all_segments(item, NULL);
+ continue; /* no need to remember in old_weakrefs */
+ }
+ /* pointing_to was already old */
+ }
+ LIST_APPEND(STM_PSEGMENT->old_weakrefs, item);
+ }));
+ list_clear(STM_PSEGMENT->young_weakrefs);
+}
+
+
+/***** Major collection *****/
+
+
+static void stm_visit_old_weakrefs(void)
+{
+ long i;
+ for (i = 0; i < NB_SEGMENTS; i++) {
+ struct stm_priv_segment_info_s *pseg = get_priv_segment(i);
+ struct list_s *lst;
+
+ lst = pseg->old_weakrefs;
+ uintptr_t n = list_count(lst);
+ while (n > 0) {
+ object_t *weakref = (object_t *)list_item(lst, --n);
+ if (!mark_visited_test(weakref)) {
+ /* weakref dies */
+ list_set_item(lst, n, list_pop_item(lst));
+ continue;
+ }
+
+ ssize_t size = 16;
+ object_t *pointing_to = *WEAKREF_PTR(weakref, size);
+ assert(pointing_to != NULL);
+ if (!mark_visited_test(pointing_to)) {
+                //assert(flag_page_private[(uintptr_t)weakref / 4096UL] != PRIVATE_PAGE);
+ _set_weakref_in_all_segments(weakref, NULL);
+
+ /* we don't need it in this list anymore */
+ list_set_item(lst, n, list_pop_item(lst));
+ continue;
+ }
+ else {
+ /* it survives! */
+ }
+ }
+ }
+}
diff --git a/c7/stm/weakref.h b/c7/stm/weakref.h
new file mode 100644
--- /dev/null
+++ b/c7/stm/weakref.h
@@ -0,0 +1,9 @@
+#ifndef _SRCSTM_WEAKREF_H
+#define _SRCSTM_WEAKREF_H
+
+object_t *stm_allocate_weakref(ssize_t size_rounded_up);
+static void stm_move_young_weakrefs(void);
+static void stm_visit_old_weakrefs(void);
+
+
+#endif
diff --git a/c7/stmgc.c b/c7/stmgc.c
--- a/c7/stmgc.c
+++ b/c7/stmgc.c
@@ -12,6 +12,7 @@
#include "stm/contention.h"
#include "stm/extra.h"
#include "stm/fprintcolor.h"
+#include "stm/weakref.h"
#include "stm/misc.c"
#include "stm/list.c"
@@ -28,3 +29,4 @@
#include "stm/contention.c"
#include "stm/extra.c"
#include "stm/fprintcolor.c"
+#include "stm/weakref.c"
diff --git a/c7/stmgc.h b/c7/stmgc.h
--- a/c7/stmgc.h
+++ b/c7/stmgc.h
@@ -194,6 +194,17 @@
return (object_t *)p;
}
+/* Allocate a weakref object. Weakref objects have a
+ reference to an object at the byte-offset
+ stmcb_size_rounded_up(obj) - sizeof(void*)
+ You must assign the reference before the next collection may happen.
+ After that, you must not mutate the reference anymore. However,
+ it can become NULL after any GC if the reference dies during that
+ collection.
+ NOTE: For performance, we assume stmcb_size_rounded_up(weakref)==16
+*/
+object_t *stm_allocate_weakref(ssize_t size_rounded_up);
+
/* stm_setup() needs to be called once at the beginning of the program.
stm_teardown() can be called at the end, but that's not necessary
diff --git a/c7/test/common.py b/c7/test/common.py
--- a/c7/test/common.py
+++ b/c7/test/common.py
@@ -14,7 +14,7 @@
os.path.join(parent_dir, "stmgc.c")] + [
os.path.join(parent_dir, 'stm', _n)
for _n in os.listdir(os.path.join(parent_dir, 'stm'))
- if _n.endswith('.h') or _n.endswith('.c')]
+                   if (_n.endswith('.h') or _n.endswith('.c')) and not _n.startswith('.')]
_pycache_ = os.path.join(parent_dir, 'test', '__pycache__')
if os.path.exists(_pycache_):
diff --git a/c7/test/support.py b/c7/test/support.py
--- a/c7/test/support.py
+++ b/c7/test/support.py
@@ -25,6 +25,7 @@
void stm_read(object_t *obj);
/*void stm_write(object_t *obj); use _checked_stm_write() instead */
object_t *stm_allocate(ssize_t size_rounded_up);
+object_t *stm_allocate_weakref(ssize_t size_rounded_up);
object_t *_stm_allocate_old(ssize_t size_rounded_up);
void stm_setup(void);
@@ -54,6 +55,10 @@
void _set_ptr(object_t *obj, int n, object_t *v);
object_t * _get_ptr(object_t *obj, int n);
+void _set_weakref(object_t *obj, object_t *v);
+object_t* _get_weakref(object_t *obj);
+
+
void _stm_start_safe_point(void);
bool _check_stop_safe_point(void);
@@ -163,6 +168,21 @@
}
+#define WEAKREF_PTR(wr, sz) ((object_t * TLPREFIX *)(((stm_char *)(wr)) + (sz) - sizeof(void*)))
+void _set_weakref(object_t *obj, object_t *v)
+{
+ char *realobj = _stm_real_address(obj);
+ ssize_t size = stmcb_size_rounded_up((struct object_s *)realobj);
+ *WEAKREF_PTR(obj, size) = v;
+}
+
+object_t * _get_weakref(object_t *obj)
+{
+ char *realobj = _stm_real_address(obj);
+ ssize_t size = stmcb_size_rounded_up((struct object_s *)realobj);
+ return *WEAKREF_PTR(obj, size);
+}
+
void _set_ptr(object_t *obj, int n, object_t *v)
{
long nrefs = (long)((myobj_t*)obj)->type_id - 421420;
@@ -266,6 +286,18 @@
lib._set_type_id(o, tid)
return o
+def stm_allocate_weakref(point_to_obj, size=None):
+ assert HDR+WORD == 16
+ o = lib.stm_allocate_weakref(HDR + WORD)
+
+ tid = 421420
+ lib._set_type_id(o, tid)
+ lib._set_weakref(o, point_to_obj)
+ return o
+
+def stm_get_weakref(o):
+ return lib._get_weakref(o)
+
def stm_allocate_refs(n):
o = lib.stm_allocate(HDR + n * WORD)
tid = 421420 + n
diff --git a/c7/test/test_weakref.py b/c7/test/test_weakref.py
new file mode 100644
--- /dev/null
+++ b/c7/test/test_weakref.py
@@ -0,0 +1,255 @@
+import py
+from support import *
+
+
+
+
+class TestMinorCollection(BaseTest):
+ def test_simple(self):
+ lib._stm_set_nursery_free_count(2048)
+ self.start_transaction()
+
+ self.push_root_no_gc()
+ lp2 = stm_allocate(48)
+ lp1 = stm_allocate_weakref(lp2) # no collection here
+ self.pop_root()
+
+ assert stm_get_weakref(lp1) == lp2
+
+ self.push_root(lp1)
+ stm_minor_collect()
+ lp1 = self.pop_root()
+ # lp2 died
+ assert stm_get_weakref(lp1) == ffi.NULL
+
+ self.push_root(lp1)
+ stm_minor_collect()
+ lp1 = self.pop_root()
+ # lp2 died
+ assert stm_get_weakref(lp1) == ffi.NULL
+
+ def test_still_simple(self):
+ lib._stm_set_nursery_free_count(2048)
+ self.start_transaction()
+
+ self.push_root_no_gc()
+ lp2 = stm_allocate(48)
+ lp1 = stm_allocate_weakref(lp2) # no collection here
+ self.pop_root()
+
+ assert stm_get_weakref(lp1) == lp2
+
+ self.push_root(lp1)
+ self.push_root(lp2)
+ stm_minor_collect()
+ lp2 = self.pop_root()
+ lp1 = self.pop_root()
+ # lp2 survived
+ assert stm_get_weakref(lp1) == lp2
+
+ self.push_root(lp1)
+ self.push_root(lp2)
+ stm_minor_collect()
+ lp2 = self.pop_root()
+ lp1 = self.pop_root()
+ # lp2 survived
+ assert stm_get_weakref(lp1) == lp2
+
+ def test_weakref_itself_dies(self):
+ self.start_transaction()
+
+ self.push_root_no_gc()
+ lp2 = stm_allocate(48)
+ stm_allocate_weakref(lp2) # no collection here
+ self.pop_root()
+ stm_minor_collect()
+ assert lib._stm_total_allocated() == 0
+
+
+ def test_weakref_old_keep(self):
+ lp0 = stm_allocate_old(48)
+
+ self.start_transaction()
+ self.push_root_no_gc()
+ lp1 = stm_allocate_weakref(lp0) # no collection here
+ self.pop_root()
+
+ self.push_root(lp1)
+ stm_minor_collect()
+ lp1 = self.pop_root()
+
+ assert stm_get_weakref(lp1) == lp0
+
+
+ def test_abort_cleanup(self):
+ self.start_transaction()
+
+ self.push_root_no_gc()
+ lp1 = stm_allocate_weakref(ffi.NULL) # no collection here
+ self.pop_root()
+
+ self.abort_transaction()
+ self.start_transaction()
+
+ def test_big_alloc_sizes(self):
+ sizes = [lib._STM_FAST_ALLOC + 16, 48,]
+
+ for osize in sizes:
+ self.start_transaction()
+ self.push_root_no_gc()
+ lp2 = stm_allocate(osize)
+ lp1 = stm_allocate_weakref(lp2) # no collection here
+ self.pop_root()
+
+ assert stm_get_weakref(lp1) == lp2
+
+ self.push_root(lp1)
+ self.push_root(lp2)
+ stm_minor_collect()
+ lp2 = self.pop_root()
+ lp1 = self.pop_root()
+ # lp2 survived
+ assert stm_get_weakref(lp1) == lp2
+ self.abort_transaction()
+
+
+ def test_multiple_threads(self):
+ self.start_transaction()
+ lp0 = stm_allocate(1024)
+ self.push_root(lp0)
+ self.commit_transaction()
+
+ self.start_transaction()
+ lp0 = self.pop_root()
+ self.push_root(lp0)
+ stm_write(lp0) # privatize page
+
+ self.push_root_no_gc()
+ lp2 = stm_allocate(48)
+ lp1 = stm_allocate_weakref(lp2) # no collection here
+ self.pop_root()
+
+ self.push_root(lp0)
+ self.push_root(lp1)
+ self.commit_transaction()
+ # lp2 dies
+ lp1 = self.pop_root()
+ self.push_root(lp1)
+
+ assert stm_get_weakref(lp1) == ffi.NULL
+
+ self.switch(1)
+
+ self.start_transaction()
+ assert stm_get_weakref(lp1) == ffi.NULL
+
+
+
+
+class TestMajorCollection(BaseTest):
+ def test_simple(self):
+ self.start_transaction()
+
+ self.push_root_no_gc()
+ lp2 = stm_allocate(48)
+ lp1 = stm_allocate_weakref(lp2) # no collection here
+ self.pop_root()
+
+ assert stm_get_weakref(lp1) == lp2
+
+ self.push_root(lp1)
+ self.push_root(lp2)
+ stm_minor_collect()
+ lp2 = self.pop_root()
+ lp1 = self.pop_root()
+ # lp2 survived
+ assert stm_get_weakref(lp1) == lp2
+
+ self.push_root(lp1)
+ stm_minor_collect()
+ lp1 = self.pop_root()
+ # lp2 survived because no major collection
+ assert stm_get_weakref(lp1) == lp2
+
+ self.push_root(lp1)
+ stm_major_collect()
+ lp1 = self.pop_root()
+ # lp2 died
+ assert stm_get_weakref(lp1) == ffi.NULL
+
+ def test_weakref_old_keep(self):
+ lp0 = stm_allocate_old(48)
+
+ self.start_transaction()
+ self.push_root_no_gc()
+ lp1 = stm_allocate_weakref(lp0) # no collection here
+ self.pop_root()
+
+ self.push_root(lp1)
+ stm_major_collect()
+ lp1 = self.pop_root()
+
+ assert stm_get_weakref(lp1) == lp0
+
+ def test_survive(self):
+ self.start_transaction()
+
+ self.push_root_no_gc()
+ lp2 = stm_allocate(48)
+ lp1 = stm_allocate_weakref(lp2) # no collection here
+ self.pop_root()
+
+ assert stm_get_weakref(lp1) == lp2
+
+ self.push_root(lp1)
+ self.push_root(lp2)
+ stm_major_collect()
+ lp2 = self.pop_root()
+ lp1 = self.pop_root()
+ # lp2 survived
+ assert stm_get_weakref(lp1) == lp2
+
+ self.push_root(lp1)
+ stm_minor_collect()
+ lp1 = self.pop_root()
+ # lp2 survived because no major collection
+ assert stm_get_weakref(lp1) == lp2
+
+ self.push_root(lp1)
+ stm_major_collect()
+ lp1 = self.pop_root()
+ # lp2 died
+ assert stm_get_weakref(lp1) == ffi.NULL
+
+ def test_multiple_threads(self):
+ self.start_transaction()
+ lp0 = stm_allocate(48)
+ lp1 = stm_allocate_weakref(lp0) # no collection here
+ self.push_root(lp1)
+ self.push_root(lp0)
+ self.commit_transaction()
+
+ self.start_transaction()
+ lp0 = self.pop_root()
+ lp1 = self.pop_root()
+ self.push_root(lp1)
+
+ stm_write(lp0) # privatize page with weakref in it too
+
+ assert stm_get_page_flag(stm_get_obj_pages(lp1)[0]) == PRIVATE_PAGE
+ assert stm_get_weakref(lp1) == lp0
+
+ self.commit_transaction()
+ self.start_transaction()
+
+ # lp0 dies
+ stm_major_collect()
+
+ assert stm_get_weakref(lp1) == ffi.NULL
+ print stm_get_real_address(lp1)
+
+ self.switch(1)
+
+ self.start_transaction()
+ assert stm_get_weakref(lp1) == ffi.NULL
+ print stm_get_real_address(lp1)
_______________________________________________
pypy-commit mailing list
[email protected]
https://mail.python.org/mailman/listinfo/pypy-commit