Author: Stefan Beyer <[email protected]>
Branch: cpyext-gc-cycle
Changeset: r98006:a3a571806246
Date: 2019-11-09 18:01 +0100
http://bitbucket.org/pypy/pypy/changeset/a3a571806246/
Log: Implemented working set for linked objects during rrc marking
Removed debug output
diff --git a/rpython/memory/gc/incminimark.py b/rpython/memory/gc/incminimark.py
--- a/rpython/memory/gc/incminimark.py
+++ b/rpython/memory/gc/incminimark.py
@@ -2136,6 +2136,12 @@
#
hdr.tid |= GCFLAG_VISITED
#
+ if self.rrc_enabled: # TODO: is this safe here?
+ if self.rrc_gc.state == RawRefCountBaseGC.STATE_MARKING:
+ self.rrc_gc.visit_pyobj(obj)
+                elif self.rrc_gc.state == RawRefCountBaseGC.STATE_GARBAGE_MARKING:
+ hdr.tid |= GCFLAG_GARBAGE
+ #
self.surviving_pinned_objects.append(
llarena.getfakearenaaddress(obj - size_gc_header))
self.pinned_objects_in_nursery += 1
@@ -2403,11 +2409,6 @@
self.collect_nonstack_roots()
self.visit_all_objects()
#
- # If enabled, do a major collection step for rrc objects.
- # TODO: move up before "if remaining >= estimate // 2" to
- # improve pause times, issues:
- # - (non-inc) mark expects all objects to be marked
- # - both do not rescan nonstack-roots
if self.rrc_enabled:
debug_print("starting rrc state:", self.rrc_gc.state)
debug_print("starting marking_state:",
self.rrc_gc.marking_state)
@@ -2742,9 +2743,11 @@
# to also set TRACK_YOUNG_PTRS here, for the write barrier.
hdr.tid |= GCFLAG_VISITED | GCFLAG_TRACK_YOUNG_PTRS
- if self.rrc_enabled and \
- self.rrc_gc.state == RawRefCountBaseGC.STATE_GARBAGE_MARKING:
- hdr.tid |= GCFLAG_GARBAGE
+ if self.rrc_enabled:
+ if self.rrc_gc.state == RawRefCountBaseGC.STATE_MARKING:
+ self.rrc_gc.visit_pyobj(obj)
+ elif self.rrc_gc.state == RawRefCountBaseGC.STATE_GARBAGE_MARKING:
+ hdr.tid |= GCFLAG_GARBAGE
if self.has_gcptr(llop.extract_ushort(llgroup.HALFWORD, hdr.tid)):
#
diff --git a/rpython/memory/gc/rrc/base.py b/rpython/memory/gc/rrc/base.py
--- a/rpython/memory/gc/rrc/base.py
+++ b/rpython/memory/gc/rrc/base.py
@@ -143,7 +143,7 @@
if not self.gc.is_young_object(obj):
lst = self.p_list_old
if self.state == self.STATE_MARKING:
- debug_print("added p_list", pyobject)
+ #debug_print("added p_list", pyobject)
self.p_list_old_added.append(pyobject)
lst.append(pyobject)
dct.setitem(obj, pyobject)
@@ -307,7 +307,7 @@
if surviving:
surviving_list.append(pyobject)
if working_set:
- debug_print("added p_list", pyobject)
+ #debug_print("added p_list", pyobject)
self.p_list_old_added.append(pyobject)
else:
self._free(pyobject)
diff --git a/rpython/memory/gc/rrc/incmark.py b/rpython/memory/gc/rrc/incmark.py
--- a/rpython/memory/gc/rrc/incmark.py
+++ b/rpython/memory/gc/rrc/incmark.py
@@ -25,6 +25,7 @@
self.state = self.STATE_MARKING
self.marking_state = 0
+ self.pyobj_to_trace = self.gc.AddressStack()
return False
if self.state == self.STATE_MARKING:
@@ -38,7 +39,6 @@
return False
elif self.marking_state == 1:
# initialize working set from roots, then pause
- self.pyobj_to_trace = self.gc.AddressStack()
for i in range(0, self.total_objs):
obj = self.snapshot_objs[i]
self._mark_rawrefcount_obj(obj)
@@ -104,6 +104,27 @@
debug_print("time mark p_list_old", time.time() - start)
return True
+ def visit_pyobj(self, gcobj):
+ # if there is a pyobj, add it to the working set
+ if self.gc.is_in_nursery(gcobj):
+ dct = self.p_dict_nurs # is this even possible?
+ else:
+ dct = self.p_dict
+ pyobject = dct.get(gcobj)
+ if pyobject <> llmemory.NULL:
+ pyobj = self._pyobj(pyobject)
+ gchdr = self.pyobj_as_gc(pyobj)
+ if gchdr <> lltype.nullptr(self.PYOBJ_GC_HDR):
+ if gchdr.c_gc_refs != self.RAWREFCOUNT_REFS_REACHABLE and \
+ gchdr.c_gc_refs != self.RAWREFCOUNT_REFS_UNTRACKED: # object is in snapshot
+ pass
+ c_gc_refs = self._pyobj_gc_refcnt_get(gchdr)
+ index = c_gc_refs - 1
+ snapobj = self.snapshot_objs[index]
+ if snapobj.refcnt == 0:
+ addr = llmemory.cast_ptr_to_adr(snapobj)
+ self.pyobj_to_trace.append(addr)
+
def _sync_snapshot(self):
# sync snapshot with pyob_list:
# * check the consistency of "dead" objects and keep all of them
@@ -140,9 +161,6 @@
consistent = pyobj.c_ob_refcnt == snapobj.refcnt_original
if not consistent:
break
- # move to separate list
- #self.p_list_old.remove(snapobj.pyobj) # TODO: this might be evil... do something different... -> unlink? special link?
-
# remove link, to free non-gc (so they won't get marked and are freed)
pyobj = llmemory.cast_adr_to_ptr(snapobj.pyobj,
self.PYOBJ_HDR_PTR)
link = llmemory.cast_int_to_adr(pyobj.c_ob_pypy_link)
@@ -175,7 +193,6 @@
else:
# new object, keep alive
self._pyobj_gc_refcnt_set(pygchdr, 1)
- #pygchdr.c_gc_refs = 1 << self.RAWREFCOUNT_REFS_SHIFT
pyobj = self.gc_as_pyobj(pygchdr)
if pyobj.c_ob_pypy_link != 0:
addr = llmemory.cast_int_to_adr(pyobj.c_ob_pypy_link)
@@ -219,13 +236,11 @@
pygchdr = pygchdr_continue_gc
while pygchdr <> self.pyobj_list:
self._pyobj_gc_refcnt_set(pygchdr, 1)
- #pygchdr.c_gc_refs = 1 << self.RAWREFCOUNT_REFS_SHIFT
pygchdr = pygchdr.c_gc_next
pygchdr = self.pyobj_old_list.c_gc_next
# resurrect "dead" objects
while pygchdr <> self.pyobj_old_list:
self._pyobj_gc_refcnt_set(pygchdr, 1)
- #pygchdr.c_gc_refs = 1 << self.RAWREFCOUNT_REFS_SHIFT
pygchdr = pygchdr.c_gc_next
# merge lists
if not self._gc_list_is_empty(self.pyobj_old_list):
@@ -249,12 +264,10 @@
pygchdr = pygchdr_continue_isolate
while pygchdr <> self.pyobj_isolate_old_list:
self._pyobj_gc_refcnt_set(pygchdr, 1)
- #pygchdr.c_gc_refs = 1 << self.RAWREFCOUNT_REFS_SHIFT
pygchdr = pygchdr.c_gc_next
# resurrect "dead" objects
while pygchdr <> self.pyobj_isolate_dead_list:
self._pyobj_gc_refcnt_set(pygchdr, 1)
- #pygchdr.c_gc_refs = 1 << self.RAWREFCOUNT_REFS_SHIFT
pygchdr = pygchdr.c_gc_next
# merge lists
if not self._gc_list_is_empty(self.pyobj_isolate_old_list):
@@ -269,9 +282,7 @@
def _check_consistency_gc(self, pygchdr, pylist_dead_target):
c_gc_refs = self._pyobj_gc_refcnt_get(pygchdr)
snapobj = self.snapshot_objs[c_gc_refs - 1]
- #snapobj = self.snapshot_objs[pygchdr.c_gc_refs - 1]
self._pyobj_gc_refcnt_set(pygchdr, snapobj.refcnt)
- #pygchdr.c_gc_refs = snapobj.refcnt
if snapobj.refcnt == 0: # object considered dead
# check consistency (dead subgraphs can never change):
pyobj = self.gc_as_pyobj(pygchdr)
@@ -358,9 +369,7 @@
simple_limit += 1
if simple_limit > self.inc_limit: # TODO: add test
reached_limit = True
- self.gc.visit_all_objects() # TODO: implement sane limit (ex. half of normal limit), retrace proxies
- self.p_list_old.foreach(self._mark_rawrefcount_linked, None)
- self.o_list_old.foreach(self._mark_rawrefcount_linked, None)
+ self.gc.visit_all_objects() # TODO: implement sane limit
first = False
return not reached_limit # are there any objects left?
diff --git a/rpython/memory/gc/rrc/mark.py b/rpython/memory/gc/rrc/mark.py
--- a/rpython/memory/gc/rrc/mark.py
+++ b/rpython/memory/gc/rrc/mark.py
@@ -12,6 +12,7 @@
if self.state == self.STATE_DEFAULT:
self.state = self.STATE_MARKING
+ self.pyobj_to_trace = self.gc.AddressStack()
# First, untrack all tuples with only non-gc rrc objects and promote
# all other tuples to the pyobj_list
@@ -67,6 +68,21 @@
self.state = self.STATE_DEFAULT
return True
+ def visit_pyobj(self, gcobj):
+ # if there is a pyobj, add it to the working set
+ if self.gc.is_in_nursery(gcobj):
+ dct = self.p_dict_nurs # is this even possible?
+ else:
+ dct = self.p_dict
+ pyobject = dct.get(gcobj)
+ if pyobject <> llmemory.NULL:
+ pyobj = self._pyobj(pyobject)
+ gchdr = self.pyobj_as_gc(pyobj)
+ if gchdr <> lltype.nullptr(self.PYOBJ_GC_HDR):
+ if gchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT == 0:
+ addr = llmemory.cast_ptr_to_adr(gchdr)
+ self.pyobj_to_trace.append(addr)
+
def to_obj(self, pyobject):
if self.use_refcntdict:
obj = self.pypy_link_dict.get(pyobject)
@@ -165,7 +181,6 @@
pyobj_old = self.pyobj_list
# initialize working set
- self.pyobj_to_trace = self.gc.AddressStack()
gchdr = self.pyobj_old_list.c_gc_next
while gchdr <> self.pyobj_old_list:
next_old = gchdr.c_gc_next
@@ -189,8 +204,6 @@
gchdr.c_gc_refs += 1 << self.RAWREFCOUNT_REFS_SHIFT
self._mark_rawrefcount_obj(gchdr, pyobj_old)
self.gc.visit_all_objects()
- self.p_list_old.foreach(self._mark_rawrefcount_linked, None)
- self.o_list_old.foreach(self._mark_rawrefcount_linked, None)
# now all rawrefcounted objects, which are alive, have a cyclic
# refcount > 0 or are marked
@@ -230,4 +243,4 @@
if gchdr <> lltype.nullptr(self.PYOBJ_GC_HDR):
if gchdr.c_gc_refs >> self.RAWREFCOUNT_REFS_SHIFT == 0:
addr = llmemory.cast_ptr_to_adr(gchdr)
- self.pyobj_to_trace.append(addr)
+ self.pyobj_to_trace.append(addr)
\ No newline at end of file
_______________________________________________
pypy-commit mailing list
[email protected]
https://mail.python.org/mailman/listinfo/pypy-commit