[pypy-commit] pypy py3.5: Messed up the saving/restoring of exceptions inside _continuation.

2016-11-20 Thread arigo
Author: Armin Rigo 
Branch: py3.5
Changeset: r88492:7dc83389f688
Date: 2016-11-20 16:04 +0100
http://bitbucket.org/pypy/pypy/changeset/7dc83389f688/

Log: Messed up the saving/restoring of exceptions inside _continuation.
Do it hopefully right. Fixes most of
pypy/module/test_lib_pypy/test_greenlet.

diff --git a/pypy/module/_continuation/interp_continuation.py 
b/pypy/module/_continuation/interp_continuation.py
--- a/pypy/module/_continuation/interp_continuation.py
+++ b/pypy/module/_continuation/interp_continuation.py
@@ -42,12 +42,12 @@
         bottomframe.locals_cells_stack_w[3] = w_kwds
         bottomframe.last_exception = get_cleared_operation_error(space)
         self.bottomframe = bottomframe
-        self.saved_exception = None
         #
         global_state.origin = self
         self.sthread = sthread
+        saved_exception = pre_switch(sthread)
         h = sthread.new(new_stacklet_callback)
-        post_switch(sthread, h)
+        post_switch(sthread, h, saved_exception)
 
     def switch(self, w_to):
         sthread = self.sthread
@@ -83,8 +83,9 @@
             # double switch: the final destination is to.h
             global_state.destination = to
         #
+        saved_exception = pre_switch(sthread)
         h = sthread.switch(global_state.destination.h)
-        return post_switch(sthread, h)
+        return post_switch(sthread, h, saved_exception)
 
     @unwrap_spec(w_value = WrappedDefault(None),
                  w_to = WrappedDefault(None))
@@ -228,8 +229,6 @@
 def new_stacklet_callback(h, arg):
     self = global_state.origin
     self.h = h
-    self.saved_exception = self.sthread.ec.sys_exc_info()
-    self.sthread.ec.set_sys_exc_info(None)
     global_state.clear()
     try:
         frame = self.bottomframe
@@ -243,7 +242,12 @@
     global_state.destination = self
     return self.h
 
-def post_switch(sthread, h):
+def pre_switch(sthread):
+    saved_exception = sthread.ec.sys_exc_info()
+    sthread.ec.set_sys_exc_info(None)
+    return saved_exception
+
+def post_switch(sthread, h, saved_exception):
     origin = global_state.origin
     self = global_state.destination
     global_state.origin = None
@@ -251,12 +255,10 @@
     self.h, origin.h = origin.h, h
     #
     current = sthread.ec.topframeref
-    saved_exc = sthread.ec.sys_exc_info()
     sthread.ec.topframeref = self.bottomframe.f_backref
-    sthread.ec.set_sys_exc_info(self.saved_exception)
+    sthread.ec.set_sys_exc_info(saved_exception)
     self.bottomframe.f_backref = origin.bottomframe.f_backref
     origin.bottomframe.f_backref = current
-    origin.saved_exception = saved_exc
     #
     return get_result()
 
diff --git a/pypy/module/_continuation/test/test_stacklet.py 
b/pypy/module/_continuation/test/test_stacklet.py
--- a/pypy/module/_continuation/test/test_stacklet.py
+++ b/pypy/module/_continuation/test/test_stacklet.py
@@ -708,3 +708,32 @@
 
         continulet.switch(c1, to=c2)
         raises(error, continulet.switch, c1, to=c2)
+
+    def test_exc_info_save_restore(self):
+        from _continuation import continulet
+        import sys
+        main = []
+
+        def f(c):
+            print("in f... 222")
+            try:
+                raise ValueError('fun')
+            except:
+                print("333")
+                exc_info = sys.exc_info()
+                print("444")
+                c17650 = continulet(h)
+                bd50.switch(to=c17650)
+                print("back in f...")
+                assert exc_info == sys.exc_info()
+
+        def h(c):
+            print("in h... 555")
+            assert sys.exc_info() == (None, None, None)
+            print("666")
+
+        main = continulet.__new__(continulet)
+        print(111)
+        bd50 = continulet(f)
+        main.switch(to=bd50)
+        print(999)
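
The pairing can be illustrated outside the interpreter with plain Python. In the sketch below, FakeExecutionContext is an invented stand-in for the per-thread execution context (only the sys_exc_info()/set_sys_exc_info() pair is modelled); the real pre_switch()/post_switch() above operate on sthread.ec instead:

    class FakeExecutionContext(object):
        # Invented stand-in: models only the "currently handled exception".
        def __init__(self):
            self._sys_exc_info = None

        def sys_exc_info(self):
            return self._sys_exc_info

        def set_sys_exc_info(self, operr):
            self._sys_exc_info = operr

    def pre_switch(ec):
        # Detach the exception state from the stack we are about to leave.
        saved_exception = ec.sys_exc_info()
        ec.set_sys_exc_info(None)
        return saved_exception

    def post_switch(ec, saved_exception):
        # Reattach the saved state once execution is back on that stack.
        ec.set_sys_exc_info(saved_exception)

    ec = FakeExecutionContext()
    ec.set_sys_exc_info(ValueError('fun'))   # pretend we are inside "except:"
    saved = pre_switch(ec)
    assert ec.sys_exc_info() is None         # the other continulet sees nothing
    post_switch(ec, saved)
    assert isinstance(ec.sys_exc_info(), ValueError)   # back where we started

This is exactly the invariant that test_exc_info_save_restore checks at application level.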
___
pypy-commit mailing list
pypy-commit@python.org
https://mail.python.org/mailman/listinfo/pypy-commit


[pypy-commit] pypy default: Test and fix for 'methodcaller(..., self=...)'

2016-11-20 Thread arigo
Author: Armin Rigo 
Branch: 
Changeset: r88493:030861c2e0a2
Date: 2016-11-20 16:48 +0100
http://bitbucket.org/pypy/pypy/changeset/030861c2e0a2/

Log: Test and fix for 'methodcaller(..., self=...)'

diff --git a/pypy/module/operator/app_operator.py 
b/pypy/module/operator/app_operator.py
--- a/pypy/module/operator/app_operator.py
+++ b/pypy/module/operator/app_operator.py
@@ -130,9 +130,12 @@
 
 
 class methodcaller(object):
-    def __init__(self, method_name, *args, **kwargs):
+    def __init__(*args, **kwargs):
+        if len(args) < 2:
+            raise TypeError("methodcaller() called with not enough arguments")
+        self, method_name = args[:2]
         self._method_name = method_name
-        self._args = args
+        self._args = args[2:]
         self._kwargs = kwargs
 
     def __call__(self, obj):
diff --git a/pypy/module/operator/test/test_operator.py 
b/pypy/module/operator/test/test_operator.py
--- a/pypy/module/operator/test/test_operator.py
+++ b/pypy/module/operator/test/test_operator.py
@@ -244,6 +244,13 @@
         assert methodcaller("method", 4, 5)(x) == (4, 5)
         assert methodcaller("method", 4, arg2=42)(x) == (4, 42)
 
+    def test_methodcaller_self(self):
+        from operator import methodcaller
+        class X:
+            def method(myself, self):
+                return self * 6
+        assert methodcaller("method", self=7)(X()) == 42
+
     def test_index(self):
         import operator
         assert operator.index(42) == 42
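
The signature trick is self-contained enough to try outside PyPy. The sketch below (methodcaller_sketch is an invented name, not the class from app_operator.py) shows why dropping the explicit 'self' and 'method_name' parameters from __init__ lets a keyword argument literally named self= be captured in **kwargs and forwarded to the called method:

    class methodcaller_sketch(object):
        def __init__(*args, **kwargs):
            # 'self' is taken positionally, so **kwargs may contain self=...
            if len(args) < 2:
                raise TypeError("methodcaller() called with not enough arguments")
            self, method_name = args[:2]
            self._method_name = method_name
            self._args = args[2:]
            self._kwargs = kwargs

        def __call__(self, obj):
            return getattr(obj, self._method_name)(*self._args, **self._kwargs)

    class X(object):
        def method(myself, self):
            return self * 6

    assert methodcaller_sketch("method", self=7)(X()) == 42

This mirrors what the new test_methodcaller_self test asserts against the real operator.methodcaller.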
___
pypy-commit mailing list
pypy-commit@python.org
https://mail.python.org/mailman/listinfo/pypy-commit


[pypy-commit] pypy py3.5: hg merge default

2016-11-20 Thread arigo
Author: Armin Rigo 
Branch: py3.5
Changeset: r88494:c43509390348
Date: 2016-11-20 16:49 +0100
http://bitbucket.org/pypy/pypy/changeset/c43509390348/

Log: hg merge default

diff --git a/pypy/module/operator/app_operator.py 
b/pypy/module/operator/app_operator.py
--- a/pypy/module/operator/app_operator.py
+++ b/pypy/module/operator/app_operator.py
@@ -91,11 +91,14 @@
 
 
 class methodcaller(object):
-    def __init__(self, method_name, *args, **kwargs):
+    def __init__(*args, **kwargs):
+        if len(args) < 2:
+            raise TypeError("methodcaller() called with not enough arguments")
+        self, method_name = args[:2]
         if not isinstance(method_name, str):
             raise TypeError("method name must be a string")
         self._method_name = method_name
-        self._args = args
+        self._args = args[2:]
         self._kwargs = kwargs
 
     def __call__(self, obj):
diff --git a/pypy/module/operator/test/test_operator.py 
b/pypy/module/operator/test/test_operator.py
--- a/pypy/module/operator/test/test_operator.py
+++ b/pypy/module/operator/test/test_operator.py
@@ -182,6 +182,13 @@
         assert methodcaller("method", 4, 5)(x) == (4, 5)
         assert methodcaller("method", 4, arg2=42)(x) == (4, 42)
 
+    def test_methodcaller_self(self):
+        from operator import methodcaller
+        class X:
+            def method(myself, self):
+                return self * 6
+        assert methodcaller("method", self=7)(X()) == 42
+
     def test_methodcaller_not_string(self):
         import _operator as operator
         e = raises(TypeError, operator.methodcaller, 42)
___
pypy-commit mailing list
pypy-commit@python.org
https://mail.python.org/mailman/listinfo/pypy-commit


[pypy-commit] pypy py3.5: Using check_impl_detail() like this is not following the spirit of

2016-11-20 Thread arigo
Author: Armin Rigo 
Branch: py3.5
Changeset: r88495:818fb73d805d
Date: 2016-11-20 16:54 +0100
http://bitbucket.org/pypy/pypy/changeset/818fb73d805d/

Log: Using check_impl_detail() like this is not following the spirit of
check_impl_detail(), which should be: "if false, avoid checking some
detail", not "if false, check that the value is really different".

Removed anyway, as it seems pypy gives the same answer as cpython
nowadays.

diff --git a/lib-python/3/test/test_index.py b/lib-python/3/test/test_index.py
--- a/lib-python/3/test/test_index.py
+++ b/lib-python/3/test/test_index.py
@@ -66,10 +66,7 @@
         direct_index = my_int.__index__()
         operator_index = operator.index(my_int)
         self.assertEqual(direct_index, 8)
-        if support.check_impl_detail():
-            self.assertEqual(operator_index, 7)
-        else:
-            self.assertEqual(operator_index, 8)
+        self.assertEqual(operator_index, 7)
         # Both results should be of exact type int.
         self.assertIs(type(direct_index), int)
         #self.assertIs(type(operator_index), int)
@@ -90,10 +87,7 @@
 
         bad_int = BadInt2()
         n = operator.index(bad_int)
-        if support.check_impl_detail():
-            self.assertEqual(n, 0)
-        else:
-            self.assertEqual(n, 1)
+        self.assertEqual(n, 0)
 
 
 class SeqTestCase:
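
A positive way to state the spirit described in the log message: check_impl_detail() should only ever guard an extra assertion, never select between two competing expected values. A hedged sketch of the intended shape (ImplDetailExample and its contents are invented for illustration, not part of the CPython test suite):

    import operator
    import unittest
    from test import support

    class ImplDetailExample(unittest.TestCase):
        def test_index_detail(self):
            n = operator.index(8)
            self.assertIsInstance(n, int)   # checked on every implementation
            if support.check_impl_detail():
                # pretend the exact value were a CPython-only detail:
                self.assertEqual(n, 8)
            # no 'else' branch asserting a different value elsewhere

    if __name__ == "__main__":
        unittest.main()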
___
pypy-commit mailing list
pypy-commit@python.org
https://mail.python.org/mailman/listinfo/pypy-commit


[pypy-commit] extradoc extradoc: Another CPython 3.5 failure

2016-11-20 Thread arigo
Author: Armin Rigo 
Branch: extradoc
Changeset: r5750:b6cd4ef2e6cc
Date: 2016-11-20 17:16 +0100
http://bitbucket.org/pypy/extradoc/changeset/b6cd4ef2e6cc/

Log: Another CPython 3.5 failure

diff --git a/planning/py3.5/cpython-crashers.rst 
b/planning/py3.5/cpython-crashers.rst
--- a/planning/py3.5/cpython-crashers.rst
+++ b/planning/py3.5/cpython-crashers.rst
@@ -144,3 +144,38 @@
     except IndexError:
         assert next(gen) is 1
     assert next(gen) is 2    # <==
+
+* frame.clear() does not clear f_locals, unlike what a test says
+  (Lib/test/test_frame.py)::
+
+    def test_locals_clear_locals(self):
+        # Test f_locals before and after clear() (to exercise caching)
+        f, outer, inner = self.make_frames()
+        outer.f_locals
+        inner.f_locals
+        outer.clear()
+        inner.clear()
+        self.assertEqual(outer.f_locals, {})
+        self.assertEqual(inner.f_locals, {})
+
+  This test passes, but the C-level PyFrameObject has got a strong
+  reference to f_locals, which is only updated (to be empty) if the
+  Python code tries to read this attribute.  In the normal case,
+  code that calls clear() but doesn't read f_locals afterwards will
+  still leak everything contained in the C-level f_locals field.  This
+  can be shown by this failing test::
+
+    import sys
+
+    def g():
+        x = 42
+        return sys._getframe()
+
+    frame = g()
+    d = frame.f_locals
+    frame.clear()
+    print(d)
+    assert d == {}   # fails!  but 'assert d is frame.f_locals' passes,
+                     # which shows that this dict is kept alive by
+                     # 'frame'; and we've seen that it is non-empty
+                     # as long as we don't read frame.f_locals.
___
pypy-commit mailing list
pypy-commit@python.org
https://mail.python.org/mailman/listinfo/pypy-commit


[pypy-commit] pypy py3.5: Test and fix for frame.clear()

2016-11-20 Thread arigo
Author: Armin Rigo 
Branch: py3.5
Changeset: r88496:e7e2aa356442
Date: 2016-11-20 17:26 +0100
http://bitbucket.org/pypy/pypy/changeset/e7e2aa356442/

Log: Test and fix for frame.clear()

diff --git a/pypy/interpreter/pyframe.py b/pypy/interpreter/pyframe.py
--- a/pypy/interpreter/pyframe.py
+++ b/pypy/interpreter/pyframe.py
@@ -782,8 +782,10 @@
 
     def descr_clear(self, space):
         """F.clear(): clear most references held by the frame"""
-        # Clears a random subset of the attributes (e.g. the fast
-        # locals, but not f_locals).
+        # Clears a random subset of the attributes: the local variables
+        # and the w_locals.  Note that CPython doesn't clear f_locals
+        # (which can create leaks) but it's hard to notice because
+        # the next Python-level read of 'frame.f_locals' will clear it.
         if not self.frame_finished_execution:
             if not self._is_generator_or_coroutine():
                 raise oefmt(space.w_RuntimeError,
@@ -800,6 +802,8 @@
         debug = self.getdebug()
         if debug is not None:
             debug.w_f_trace = None
+            if debug.w_locals is not None:
+                debug.w_locals = space.newdict()
 
         # clear the locals, including the cell/free vars, and the stack
         for i in range(len(self.locals_cells_stack_w)):
diff --git a/pypy/interpreter/test/test_pyframe.py 
b/pypy/interpreter/test/test_pyframe.py
--- a/pypy/interpreter/test/test_pyframe.py
+++ b/pypy/interpreter/test/test_pyframe.py
@@ -618,6 +618,15 @@
         #
         raises(StopIteration, next, gen)
 
+    def test_frame_clear_really(self):
+        import sys
+        def f(x):
+            return sys._getframe()
+        frame = f(42)
+        assert frame.f_locals['x'] == 42
+        frame.clear()
+        assert frame.f_locals == {}
+
     def test_throw_trace_bug(self):
         import sys
         def f():
___
pypy-commit mailing list
pypy-commit@python.org
https://mail.python.org/mailman/listinfo/pypy-commit


[pypy-commit] pypy py3.5: deque.__contains__()

2016-11-20 Thread arigo
Author: Armin Rigo 
Branch: py3.5
Changeset: r88497:4ab8b2a8f971
Date: 2016-11-20 17:34 +0100
http://bitbucket.org/pypy/pypy/changeset/4ab8b2a8f971/

Log: deque.__contains__()

diff --git a/pypy/module/_collections/interp_deque.py 
b/pypy/module/_collections/interp_deque.py
--- a/pypy/module/_collections/interp_deque.py
+++ b/pypy/module/_collections/interp_deque.py
@@ -284,8 +284,7 @@
         self.modified()
         return w_obj
 
-    def remove(self, w_x):
-        "Remove first occurrence of value."
+    def _find(self, w_x):
         space = self.space
         block = self.leftblock
         index = self.leftindex
@@ -295,14 +294,25 @@
             equal = space.eq_w(w_item, w_x)
             self.checklock(lock)
             if equal:
-                self.del_item(i)
-                return
+                return i
             # Advance the block/index pair
             index += 1
             if index >= BLOCKLEN:
                 block = block.rightlink
                 index = 0
-        raise oefmt(space.w_ValueError, "deque.remove(x): x not in deque")
+        return -1
+
+    def remove(self, w_x):
+        "Remove first occurrence of value."
+        i = self._find(w_x)
+        if i < 0:
+            raise oefmt(self.space.w_ValueError,
+                        "deque.remove(x): x not in deque")
+        self.del_item(i)
+
+    def contains(self, w_x):
+        i = self._find(w_x)
+        return self.space.newbool(i >= 0)
 
     def reverse(self):
         "Reverse *IN PLACE*."
@@ -582,6 +592,7 @@
     __imul__ = interp2app(W_Deque.imul),
     __rmul__ = interp2app(W_Deque.rmul),
     maxlen = GetSetProperty(W_Deque.get_maxlen),
+    __contains__ = interp2app(W_Deque.contains),
 )
 
 # 
diff --git a/pypy/module/_collections/test/test_deque.py 
b/pypy/module/_collections/test/test_deque.py
--- a/pypy/module/_collections/test/test_deque.py
+++ b/pypy/module/_collections/test/test_deque.py
@@ -364,6 +364,8 @@
             d.insert(i, 'a')
             assert 'a' in d
             assert 'b' not in d
+            assert d.__contains__('a')
+            assert not d.__contains__('b')
             assert d.index('a') == i
         d = deque(range(10))
         d.insert(-1, 500)
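
At application level the change just means that 'in' on a deque dispatches to the new __contains__ slot instead of falling back to iteration via __iter__. A quick usage sketch (importing from _collections as the test does; plain collections.deque behaves the same on CPython):

    from _collections import deque

    d = deque([1, 2, 3])
    assert 2 in d                  # goes through d.__contains__(2)
    assert d.__contains__(2)
    assert not d.__contains__(5)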
___
pypy-commit mailing list
pypy-commit@python.org
https://mail.python.org/mailman/listinfo/pypy-commit


[pypy-commit] pypy py3.5: Deque addition specifically checks that the argument is another deque.

2016-11-20 Thread arigo
Author: Armin Rigo 
Branch: py3.5
Changeset: r88498:83ba4f51767c
Date: 2016-11-20 17:39 +0100
http://bitbucket.org/pypy/pypy/changeset/83ba4f51767c/

Log: Deque addition specifically checks that the argument is another
deque.

diff --git a/pypy/module/_collections/interp_deque.py 
b/pypy/module/_collections/interp_deque.py
--- a/pypy/module/_collections/interp_deque.py
+++ b/pypy/module/_collections/interp_deque.py
@@ -178,11 +178,12 @@
                 raise
             self.append(w_obj)
 
-    def add(self, w_iterable):
+    def add(self, w_deque):
+        deque = self.space.interp_w(W_Deque, w_deque)
         copy = W_Deque(self.space)
         copy.maxlen = self.maxlen
         copy.extend(self.iter())
-        copy.extend(w_iterable)
+        copy.extend(deque.iter())
         return self.space.wrap(copy)
 
     def iadd(self, w_iterable):
diff --git a/pypy/module/_collections/test/test_deque.py 
b/pypy/module/_collections/test/test_deque.py
--- a/pypy/module/_collections/test/test_deque.py
+++ b/pypy/module/_collections/test/test_deque.py
@@ -122,6 +122,10 @@
         d2 = deque([3,4,5])
         assert d1 + d2 == deque([1,2,3,3,4,5])
 
+    def test_cannot_add_list(self):
+        from _collections import deque
+        raises(TypeError, "deque([2]) + [3]")
+
     def test_iadd(self):
         from _collections import deque
         d = deque('a')
___
pypy-commit mailing list
pypy-commit@python.org
https://mail.python.org/mailman/listinfo/pypy-commit


[pypy-commit] pypy py3.5: Fix the repr of defaultdicts in case of recursion

2016-11-20 Thread arigo
Author: Armin Rigo 
Branch: py3.5
Changeset: r88499:9865394f0452
Date: 2016-11-20 18:00 +0100
http://bitbucket.org/pypy/pypy/changeset/9865394f0452/

Log: Fix the repr of defaultdicts in case of recursion

diff --git a/pypy/module/_collections/app_defaultdict.py 
b/pypy/module/_collections/app_defaultdict.py
--- a/pypy/module/_collections/app_defaultdict.py
+++ b/pypy/module/_collections/app_defaultdict.py
@@ -31,14 +31,16 @@
 
     def __repr__(self, recurse=set()):
         # XXX not thread-safe, but good enough
+        dictrepr = super(defaultdict, self).__repr__()
         if id(self) in recurse:
-            return "defaultdict(...)"
-        try:
-            recurse.add(id(self))
-            return "defaultdict(%s, %s)" % (repr(self.default_factory),
-                                            super(defaultdict, self).__repr__())
-        finally:
-            recurse.remove(id(self))
+            factoryrepr = "..."
+        else:
+            try:
+                recurse.add(id(self))
+                factoryrepr = repr(self.default_factory)
+            finally:
+                recurse.remove(id(self))
+        return "defaultdict(%s, %s)" % (factoryrepr, dictrepr)
 
     def copy(self):
         return type(self)(self.default_factory, self)
diff --git a/pypy/module/_collections/test/test_defaultdict.py 
b/pypy/module/_collections/test/test_defaultdict.py
--- a/pypy/module/_collections/test/test_defaultdict.py
+++ b/pypy/module/_collections/test/test_defaultdict.py
@@ -90,3 +90,12 @@
         d = _collections.defaultdict(None, {3: 4})
         dict_iter = d.__reduce__()[4]
         assert type(dict_iter) is type(iter(d.items()))
+
+    def test_rec_repr(self):
+        import _collections
+        class X(_collections.defaultdict):
+            def mydefault(self):
+                pass
+        d = X.__new__(X)
+        d.__init__(d.mydefault)
+        assert repr(d).endswith('defaultdict(..., {})>, {})')
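
The recursion guard used above is a generic trick worth spelling out: a mutable default argument is shared between calls, so it can record which objects are currently being repr()'ed and short-circuit re-entrant calls. A standalone sketch (safe_repr and Box are invented names, not part of the module):

    def safe_repr(obj, _active=set()):
        # _active persists across calls (mutable default argument) and holds
        # the ids of objects whose repr() is currently running.
        if id(obj) in _active:
            return "..."
        _active.add(id(obj))
        try:
            return repr(obj)
        finally:
            _active.discard(id(obj))

    class Box(object):
        def __repr__(self):
            return "Box(%s)" % safe_repr(self.payload)

    b = Box()
    b.payload = b       # self-referencing, like the bound-method factory in test_rec_repr
    assert repr(b) == "Box(Box(...))"

As the XXX comment in the diff notes, a module-level set keyed by id() is not thread-safe, but it is good enough for repr().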
___
pypy-commit mailing list
pypy-commit@python.org
https://mail.python.org/mailman/listinfo/pypy-commit


[pypy-commit] pypy better-storesink: document and cleanup stuff

2016-11-20 Thread cfbolz
Author: Carl Friedrich Bolz 
Branch: better-storesink
Changeset: r88500:94adb0d43fce
Date: 2016-11-20 22:45 +0100
http://bitbucket.org/pypy/pypy/changeset/94adb0d43fce/

Log: document and cleanup stuff

diff --git a/rpython/translator/backendopt/cse.py 
b/rpython/translator/backendopt/cse.py
--- a/rpython/translator/backendopt/cse.py
+++ b/rpython/translator/backendopt/cse.py
@@ -1,3 +1,9 @@
+""" A very simple common subexpression elimination pass. It's a very simple
+forward pass, that simply eliminates operations that were executed in all paths
+leading to the current block. Information flows strictly forward, using a cache
+of already seen operations. Caches are merged at control flow merges.
+
+No loop invariant code motion occurs (yet). """
 import collections
 
 from rpython.translator.backendopt import support
@@ -40,6 +46,7 @@
 heapcache = {}
 if new_unions is None:
 new_unions = unionfind.UnionFind()
+# (opname, concretetype of result, args) -> previous (live) result
 self.purecache = purecache
 self.heapcache = heapcache
 self.variable_families = variable_families
@@ -61,6 +68,8 @@
 # the *cache dictionaries, never to actually put any new variable into
 # the graph, because the concretetypes can change when calling
 # _var_rep.
+if not isinstance(var, Variable):
+return var
 var = self.new_unions.find_rep(var)
 return self.variable_families.find_rep(var)
 
@@ -71,6 +80,9 @@
 return (opname, concretetype, tuple(listargs))
 
 def _find_new_res(self, results):
+""" merges a list of results into a new variable. If all the results
+are the same, just use that, in which case it's not necessary to pass
+it along any links either. """
 # helper function for _merge_results
 first = self._var_rep(results[0])
 newres = None
@@ -101,17 +113,27 @@
 backedge.args.append(newres)
 return newres
 
-def merge(self, firstlink, tuples, backedges):
+def _merge(self, firstlink, tuples, backedges):
+""" The core algorithm of merging: actually merge many caches. """
 purecache = {}
 block = firstlink.target
-# copy all operations that exist in *all* blocks over. need to add a 
new
-# inputarg if the result is really a variable
+# copy all operations that exist in *all* blocks over.
 
 # note that a backedge is not a problem for regular pure operations:
 # since the argument is a phi node iff it is not loop invariant,
 # copying things over is always safe (yay SSA form!)
 
-# try non-straight merges
+# try non-straight merges: they are merges where the operands are
+# different in the previous blocks, but where the arguments themselves
+# are merged into a new variable in the target block
+# this is code like this:
+# if 
+# x = i + 1
+# a = i
+# else:
+# y = j + 1
+# a = j
+# here, a + 1 is redundant, and can be replaced by the merge of x and y
 for argindex in range(len(block.inputargs)):
 inputarg = block.inputargs[argindex]
 # bit slow, but probably ok
@@ -136,8 +158,9 @@
 newres = self._merge_results(tuples, results, backedges)
 purecache[newkey] = newres
 
+# the simple case: the operation is really performed on the *same*
+# operands. This is the case if the key exists in all other caches
 for key, res in self.purecache.iteritems():
-# "straight" merge: the variable is in all other caches
 results = [res]
 for link, cache in tuples[1:]:
 val = cache.purecache.get(key, None)
@@ -228,10 +251,7 @@
 self.new_unions.union(res, op.result)
 
 def cse_block(self, block):
-def representative_arg(arg):
-if isinstance(arg, Variable):
-return self._var_rep(arg)
-return arg
+""" perform common subexpression elimination on block. """
 added_same_as = 0
 for opindex in range(len(block.operations) - block.canraise):
 op = block.operations[opindex]
@@ -239,7 +259,7 @@
 if op.opname == 'getfield':
 fieldname = op.args[1].value
 concretetype = op.args[0].concretetype
-arg0 = representative_arg(op.args[0])
+arg0 = self._var_rep(op.args[0])
 key = (arg0, op.args[0].concretetype, fieldname)
 res = self.heapcache.get(key, None)
 if res is not None:
@@ -250,20 +270,20 @@
 continue
 if op.opname == 'setfield':
 concretetype = op.args[0].concretetype
-target = representative_arg(op.args[0])
+target = self._var_re
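
The docstring added above describes the pass at the flow-graph level; the bookkeeping itself can be illustrated by a toy straight-line value-numbering sketch (cse_straightline() and its (result, opname, args) tuples are invented for illustration and have nothing to do with RPython's real Block/SpaceOperation objects):

    def cse_straightline(ops):
        # ops: list of (result_var, opname, args); returns the rewritten list
        seen = {}       # (opname, args) -> result_var of the first occurrence
        aliases = {}    # removed result_var -> the earlier, equivalent var
        out = []
        for res, opname, args in ops:
            args = tuple(aliases.get(a, a) for a in args)
            key = (opname, args)
            if key in seen:
                aliases[res] = seen[key]       # duplicate: drop the operation
            else:
                seen[key] = res
                out.append((res, opname, args))
        return out

    ops = [("v1", "int_add", ("i", "j")),
           ("v2", "int_add", ("i", "j")),     # redundant: same key as v1
           ("v3", "int_mul", ("v2", "k"))]    # gets rewritten to use v1
    assert cse_straightline(ops) == [("v1", "int_add", ("i", "j")),
                                     ("v3", "int_mul", ("v1", "k"))]

The real pass additionally has to merge such caches at control-flow joins, which is what the _merge() machinery above deals with.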

[pypy-commit] pypy better-storesink: there's not actually a need for a separate union find, just use variable_families

2016-11-20 Thread cfbolz
Author: Carl Friedrich Bolz 
Branch: better-storesink
Changeset: r88502:6d0161df2581
Date: 2016-11-20 22:53 +0100
http://bitbucket.org/pypy/pypy/changeset/6d0161df2581/

Log: there's not actually a need for a separate union find, just use
variable_families

diff --git a/rpython/translator/backendopt/cse.py 
b/rpython/translator/backendopt/cse.py
--- a/rpython/translator/backendopt/cse.py
+++ b/rpython/translator/backendopt/cse.py
@@ -38,24 +38,21 @@
 return getattr(llop, op.opname).canfold
 
 class Cache(object):
-def __init__(self, variable_families, analyzer, new_unions=None,
+def __init__(self, variable_families, analyzer,
  purecache=None, heapcache=None):
 if purecache is None:
 purecache = {}
 if heapcache is None:
 heapcache = {}
-if new_unions is None:
-new_unions = unionfind.UnionFind()
 # (opname, concretetype of result, args) -> previous (live) result
 self.purecache = purecache
 self.heapcache = heapcache
 self.variable_families = variable_families
 self.analyzer = analyzer
-self.new_unions = new_unions
 
 def copy(self):
 return Cache(
-self.variable_families, self.analyzer, self.new_unions,
+self.variable_families, self.analyzer,
 self.purecache.copy(),
 self.heapcache.copy())
 
@@ -70,7 +67,6 @@
 # _var_rep.
 if not isinstance(var, Variable):
 return var
-var = self.new_unions.find_rep(var)
 return self.variable_families.find_rep(var)
 
 def _key_with_replacement(self, key, index, var):
@@ -211,7 +207,7 @@
 newres = self._merge_results(tuples, results, backedges)
 heapcache[key] = newres
 return Cache(
-self.variable_families, self.analyzer, self.new_unions,
+self.variable_families, self.analyzer,
 purecache, heapcache)
 
 def _clear_heapcache_for(self, concretetype, fieldname):
@@ -248,7 +244,9 @@
 assert op.result.concretetype == res.concretetype
 op.opname = 'same_as'
 op.args = [res]
-self.new_unions.union(res, op.result)
+# now that we know that the variables are the same, just merge them in
+# variable_families too
+self.variable_families.union(res, op.result)
 
 def cse_block(self, block):
 """ perform common subexpression elimination on block. """
@@ -313,8 +311,9 @@
 if op.opname == "cast_pointer":
 # cast_pointer is a pretty strange operation! it introduces
 # more aliases, that confuse the CSE pass. Therefore we unify
-# the two variables in new_unions, to improve the folding.
-self.new_unions.union(op.args[0], op.result)
+# the two variables in variable_families, to improve the
+# folding.
+self.variable_families.union(op.args[0], op.result)
 # don't do anything further
 continue
 if not can_fold_op:
___
pypy-commit mailing list
pypy-commit@python.org
https://mail.python.org/mailman/listinfo/pypy-commit


[pypy-commit] pypy better-storesink: remove the ability to deal with elidable calls: this is never useful anywhere

2016-11-20 Thread cfbolz
Author: Carl Friedrich Bolz 
Branch: better-storesink
Changeset: r88501:38b30af12c5c
Date: 2016-11-20 22:46 +0100
http://bitbucket.org/pypy/pypy/changeset/38b30af12c5c/

Log: remove the ability to deal with elidable calls: this is never useful
anywhere in pypy

diff --git a/rpython/translator/backendopt/cse.py 
b/rpython/translator/backendopt/cse.py
--- a/rpython/translator/backendopt/cse.py
+++ b/rpython/translator/backendopt/cse.py
@@ -301,18 +301,7 @@
 
 can_fold_op = can_fold(op)
 has_side_effects_op = has_side_effects(op)
-if op.opname == "direct_call":
-funcobj = op.args[0].value._obj
-func = getattr(funcobj, '_callable', None)
-elidable = getattr(func, "_elidable_function_", False)
-if elidable:
-# can't hash pointers, so use the graph directly
-key = ("direct_call", op.result.concretetype,
-   (funcobj.graph, ) +
-   tuple([self._var_rep(arg)
-   for arg in op.args[1:]]))
-can_fold_op = True
-elif can_fold_op:
+if can_fold_op:
 key = (op.opname, op.result.concretetype,
tuple([self._var_rep(arg) for arg in op.args]))
 
diff --git a/rpython/translator/backendopt/test/test_cse.py 
b/rpython/translator/backendopt/test/test_cse.py
--- a/rpython/translator/backendopt/test/test_cse.py
+++ b/rpython/translator/backendopt/test/test_cse.py
@@ -557,28 +557,6 @@
 return len(l)
 self.check(f, [int], fullopts=True, getarraysize=0)
 
-def test_remove_duplicate_elidable_call(self):
-@jit.elidable
-def p(x):
-return x + 1
-
-def f(x):
-return p(x) + p(x)
-
-self.check(f, [int], direct_call=1)
-
-def test_remove_duplicate_elidable_call_raises(self):
-@jit.elidable
-def p(x):
-return x + 1
-
-def f(x):
-try:
-return p(x) + p(x)
-except IndexError:
-return -5
-
-self.check(f, [int], direct_call=2)
 
 def fakevar(name='v'):
 var = Variable(name)
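
For context, 'elidable' refers to rpython.rlib.jit.elidable: a promise that calling the function twice with the same arguments gives the same result and has no side effects, which is what allowed the translation-time pass to deduplicate such direct_calls. The removed tests boil down to this pattern (a sketch that only runs inside an RPython checkout):

    from rpython.rlib import jit

    @jit.elidable
    def p(x):
        # pure by contract: same x, same result, no side effects
        return x + 1

    def f(x):
        # an elidable-aware CSE could reuse the first p(x) for the second call;
        # after this commit the translation-time pass no longer does so
        return p(x) + p(x)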
___
pypy-commit mailing list
pypy-commit@python.org
https://mail.python.org/mailman/listinfo/pypy-commit


[pypy-commit] pypy better-storesink: merge default

2016-11-20 Thread cfbolz
Author: Carl Friedrich Bolz 
Branch: better-storesink
Changeset: r88503:b612e2205d01
Date: 2016-11-20 22:54 +0100
http://bitbucket.org/pypy/pypy/changeset/b612e2205d01/

Log: merge default

diff too long, truncating to 2000 out of 84636 lines

diff --git a/.hgtags b/.hgtags
--- a/.hgtags
+++ b/.hgtags
@@ -33,3 +33,4 @@
 050d84dd78997f021acf0e133934275d63547cc0 release-pypy2.7-v5.4.1
 050d84dd78997f021acf0e133934275d63547cc0 release-pypy2.7-v5.4.1
 0e2d9a73f5a1818d0245d75daccdbe21b2d5c3ef release-pypy2.7-v5.4.1
+aff251e543859ce4508159dd9f1a82a2f553de00 release-pypy2.7-v5.6.0
diff --git a/LICENSE b/LICENSE
--- a/LICENSE
+++ b/LICENSE
@@ -44,15 +44,15 @@
   Matti Picus
   Alex Gaynor
   Philip Jenvey
+  Ronan Lamy
   Brian Kearns
-  Ronan Lamy
+  Richard Plangger
   Michael Hudson
   Manuel Jacob
   David Schneider
   Holger Krekel
   Christian Tismer
   Hakan Ardo
-  Richard Plangger
   Benjamin Peterson
   Anders Chrigstrom
   Eric van Riet Paap
@@ -68,8 +68,8 @@
   Niklaus Haldimann
   Camillo Bruni
   Laura Creighton
+  Romain Guillebert
   Toon Verwaest
-  Romain Guillebert
   Leonardo Santagada
   Seo Sanghyeon
   Ronny Pfannschmidt
@@ -89,7 +89,9 @@
   Ludovic Aubry
   Jacob Hallen
   Jason Creighton
+  Mark Young
   Alex Martelli
+  Spenser Bauman
   Michal Bendowski
   stian
   Jan de Mooij
@@ -100,20 +102,21 @@
   Stefan Schwarzer
   Valentino Volonghi
   Tomek Meka
+  Stefano Rivera
   Patrick Maupin
+  Devin Jeanpierre
   Bob Ippolito
   Bruno Gola
   David Malcolm
   Jean-Paul Calderone
-  Mark Young
   Timo Paulssen
+  Edd Barrett
   Squeaky
-  Devin Jeanpierre
   Marius Gedminas
   Alexandre Fayolle
   Simon Burton
-  Stefano Rivera
   Martin Matusiak
+  Nicolas Truessel
   Konstantin Lopuhin
   Wenzhu Man
   John Witulski
@@ -123,14 +126,12 @@
   Dario Bertini
   Mark Pearse
   Simon Cross
-  Edd Barrett
+  Jeremy Thurgood
   Andreas Stührk
   Tobias Pape
   Jean-Philippe St. Pierre
   Guido van Rossum
   Pavel Vinogradov
-  Spenser Bauman
-  Jeremy Thurgood
   Paweł Piotr Przeradowski
   Paul deGrandis
   Ilya Osadchiy
@@ -141,7 +142,6 @@
   tav
   Taavi Burns
   Georg Brandl
-  Nicolas Truessel
   Bert Freudenberg
   Stian Andreassen
   Wanja Saatkamp
@@ -156,19 +156,20 @@
   Preston Timmons
   David Ripton
   Jeff Terrace
+  Tim Felgentreff
   Dusty Phillips
   Lukas Renggli
   Guenter Jantzen
   William Leslie
   Ned Batchelder
-  Tim Felgentreff
   Anton Gulenko
   Amit Regmi
   Ben Young
-  Sergey Matyunin
+  Jasper Schulz
   Nicolas Chauvat
   Andrew Durdin
   Andrew Chambers
+  Sergey Matyunin
   Michael Schneider
   Nicholas Riley
   Jason Chu
@@ -184,16 +185,16 @@
   Jared Grubb
   Karl Bartel
   Wouter van Heyst
-  Sebastian Pawluś
   Brian Dorsey
   Victor Stinner
   Andrews Medina
+  Sebastian Pawluś
   Stuart Williams
-  Jasper Schulz
-  Christian Hudon
+  Daniel Patrick
+  Aaron Iles
   Toby Watson
   Antoine Pitrou
-  Aaron Iles
+  Christian Hudon
   Michael Cheng
   Justas Sadzevicius
   Gasper Zejn
@@ -201,8 +202,8 @@
   Stanislaw Halik
   Mikael Schönenberg
   Berkin Ilbeyi
+  Faye Zhao
   Elmo Mäntynen
-  Faye Zhao
   Jonathan David Riehl
   Anders Qvist
   Corbin Simpson
@@ -211,11 +212,12 @@
   Alex Perry
   Vaibhav Sood
   Alan McIntyre
+  Reuben Cummings
   Alexander Sedov
   p_ziesch...@yahoo.de
   Attila Gobi
-  Jasper.Schulz
   Christopher Pope
+  Aaron Gallagher
   Florin Papa
   Christian Tismer 
   Marc Abramowitz
@@ -232,7 +234,6 @@
   Gabriel
   Lukas Vacek
   Kunal Grover
-  Aaron Gallagher
   Andrew Dalke
   Sylvain Thenault
   Jakub Stasiak
@@ -255,6 +256,7 @@
   Philipp Rustemeuer
   Henrik Vendelbo
   Richard Lancaster
+  Yasir Suhail
   Dan Buch
   Miguel de Val Borro
   Artur Lisiecki
@@ -267,6 +269,7 @@
   Catalin Gabriel Manciu
   Tomo Cocoa
   Kim Jin Su
+  rafalgalczyn...@gmail.com
   Toni Mattis
   Amber Brown
   Lucas Stadler
@@ -294,9 +297,7 @@
   Michael Hudson-Doyle
   Anders Sigfridsson
   Nikolay Zinov
-  Yasir Suhail
   Jason Michalski
-  rafalgalczyn...@gmail.com
   Floris Bruynooghe
   Laurens Van Houtven
   Akira Li
@@ -310,9 +311,10 @@
   James Lan
   Volodymyr Vladymyrov
   shoma hosaka
-  Daniel Neuhäuser
   Ben Mather
   Niclas Olofsson
+  Matthew Miller
+  Rodrigo Araújo
   halgari
   Boglarka Vezer
   Chris Pressey
@@ -322,8 +324,9 @@
   Dinu Gherman
   Chris Lambacher
   coolbutusel...@gmail.com
+  Daniil Yarancev
   Jim Baker
-  Rodrigo Araújo
+  Dan Crosta
   Nikolaos-Digenis Karagiannis
   James Robert
   Armin Ronacher
@@ -337,32 +340,31 @@
   Tomer Chachamu
   Christopher Groskopf
   Asmo Soinio
-  Stefan Marr
   jiaaro
   Mads Kiilerich
-  opassembler.py
   Antony Lee
   Jason Madden
  Daniel Neuhäuser
+  reub...@gmail.com
   Yaroslav Fedevych
   Jim Hunziker
   Markus Unterwaditzer
   Even Wiik Thomassen
   jbs
   squeaky
-  Zearin
   soareschen
   Jonas Pfannschmidt
   Kurt Griffiths
   Mike Bayer
-  Matthew Miller
+  Stefan Marr
   Flavio Percoco
   Kristoffer Kleine
-  yasirs
   Michael Chermside
   Anna Ravencroft
   p

[pypy-commit] pypy better-storesink: remove the previous storesink (it's fully replaced by the new CSE pass)

2016-11-20 Thread cfbolz
Author: Carl Friedrich Bolz 
Branch: better-storesink
Changeset: r88504:986d2adef586
Date: 2016-11-20 22:59 +0100
http://bitbucket.org/pypy/pypy/changeset/986d2adef586/

Log: remove the previous storesink (it's fully replaced by the new CSE
pass)

diff --git a/rpython/translator/backendopt/storesink.py 
b/rpython/translator/backendopt/storesink.py
deleted file mode 100644
--- a/rpython/translator/backendopt/storesink.py
+++ /dev/null
@@ -1,97 +0,0 @@
-
-from rpython.rtyper.lltypesystem.lloperation import llop
-from rpython.flowspace.model import mkentrymap, Variable
-from rpython.translator.backendopt import removenoops
-from rpython.translator import simplify
-
-def has_side_effects(op):
-if op.opname == 'debug_assert' or op.opname == 'jit_force_virtualizable':
-return False
-try:
-return getattr(llop, op.opname).sideeffects
-except AttributeError:
-return True
-
-
-def storesink_graph(graph):
-""" remove superfluous getfields. use a super-local method: all non-join
-blocks inherit the heap information from their (single) predecessor
-"""
-added_some_same_as = False
-entrymap = mkentrymap(graph)
-
-# all merge blocks are starting points
-todo = [(block, None, None) for (block, prev_blocks) in 
entrymap.iteritems()
-if len(prev_blocks) > 1 or block is graph.startblock]
-
-visited = 0
-
-while todo:
-block, cache, inputlink = todo.pop()
-visited += 1
-if cache is None:
-cache = {}
-
-if block.operations:
-changed_block = _storesink_block(block, cache, inputlink)
-added_some_same_as = changed_block or added_some_same_as
-for link in block.exits:
-if len(entrymap[link.target]) == 1:
-new_cache = _translate_cache(cache, link)
-todo.append((link.target, new_cache, link))
-
-assert visited == len(entrymap)
-if added_some_same_as:
-removenoops.remove_same_as(graph)
-simplify.transform_dead_op_vars(graph)
-
-def _translate_cache(cache, link):
-if link.target.operations == (): # exit or except block:
-return {}
-block = link.target
-local_versions = {var1: var2 for var1, var2 in zip(link.args, 
block.inputargs)}
-def _translate_arg(arg):
-if isinstance(arg, Variable):
-res = local_versions.get(arg, None)
-if res is None:
-res = Variable(arg)
-res.concretetype = arg.concretetype
-link.args.append(arg)
-block.inputargs.append(res)
-local_versions[arg] = res
-return res
-else:
-return arg
-new_cache = {}
-for (var, field), res in cache.iteritems():
-if var in local_versions or not isinstance(var, Variable):
-new_cache[_translate_arg(var), field] = _translate_arg(res)
-return new_cache
-
-def _storesink_block(block, cache, inputlink):
-def clear_cache_for(cache, concretetype, fieldname):
-for k in cache.keys():
-if k[0].concretetype == concretetype and k[1] == fieldname:
-del cache[k]
-
-added_some_same_as = False
-for op in block.operations:
-if op.opname == 'getfield':
-tup = (op.args[0], op.args[1].value)
-res = cache.get(tup, None)
-if res is not None:
-op.opname = 'same_as'
-op.args = [res]
-added_some_same_as = True
-else:
-cache[tup] = op.result
-elif op.opname in ('setarrayitem', 'setinteriorfield', "malloc", 
"malloc_varsize"):
-pass
-elif op.opname == 'setfield':
-target = op.args[0]
-field = op.args[1].value
-clear_cache_for(cache, target.concretetype, field)
-cache[target, field] = op.args[2]
-elif has_side_effects(op):
-cache.clear()
-return added_some_same_as
diff --git a/rpython/translator/backendopt/test/test_storesink.py 
b/rpython/translator/backendopt/test/test_storesink.py
deleted file mode 100644
--- a/rpython/translator/backendopt/test/test_storesink.py
+++ /dev/null
@@ -1,167 +0,0 @@
-
-import py
-from rpython.translator.translator import TranslationContext, graphof
-from rpython.translator.backendopt.storesink import storesink_graph
-from rpython.translator.backendopt import removenoops
-from rpython.flowspace.model import checkgraph
-from rpython.conftest import option
-
-class TestStoreSink(object):
-def translate(self, func, argtypes):
-t = TranslationContext()
-t.buildannotator().build_types(func, argtypes)
-t.buildrtyper().specialize()
-return t
-
-def check(self, f, argtypes, no_getfields=0):
-t = self.translate(f, argtypes)
-getfields = 0
-graph = graphof(t, f)
-removenoops.remove_same_as(graph)
-checkgraph(graph