Author: Armin Rigo <[email protected]>
Branch: share-cpyext-cpython-api
Changeset: r84055:4c3d9f56e6d4
Date: 2016-04-30 11:32 +0100
http://bitbucket.org/pypy/pypy/changeset/4c3d9f56e6d4/
Log: hg merge default
diff --git a/pypy/doc/faq.rst b/pypy/doc/faq.rst
--- a/pypy/doc/faq.rst
+++ b/pypy/doc/faq.rst
@@ -117,13 +117,22 @@
On which platforms does PyPy run?
---------------------------------
-PyPy is regularly and extensively tested on Linux machines. It mostly
+PyPy currently supports:
+
+ * **x86** machines on most common operating systems
+ (Linux 32/64 bits, Mac OS X 64 bits, Windows 32 bits, OpenBSD, FreeBSD),
+
+ * newer **ARM** hardware (ARMv6 or ARMv7, with VFPv3) running Linux,
+
+ * big- and little-endian variants of **PPC64** running Linux,
+
+ * **s390x** running Linux
+
+PyPy is regularly and extensively tested on Linux machines. It
works on Mac and Windows: it is tested there, but most of us are running
-Linux so fixes may depend on 3rd-party contributions. PyPy's JIT
-works on x86 (32-bit or 64-bit) and on ARM (ARMv6 or ARMv7).
-Support for POWER (64-bit) is stalled at the moment.
+Linux so fixes may depend on 3rd-party contributions.
-To bootstrap from sources, PyPy can use either CPython (2.6 or 2.7) or
+To bootstrap from sources, PyPy can use either CPython 2.7 or
another (e.g. older) PyPy. Cross-translation is not really supported:
e.g. to build a 32-bit PyPy, you need to have a 32-bit environment.
Cross-translation is only explicitly supported between a 32-bit Intel
diff --git a/pypy/interpreter/test/test_typedef.py b/pypy/interpreter/test/test_typedef.py
--- a/pypy/interpreter/test/test_typedef.py
+++ b/pypy/interpreter/test/test_typedef.py
@@ -362,6 +362,45 @@
""")
assert seen == [1]
+ def test_mapdict_number_of_slots(self):
+ space = self.space
+ a, b, c = space.unpackiterable(space.appexec([], """():
+ class A(object):
+ pass
+ a = A()
+ a.x = 1
+ class B:
+ pass
+ b = B()
+ b.x = 1
+ class C(int):
+ pass
+ c = C(1)
+ c.x = 1
+ return a, b, c
+ """), 3)
+ assert not hasattr(a, "storage")
+ assert not hasattr(b, "storage")
+ assert hasattr(c, "storage")
+
+ def test_del(self):
+ space = self.space
+ a, b, c, d = space.unpackiterable(space.appexec([], """():
+ class A(object):
+ pass
+ class B(object):
+ def __del__(self):
+ pass
+ class F(file):
+ pass
+ class G(file):
+ def __del__(self):
+ pass
+ return A(), B(), F("xyz", "w"), G("ghi", "w")
+ """))
+ assert type(b).__base__ is type(a)
+ assert hasattr(c, "__del__")
+ assert type(d) is type(c)
class AppTestTypeDef:
diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py
--- a/pypy/interpreter/typedef.py
+++ b/pypy/interpreter/typedef.py
@@ -103,43 +103,61 @@
 # we need two subclasses of the app-level type, one to add mapdict, and then one
# to add del to not slow down the GC.
-def get_unique_interplevel_subclass(config, cls, needsdel=False):
+def get_unique_interplevel_subclass(space, cls, needsdel=False):
"NOT_RPYTHON: initialization-time only"
if hasattr(cls, '__del__') and getattr(cls, "handle_del_manually", False):
needsdel = False
assert cls.typedef.acceptable_as_base_class
- key = config, cls, needsdel
+ key = space, cls, needsdel
try:
return _subclass_cache[key]
except KeyError:
# XXX can save a class if cls already has a __del__
- if needsdel:
- cls = get_unique_interplevel_subclass(config, cls, False)
- subcls = _getusercls(config, cls, needsdel)
+ keys = [key]
+ base_has_del = hasattr(cls, '__del__')
+ if base_has_del:
+ # if the base has a __del__, we only need one class
+ keys = [(space, cls, True), (space, cls, False)]
+ needsdel = True
+ elif needsdel:
+ cls = get_unique_interplevel_subclass(space, cls, False)
+ subcls = _getusercls(space, cls, needsdel)
assert key not in _subclass_cache
- _subclass_cache[key] = subcls
+ for key in keys:
+ _subclass_cache[key] = subcls
return subcls
get_unique_interplevel_subclass._annspecialcase_ = "specialize:memo"
_subclass_cache = {}
-def _getusercls(config, cls, wants_del, reallywantdict=False):
+def _getusercls(space, cls, wants_del, reallywantdict=False):
from rpython.rlib import objectmodel
+ from pypy.objspace.std.objectobject import W_ObjectObject
+ from pypy.module.__builtin__.interp_classobj import W_InstanceObject
from pypy.objspace.std.mapdict import (BaseUserClassMapdict,
MapdictDictSupport, MapdictWeakrefSupport,
- _make_storage_mixin_size_n)
+ _make_storage_mixin_size_n, MapdictStorageMixin)
typedef = cls.typedef
name = cls.__name__ + "User"
- mixins_needed = [BaseUserClassMapdict, _make_storage_mixin_size_n()]
- if reallywantdict or not typedef.hasdict:
- # the type has no dict, mapdict to provide the dict
- mixins_needed.append(MapdictDictSupport)
- name += "Dict"
- if not typedef.weakrefable:
- # the type does not support weakrefs yet, mapdict to provide weakref
- # support
- mixins_needed.append(MapdictWeakrefSupport)
- name += "Weakrefable"
+ mixins_needed = []
+ copy_methods = []
+ mixins_needed = []
+ name = cls.__name__
+ if not cls.user_overridden_class:
+ if cls is W_ObjectObject or cls is W_InstanceObject:
+ mixins_needed.append(_make_storage_mixin_size_n())
+ else:
+ mixins_needed.append(MapdictStorageMixin)
+ copy_methods = [BaseUserClassMapdict]
+ if reallywantdict or not typedef.hasdict:
+ # the type has no dict, mapdict to provide the dict
+ copy_methods.append(MapdictDictSupport)
+ name += "Dict"
+ if not typedef.weakrefable:
+            # the type does not support weakrefs yet, mapdict to provide weakref
+ # support
+ copy_methods.append(MapdictWeakrefSupport)
+ name += "Weakrefable"
if wants_del:
name += "Del"
parent_destructor = getattr(cls, '__del__', None)
@@ -148,14 +166,14 @@
parent_destructor(self)
def call_applevel_del(self):
assert isinstance(self, subcls)
- self.space.userdel(self)
+ space.userdel(self)
class Proto(object):
def __del__(self):
self.clear_all_weakrefs()
- self.enqueue_for_destruction(self.space, call_applevel_del,
+ self.enqueue_for_destruction(space, call_applevel_del,
'method __del__ of ')
if parent_destructor is not None:
- self.enqueue_for_destruction(self.space, call_parent_del,
+ self.enqueue_for_destruction(space, call_parent_del,
'internal destructor of ')
mixins_needed.append(Proto)
@@ -163,10 +181,17 @@
user_overridden_class = True
for base in mixins_needed:
objectmodel.import_from_mixin(base)
+ for copycls in copy_methods:
+ _copy_methods(copycls, subcls)
del subcls.base
subcls.__name__ = name
return subcls
+def _copy_methods(copycls, subcls):
+ for key, value in copycls.__dict__.items():
+ if (not key.startswith('__') or key == '__del__'):
+ setattr(subcls, key, value)
+
# ____________________________________________________________
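
The _getusercls/_copy_methods change above boils down to: build one interp-level subclass per (space, cls, needsdel) key, cache it, and graft the mapdict support methods onto it by copying them from helper classes rather than through inheritance. A minimal, standalone sketch of that pattern in plain CPython follows; make_user_subclass and DictMixin are invented names for illustration, and the __del__ handling is left out.

    _cache = {}

    class DictMixin(object):
        # stand-in for a mapdict support class; its non-dunder methods get copied
        def getdict(self):
            return self.__dict__

    def copy_methods(src, dst):
        # same filter as _copy_methods above: skip dunders, but keep __del__
        for key, value in src.__dict__.items():
            if not key.startswith('__') or key == '__del__':
                setattr(dst, key, value)

    def make_user_subclass(base, wants_del=False):
        key = (base, wants_del)
        if key in _cache:
            return _cache[key]
        subcls = type(base.__name__ + 'User', (base,), {})
        copy_methods(DictMixin, subcls)
        _cache[key] = subcls
        return subcls

    # the cache keeps the subclass unique per key, as the tests above check
    assert make_user_subclass(object) is make_user_subclass(object)
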
diff --git a/pypy/module/__builtin__/interp_classobj.py b/pypy/module/__builtin__/interp_classobj.py
--- a/pypy/module/__builtin__/interp_classobj.py
+++ b/pypy/module/__builtin__/interp_classobj.py
@@ -195,9 +195,9 @@
return
self.cls_without_del = _getusercls(
- space.config, W_InstanceObject, False, reallywantdict=True)
+ space, W_InstanceObject, False, reallywantdict=True)
self.cls_with_del = _getusercls(
- space.config, W_InstanceObject, True, reallywantdict=True)
+ space, W_InstanceObject, True, reallywantdict=True)
def class_descr_call(space, w_self, __args__):
diff --git a/pypy/module/cpyext/ndarrayobject.py b/pypy/module/cpyext/ndarrayobject.py
--- a/pypy/module/cpyext/ndarrayobject.py
+++ b/pypy/module/cpyext/ndarrayobject.py
@@ -239,9 +239,7 @@
gufunctype = lltype.Ptr(ufuncs.GenericUfunc)
-# XXX single rffi.CArrayPtr(gufunctype) does not work, this does, is there
-# a problem with casting function pointers?
-@cpython_api([rffi.CArrayPtr(rffi.CArrayPtr(gufunctype)), rffi.VOIDP, rffi.CCHARP, Py_ssize_t, Py_ssize_t,
+@cpython_api([rffi.CArrayPtr(gufunctype), rffi.VOIDP, rffi.CCHARP, Py_ssize_t, Py_ssize_t,
Py_ssize_t, Py_ssize_t, rffi.CCHARP, rffi.CCHARP, Py_ssize_t,
rffi.CCHARP], PyObject, header=HEADER)
def PyUFunc_FromFuncAndDataAndSignature(space, funcs, data, types, ntypes,
@@ -256,7 +254,7 @@
funcs_w = [None] * ntypes
dtypes_w = [None] * ntypes * (nin + nout)
for i in range(ntypes):
-        funcs_w[i] = ufuncs.W_GenericUFuncCaller(rffi.cast(gufunctype, funcs[i]), data)
+ funcs_w[i] = ufuncs.W_GenericUFuncCaller(funcs[i], data)
for i in range(ntypes*(nin+nout)):
dtypes_w[i] = get_dtype_cache(space).dtypes_by_num[ord(types[i])]
w_funcs = space.newlist(funcs_w)
@@ -268,7 +266,7 @@
w_signature, w_identity, w_name, w_doc, stack_inputs=True)
return ufunc_generic
-@cpython_api([rffi.CArrayPtr(rffi.CArrayPtr(gufunctype)), rffi.VOIDP, rffi.CCHARP, Py_ssize_t, Py_ssize_t,
+@cpython_api([rffi.CArrayPtr(gufunctype), rffi.VOIDP, rffi.CCHARP, Py_ssize_t, Py_ssize_t,
Py_ssize_t, Py_ssize_t, rffi.CCHARP, rffi.CCHARP, Py_ssize_t],
PyObject, header=HEADER)
def PyUFunc_FromFuncAndData(space, funcs, data, types, ntypes,
nin, nout, identity, name, doc, check_return):
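
The decorator change above drops the old CArrayPtr(CArrayPtr(...)) workaround: with the ll2ctypes fix further down, an array of C function pointers can be declared directly and funcs[i] is usable without an extra rffi.cast. A rough sketch of the two declarations, using a dummy no-argument function type in place of gufunctype (illustration only):

    from rpython.rtyper.lltypesystem import lltype, rffi

    FUNC = lltype.Ptr(lltype.FuncType([], rffi.INT))   # stand-in for gufunctype

    # old workaround: declare a doubly-wrapped pointer type and
    # rffi.cast(...) every element back to FUNC by hand
    OLD_ARG_TYPE = rffi.CArrayPtr(rffi.CArrayPtr(FUNC))
    # after the fix: declare the array of function pointers directly
    NEW_ARG_TYPE = rffi.CArrayPtr(FUNC)
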
diff --git a/pypy/module/cpyext/test/test_cpyext.py b/pypy/module/cpyext/test/test_cpyext.py
--- a/pypy/module/cpyext/test/test_cpyext.py
+++ b/pypy/module/cpyext/test/test_cpyext.py
@@ -254,13 +254,15 @@
class AppTestCpythonExtensionBase(LeakCheckingTest):
def setup_class(cls):
- cls.space.getbuiltinmodule("cpyext")
- from pypy.module.imp.importing import importhook
- importhook(cls.space, "os") # warm up reference counts
+ space = cls.space
+ space.getbuiltinmodule("cpyext")
+ # 'import os' to warm up reference counts
+ w_import = space.builtin.getdictvalue(space, '__import__')
+ space.call_function(w_import, space.wrap("os"))
#state = cls.space.fromcache(RefcountState) ZZZ
#state.non_heaptypes_w[:] = []
if not cls.runappdirect:
- cls.w_runappdirect = cls.space.wrap(cls.runappdirect)
+ cls.w_runappdirect = space.wrap(cls.runappdirect)
def setup_method(self, func):
@gateway.unwrap_spec(name=str)
diff --git a/pypy/module/cpyext/test/test_ndarrayobject.py b/pypy/module/cpyext/test/test_ndarrayobject.py
--- a/pypy/module/cpyext/test/test_ndarrayobject.py
+++ b/pypy/module/cpyext/test/test_ndarrayobject.py
@@ -366,7 +366,7 @@
def test_ufunc(self):
if self.runappdirect:
from numpy import arange
- py.test.xfail('why does this segfault on cpython?')
+ py.test.xfail('segfaults on cpython: PyUFunc_API == NULL?')
else:
from _numpypy.multiarray import arange
mod = self.import_extension('foo', [
diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py
--- a/pypy/objspace/std/mapdict.py
+++ b/pypy/objspace/std/mapdict.py
@@ -277,7 +277,7 @@
def copy(self, obj):
result = Object()
result.space = self.space
- result._init_empty(self)
+ result._mapdict_init_empty(self)
return result
def length(self):
@@ -286,7 +286,7 @@
def set_terminator(self, obj, terminator):
result = Object()
result.space = self.space
- result._init_empty(terminator)
+ result._mapdict_init_empty(terminator)
return result
def remove_dict_entries(self, obj):
@@ -304,7 +304,7 @@
def materialize_r_dict(self, space, obj, dict_w):
result = Object()
result.space = space
- result._init_empty(self.devolved_dict_terminator)
+ result._mapdict_init_empty(self.devolved_dict_terminator)
return result
@@ -417,11 +417,6 @@
def __repr__(self):
return "<PlainAttribute %s %s %s %r>" % (self.name, self.index,
self.storageindex, self.back)
-def _become(w_obj, new_obj):
- # this is like the _become method, really, but we cannot use that due to
- # RPython reasons
- w_obj._set_mapdict_storage_and_map(new_obj.storage, new_obj.map)
-
class MapAttrCache(object):
def __init__(self, space):
SIZE = 1 << space.config.objspace.std.methodcachesizeexp
@@ -457,22 +452,12 @@
# everything that's needed to use mapdict for a user subclass at all.
# This immediately makes slots possible.
- # assumes presence of _init_empty, _mapdict_read_storage,
+ # assumes presence of _get_mapdict_map, _set_mapdict_map
+ # _mapdict_init_empty, _mapdict_read_storage,
# _mapdict_write_storage, _mapdict_storage_length,
# _set_mapdict_storage_and_map
# _____________________________________________
- # methods needed for mapdict
-
- def _become(self, new_obj):
- self._set_mapdict_storage_and_map(new_obj.storage, new_obj.map)
-
- def _get_mapdict_map(self):
- return jit.promote(self.map)
- def _set_mapdict_map(self, map):
- self.map = map
-
- # _____________________________________________
# objspace interface
# class access
@@ -482,15 +467,14 @@
def setclass(self, space, w_cls):
         new_obj = self._get_mapdict_map().set_terminator(self, w_cls.terminator)
- self._become(new_obj)
+ self._set_mapdict_storage_and_map(new_obj.storage, new_obj.map)
def user_setup(self, space, w_subtype):
from pypy.module.__builtin__.interp_classobj import W_InstanceObject
- self.space = space
assert (not self.typedef.hasdict or
isinstance(w_subtype.terminator, NoDictTerminator) or
self.typedef is W_InstanceObject.typedef)
- self._init_empty(w_subtype.terminator)
+ self._mapdict_init_empty(w_subtype.terminator)
# methods needed for slots
@@ -508,7 +492,7 @@
new_obj = self._get_mapdict_map().delete(self, "slot", index)
if new_obj is None:
return False
- self._become(new_obj)
+ self._set_mapdict_storage_and_map(new_obj.storage, new_obj.map)
return True
@@ -549,7 +533,7 @@
new_obj = self._get_mapdict_map().delete(self, attrname, DICT)
if new_obj is None:
return False
- self._become(new_obj)
+ self._set_mapdict_storage_and_map(new_obj.storage, new_obj.map)
return True
def getdict(self, space):
@@ -599,7 +583,12 @@
assert flag
class MapdictStorageMixin(object):
- def _init_empty(self, map):
+ def _get_mapdict_map(self):
+ return jit.promote(self.map)
+ def _set_mapdict_map(self, map):
+ self.map = map
+
+ def _mapdict_init_empty(self, map):
from rpython.rlib.debug import make_sure_not_resized
self.map = map
self.storage = make_sure_not_resized([None] * map.size_estimate())
@@ -613,6 +602,7 @@
def _mapdict_storage_length(self):
return len(self.storage)
+
def _set_mapdict_storage_and_map(self, storage, map):
self.storage = storage
self.map = map
@@ -643,7 +633,11 @@
rangenmin1 = unroll.unrolling_iterable(range(nmin1))
valnmin1 = "_value%s" % nmin1
class subcls(object):
- def _init_empty(self, map):
+ def _get_mapdict_map(self):
+ return jit.promote(self.map)
+ def _set_mapdict_map(self, map):
+ self.map = map
+ def _mapdict_init_empty(self, map):
for i in rangenmin1:
setattr(self, "_value%s" % i, None)
setattr(self, valnmin1, erase_item(None))
@@ -731,7 +725,7 @@
def get_empty_storage(self):
w_result = Object()
terminator = self.space.fromcache(get_terminator_for_dicts)
- w_result._init_empty(terminator)
+ w_result._mapdict_init_empty(terminator)
return self.erase(w_result)
def switch_to_object_strategy(self, w_dict):
@@ -811,7 +805,7 @@
def clear(self, w_dict):
w_obj = self.unerase(w_dict.dstorage)
new_obj = w_obj._get_mapdict_map().remove_dict_entries(w_obj)
- _become(w_obj, new_obj)
+ w_obj._set_mapdict_storage_and_map(new_obj.storage, new_obj.map)
def popitem(self, w_dict):
curr = self.unerase(w_dict.dstorage)._get_mapdict_map().search(DICT)
@@ -836,7 +830,7 @@
def materialize_r_dict(space, obj, dict_w):
map = obj._get_mapdict_map()
new_obj = map.materialize_r_dict(space, obj, dict_w)
- _become(obj, new_obj)
+ obj._set_mapdict_storage_and_map(new_obj.storage, new_obj.map)
class MapDictIteratorKeys(BaseKeyIterator):
def __init__(self, space, strategy, dictimplementation):
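
The mapdict refactoring above settles on a small storage interface that every user subclass has to provide (_get_mapdict_map, _set_mapdict_map, _mapdict_init_empty, _mapdict_read_storage, _mapdict_write_storage, _mapdict_storage_length, _set_mapdict_storage_and_map); the _become helpers are gone and callers now write the storage and map directly. A plain-Python toy of that interface, mirroring MapdictStorageMixin without the RPython-only helpers (jit.promote, make_sure_not_resized), could look like this; ToyMap is invented for the example:

    class ToyMap(object):
        # minimal stand-in for a mapdict map: only the size estimate is used here
        def __init__(self, size_estimate=0):
            self._size_estimate = size_estimate
        def size_estimate(self):
            return self._size_estimate

    class ToyStorage(object):
        def _get_mapdict_map(self):
            return self.map
        def _set_mapdict_map(self, map):
            self.map = map
        def _mapdict_init_empty(self, map):
            self.map = map
            self.storage = [None] * map.size_estimate()
        def _mapdict_read_storage(self, storageindex):
            return self.storage[storageindex]
        def _mapdict_write_storage(self, storageindex, value):
            self.storage[storageindex] = value
        def _mapdict_storage_length(self):
            return len(self.storage)
        def _set_mapdict_storage_and_map(self, storage, map):
            self.storage = storage
            self.map = map

    obj = ToyStorage()
    obj._mapdict_init_empty(ToyMap(2))
    obj._mapdict_write_storage(0, 'attr value')
    assert obj._mapdict_read_storage(0) == 'attr value'
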
diff --git a/pypy/objspace/std/objspace.py b/pypy/objspace/std/objspace.py
--- a/pypy/objspace/std/objspace.py
+++ b/pypy/objspace/std/objspace.py
@@ -358,7 +358,7 @@
cls = cls.typedef.applevel_subclasses_base
#
subcls = get_unique_interplevel_subclass(
- self.config, cls, w_subtype.needsdel)
+ self, cls, w_subtype.needsdel)
instance = instantiate(subcls)
assert isinstance(instance, cls)
instance.user_setup(self, w_subtype)
diff --git a/rpython/rtyper/lltypesystem/ll2ctypes.py b/rpython/rtyper/lltypesystem/ll2ctypes.py
--- a/rpython/rtyper/lltypesystem/ll2ctypes.py
+++ b/rpython/rtyper/lltypesystem/ll2ctypes.py
@@ -231,17 +231,7 @@
assert max_n >= 0
ITEM = A.OF
ctypes_item = get_ctypes_type(ITEM, delayed_builders)
- # Python 2.5 ctypes can raise OverflowError on 64-bit builds
- for n in [maxint, 2**31]:
- MAX_SIZE = n/64
- try:
- PtrType = ctypes.POINTER(MAX_SIZE * ctypes_item)
- except (OverflowError, AttributeError), e:
- pass # ^^^ bah, blame ctypes
- else:
- break
- else:
- raise e
+ ctypes_item_ptr = ctypes.POINTER(ctypes_item)
class CArray(ctypes.Structure):
if is_emulated_long:
@@ -265,35 +255,9 @@
bigarray.length = n
return bigarray
- _ptrtype = None
-
- @classmethod
- def _get_ptrtype(cls):
- if cls._ptrtype:
- return cls._ptrtype
- # ctypes can raise OverflowError on 64-bit builds
-        # on windows it raises AttributeError even for 2**31 (_length_ missing)
- if _MS_WINDOWS:
- other_limit = 2**31-1
- else:
- other_limit = 2**31
- for n in [maxint, other_limit]:
- cls.MAX_SIZE = n / ctypes.sizeof(ctypes_item)
- try:
- cls._ptrtype = ctypes.POINTER(cls.MAX_SIZE * ctypes_item)
- except (OverflowError, AttributeError), e:
- pass
- else:
- break
- else:
- raise e
- return cls._ptrtype
-
def _indexable(self, index):
- PtrType = self._get_ptrtype()
- assert index + 1 < self.MAX_SIZE
- p = ctypes.cast(ctypes.pointer(self.items), PtrType)
- return p.contents
+ p = ctypes.cast(self.items, ctypes_item_ptr)
+ return p
def _getitem(self, index, boundscheck=True):
if boundscheck:
@@ -1045,12 +1009,22 @@
container = _array_of_known_length(T.TO)
container._storage = type(cobj)(cobj.contents)
elif isinstance(T.TO, lltype.FuncType):
+ # cobj is a CFunctionType object. We naively think
+ # that it should be a function pointer. No no no. If
+ # it was read out of an array, say, then it is a *pointer*
+ # to a function pointer. In other words, the read doesn't
+ # read anything, it just takes the address of the function
+ # pointer inside the array. If later the array is modified
+ # or goes out of scope, then we crash. CTypes is fun.
+ # It works if we cast it now to an int and back.
cobjkey = intmask(ctypes.cast(cobj, ctypes.c_void_p).value)
if cobjkey in _int2obj:
container = _int2obj[cobjkey]
else:
+ name = getattr(cobj, '__name__', '?')
+ cobj = ctypes.cast(cobjkey, type(cobj))
_callable = get_ctypes_trampoline(T.TO, cobj)
-                return lltype.functionptr(T.TO, getattr(cobj, '__name__', '?'),
+ return lltype.functionptr(T.TO, name,
_callable=_callable)
elif isinstance(T.TO, lltype.OpaqueType):
if T == llmemory.GCREF:
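
The long comment added above names the ctypes trap this hunk works around: indexing a ctypes array of function pointers yields an object that still aliases the array's memory, so it goes stale once the slot is overwritten or the array is freed, and round-tripping the address through an integer detaches it. A small pure-ctypes illustration of the same effect, independent of this diff (function names invented):

    import ctypes

    FUNC = ctypes.CFUNCTYPE(ctypes.c_int)

    def forty_two():
        return 42
    def forty_three():
        return 43

    fp42, fp43 = FUNC(forty_two), FUNC(forty_three)
    arr = (FUNC * 1)(fp42)

    alias = arr[0]                                     # still backed by arr's buffer
    addr = ctypes.cast(alias, ctypes.c_void_p).value   # take the address...
    detached = ctypes.cast(addr, FUNC)                 # ...and cast it back

    arr[0] = fp43                                      # overwrite the array slot
    assert alias() == 43      # the aliasing object follows the slot
    assert detached() == 42   # the detached copy still calls forty_two
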
diff --git a/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py b/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py
--- a/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py
+++ b/rpython/rtyper/lltypesystem/test/test_ll2ctypes.py
@@ -1405,6 +1405,45 @@
a2 = ctypes2lltype(lltype.Ptr(A), lltype2ctypes(a))
assert a2._obj.getitem(0)._obj._parentstructure() is a2._obj
+ def test_array_of_function_pointers(self):
+ c_source = py.code.Source(r"""
+ #include "src/precommondefs.h"
+ #include <stdio.h>
+
+ typedef int(*funcptr_t)(void);
+ static int forty_two(void) { return 42; }
+ static int forty_three(void) { return 43; }
+ static funcptr_t testarray[2];
+ RPY_EXPORTED void runtest(void cb(funcptr_t *)) {
+ testarray[0] = &forty_two;
+ testarray[1] = &forty_three;
+ fprintf(stderr, "&forty_two = %p\n", testarray[0]);
+ fprintf(stderr, "&forty_three = %p\n", testarray[1]);
+ cb(testarray);
+ testarray[0] = 0;
+ testarray[1] = 0;
+ }
+ """)
+ eci = ExternalCompilationInfo(include_dirs=[cdir],
+ separate_module_sources=[c_source])
+
+ PtrF = lltype.Ptr(lltype.FuncType([], rffi.INT))
+ ArrayPtrF = rffi.CArrayPtr(PtrF)
+ CALLBACK = rffi.CCallback([ArrayPtrF], lltype.Void)
+
+ runtest = rffi.llexternal('runtest', [CALLBACK], lltype.Void,
+ compilation_info=eci)
+ seen = []
+
+ def callback(testarray):
+ seen.append(testarray[0]) # read a PtrF out of testarray
+ seen.append(testarray[1])
+
+ runtest(callback)
+ assert seen[0]() == 42
+ assert seen[1]() == 43
+
+
class TestPlatform(object):
def test_lib_on_libpaths(self):
from rpython.translator.platform import platform