Author: Devin Jeanpierre <[email protected]>
Branch: cpyext-old-buffers
Changeset: r84500:c005f7790fdb
Date: 2016-05-16 15:28 -0700
http://bitbucket.org/pypy/pypy/changeset/c005f7790fdb/
Log: hg merge default # --> cpyext-old-buffers
diff --git a/lib-python/2.7/test/test_sys_settrace.py
b/lib-python/2.7/test/test_sys_settrace.py
--- a/lib-python/2.7/test/test_sys_settrace.py
+++ b/lib-python/2.7/test/test_sys_settrace.py
@@ -328,8 +328,8 @@
def test_13_genexp(self):
if self.using_gc:
+ gc.enable()
test_support.gc_collect()
- gc.enable()
try:
self.run_test(generator_example)
# issue1265: if the trace function contains a generator,
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -89,3 +89,19 @@
Use the new rgc.FinalizerQueue mechanism to clean up the handling of
``__del__`` methods. Fixes notably issue #2287. (All RPython
subclasses of W_Root need to use FinalizerQueue now.)
+
+.. branch: ufunc-outer
+
+Implement ufunc.outer on numpypy
+
+.. branch: verbose-imports
+
+Support ``pypy -v``: verbose imports. It does not log as much as
+CPython, but it should be enough to help when debugging package layout
+problems.
+
+.. branch: cpyext-macros-cast
+
+Fix some warnings when compiling CPython C extension modules
+
+.. branch: syntax_fix
diff --git a/pypy/interpreter/app_main.py b/pypy/interpreter/app_main.py
--- a/pypy/interpreter/app_main.py
+++ b/pypy/interpreter/app_main.py
@@ -2,7 +2,7 @@
# This is pure Python code that handles the main entry point into "pypy".
# See test/test_app_main.
-# Missing vs CPython: -d, -t, -v, -x, -3
+# Missing vs CPython: -d, -t, -x, -3
USAGE1 = __doc__ = """\
Options and arguments (and corresponding environment variables):
-B : don't write .py[co] files on import; also PYTHONDONTWRITEBYTECODE=x
@@ -19,6 +19,8 @@
-s : don't add user site directory to sys.path; also PYTHONNOUSERSITE
-S : don't imply 'import site' on initialization
-u : unbuffered binary stdout and stderr; also PYTHONUNBUFFERED=x
+-v : verbose (trace import statements); also PYTHONVERBOSE=x
+ can be supplied multiple times to increase verbosity
-V : print the Python version number and exit (also --version)
-W arg : warning control; arg is action:message:category:module:lineno
also PYTHONWARNINGS=arg
@@ -529,6 +531,7 @@
warnoptions,
unbuffered,
ignore_environment,
+ verbose,
**ignored):
# with PyPy in top of CPython we can only have around 100
# but we need more in the translated PyPy for the compiler package
@@ -663,6 +666,8 @@
inspect = True
else:
# If not interactive, just read and execute stdin normally.
+ if verbose:
+ print_banner(not no_site)
@hidden_applevel
def run_it():
co_stdin = compile(sys.stdin.read(), '<stdin>', 'exec',
@@ -724,10 +729,10 @@
return status
def print_banner(copyright):
- print 'Python %s on %s' % (sys.version, sys.platform)
+ print >> sys.stderr, 'Python %s on %s' % (sys.version, sys.platform)
if copyright:
- print ('Type "help", "copyright", "credits" or '
- '"license" for more information.')
+ print >> sys.stderr, ('Type "help", "copyright", "credits" or '
+ '"license" for more information.')
STDLIB_WARNING = """\
debug: WARNING: Library path not found, using compiled-in sys.path.
diff --git a/pypy/interpreter/executioncontext.py
b/pypy/interpreter/executioncontext.py
--- a/pypy/interpreter/executioncontext.py
+++ b/pypy/interpreter/executioncontext.py
@@ -466,6 +466,13 @@
list = self.fired_actions
if list is not None:
self.fired_actions = None
+ # NB. in case there are several actions, we reset each
+ # 'action._fired' to false only when we're about to call
+ # 'action.perform()'. This means that if
+ # 'action.fire()' happens to be called any time before
+ # the corresponding perform(), the fire() has no
+ # effect---which is the effect we want, because
+ # perform() will be called anyway.
for action in list:
action._fired = False
action.perform(ec, frame)
diff --git a/pypy/module/__pypy__/interp_magic.py
b/pypy/module/__pypy__/interp_magic.py
--- a/pypy/module/__pypy__/interp_magic.py
+++ b/pypy/module/__pypy__/interp_magic.py
@@ -51,6 +51,11 @@
space.newint(cache.misses.get(name, 0))])
def builtinify(space, w_func):
+ """To implement at app-level modules that are, in CPython,
+ implemented in C: this decorator protects a function from being ever
+ bound like a method. Useful because some tests do things like put
+ a "built-in" function on a class and access it via the instance.
+ """
from pypy.interpreter.function import Function, BuiltinFunction
func = space.interp_w(Function, w_func)
bltn = BuiltinFunction(func)
diff --git a/pypy/module/_winreg/interp_winreg.py
b/pypy/module/_winreg/interp_winreg.py
--- a/pypy/module/_winreg/interp_winreg.py
+++ b/pypy/module/_winreg/interp_winreg.py
@@ -14,11 +14,13 @@
space.wrap(message)]))
class W_HKEY(W_Root):
- def __init__(self, hkey):
+ def __init__(self, space, hkey):
self.hkey = hkey
+ self.space = space
+ self.register_finalizer(space)
- def descr_del(self, space):
- self.Close(space)
+ def _finalize_(self):
+ self.Close(self.space)
def as_int(self):
return rffi.cast(rffi.SIZE_T, self.hkey)
@@ -64,7 +66,7 @@
@unwrap_spec(key=int)
def new_HKEY(space, w_subtype, key):
hkey = rffi.cast(rwinreg.HKEY, key)
- return space.wrap(W_HKEY(hkey))
+ return space.wrap(W_HKEY(space, hkey))
descr_HKEY_new = interp2app(new_HKEY)
W_HKEY.typedef = TypeDef(
@@ -91,7 +93,6 @@
__int__ - Converting a handle to an integer returns the Win32 handle.
__cmp__ - Handle objects are compared using the handle value.""",
__new__ = descr_HKEY_new,
- __del__ = interp2app(W_HKEY.descr_del),
__repr__ = interp2app(W_HKEY.descr_repr),
__int__ = interp2app(W_HKEY.descr_int),
__nonzero__ = interp2app(W_HKEY.descr_nonzero),
@@ -480,7 +481,7 @@
ret = rwinreg.RegCreateKey(hkey, subkey, rethkey)
if ret != 0:
raiseWindowsError(space, ret, 'CreateKey')
- return space.wrap(W_HKEY(rethkey[0]))
+ return space.wrap(W_HKEY(space, rethkey[0]))
@unwrap_spec(subkey=str, res=int, sam=rffi.r_uint)
def CreateKeyEx(space, w_hkey, subkey, res=0, sam=rwinreg.KEY_WRITE):
@@ -502,7 +503,7 @@
lltype.nullptr(rwin32.LPDWORD.TO))
if ret != 0:
raiseWindowsError(space, ret, 'CreateKeyEx')
- return space.wrap(W_HKEY(rethkey[0]))
+ return space.wrap(W_HKEY(space, rethkey[0]))
@unwrap_spec(subkey=str)
def DeleteKey(space, w_hkey, subkey):
@@ -549,7 +550,7 @@
ret = rwinreg.RegOpenKeyEx(hkey, subkey, res, sam, rethkey)
if ret != 0:
raiseWindowsError(space, ret, 'RegOpenKeyEx')
- return space.wrap(W_HKEY(rethkey[0]))
+ return space.wrap(W_HKEY(space, rethkey[0]))
@unwrap_spec(index=int)
def EnumValue(space, w_hkey, index):
@@ -688,7 +689,7 @@
ret = rwinreg.RegConnectRegistry(machine, hkey, rethkey)
if ret != 0:
raiseWindowsError(space, ret, 'RegConnectRegistry')
- return space.wrap(W_HKEY(rethkey[0]))
+ return space.wrap(W_HKEY(space, rethkey[0]))
@unwrap_spec(source=unicode)
def ExpandEnvironmentStrings(space, source):
diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py
--- a/pypy/module/cpyext/api.py
+++ b/pypy/module/cpyext/api.py
@@ -203,46 +203,46 @@
# id. Invariant: this variable always contain 0 when the PyPy GIL is
# released. It should also contain 0 when regular RPython code
# executes. In non-cpyext-related code, it will thus always be 0.
-#
+#
# **make_generic_cpy_call():** RPython to C, with the GIL held. Before
# the call, must assert that the global variable is 0 and set the
# current thread identifier into the global variable. After the call,
# assert that the global variable still contains the current thread id,
# and reset it to 0.
-#
+#
# **make_wrapper():** C to RPython; by default assume that the GIL is
# held, but accepts gil="acquire", "release", "around",
# "pygilstate_ensure", "pygilstate_release".
-#
+#
# When a wrapper() is called:
-#
+#
# * "acquire": assert that the GIL is not currently held, i.e. the
# global variable does not contain the current thread id (otherwise,
# deadlock!). Acquire the PyPy GIL. After we acquired it, assert
# that the global variable is 0 (it must be 0 according to the
# invariant that it was 0 immediately before we acquired the GIL,
# because the GIL was released at that point).
-#
+#
# * gil=None: we hold the GIL already. Assert that the current thread
# identifier is in the global variable, and replace it with 0.
-#
+#
# * "pygilstate_ensure": if the global variable contains the current
# thread id, replace it with 0 and set the extra arg to 0. Otherwise,
# do the "acquire" and set the extra arg to 1. Then we'll call
# pystate.py:PyGILState_Ensure() with this extra arg, which will do
# the rest of the logic.
-#
+#
# When a wrapper() returns, first assert that the global variable is
# still 0, and then:
-#
+#
# * "release": release the PyPy GIL. The global variable was 0 up to
# and including at the point where we released the GIL, but afterwards
# it is possible that the GIL is acquired by a different thread very
# quickly.
-#
+#
# * gil=None: we keep holding the GIL. Set the current thread
# identifier into the global variable.
-#
+#
# * "pygilstate_release": if the argument is PyGILState_UNLOCKED,
# release the PyPy GIL; otherwise, set the current thread identifier
# into the global variable. The rest of the logic of
@@ -254,7 +254,7 @@
cpyext_namespace = NameManager('cpyext_')
-class ApiFunction:
+class ApiFunction(object):
def __init__(self, argtypes, restype, callable, error=_NOT_SPECIFIED,
c_name=None, gil=None, result_borrowed=False,
result_is_ll=False):
self.argtypes = argtypes
@@ -292,11 +292,48 @@
def get_wrapper(self, space):
wrapper = getattr(self, '_wrapper', None)
if wrapper is None:
- wrapper = make_wrapper(space, self.callable, self.gil)
- self._wrapper = wrapper
- wrapper.relax_sig_check = True
- if self.c_name is not None:
- wrapper.c_name = cpyext_namespace.uniquename(self.c_name)
+ wrapper = self._wrapper = self._make_wrapper(space)
+ return wrapper
+
+ # Make the wrapper for the cases (1) and (2)
+ def _make_wrapper(self, space):
+ "NOT_RPYTHON"
+ # This logic is obscure, because we try to avoid creating one
+ # big wrapper() function for every callable. Instead we create
+ # only one per "signature".
+
+ argtypesw = zip(self.argtypes,
+ [_name.startswith("w_") for _name in self.argnames])
+ error_value = getattr(self, "error_value", CANNOT_FAIL)
+ if (isinstance(self.restype, lltype.Ptr)
+ and error_value is not CANNOT_FAIL):
+ assert lltype.typeOf(error_value) == self.restype
+ assert not error_value # only support error=NULL
+ error_value = 0 # because NULL is not hashable
+
+ if self.result_is_ll:
+ result_kind = "L"
+ elif self.result_borrowed:
+ result_kind = "B" # note: 'result_borrowed' is ignored if we
also
+ else: # say 'result_is_ll=True' (in this case it's
+ result_kind = "." # up to you to handle refcounting anyway)
+
+ signature = (tuple(argtypesw),
+ self.restype,
+ result_kind,
+ error_value,
+ self.gil)
+
+ cache = space.fromcache(WrapperCache)
+ try:
+ wrapper_gen = cache.wrapper_gens[signature]
+ except KeyError:
+ wrapper_gen = WrapperGen(space, signature)
+ cache.wrapper_gens[signature] = wrapper_gen
+ wrapper = wrapper_gen.make_wrapper(self.callable)
+ wrapper.relax_sig_check = True
+ if self.c_name is not None:
+ wrapper.c_name = cpyext_namespace.uniquename(self.c_name)
return wrapper
DEFAULT_HEADER = 'pypy_decl.h'
@@ -373,7 +410,16 @@
arg = rffi.cast(ARG, as_pyobj(space, input_arg))
else:
arg = rffi.cast(ARG, input_arg)
- elif is_PyObject(ARG) and is_wrapped:
+ elif ARG == rffi.VOIDP and not is_wrapped:
+ # unlike is_PyObject case above, we allow any kind of
+ # argument -- just, if it's an object, we assume the
+ # caller meant for it to become a PyObject*.
+ if input_arg is None or isinstance(input_arg, W_Root):
+ keepalives += (input_arg,)
+ arg = rffi.cast(ARG, as_pyobj(space, input_arg))
+ else:
+ arg = rffi.cast(ARG, input_arg)
+ elif (is_PyObject(ARG) or ARG == rffi.VOIDP) and
is_wrapped:
# build a W_Root, possibly from a 'PyObject *'
if is_pyobj(input_arg):
arg = from_ref(space, input_arg)
@@ -683,92 +729,61 @@
def __init__(self, space):
self.space = space
self.wrapper_gens = {} # {signature: WrapperGen()}
- self.stats = [0, 0]
class WrapperGen(object):
wrapper_second_level = None
+ A = lltype.Array(lltype.Char)
def __init__(self, space, signature):
self.space = space
self.signature = signature
- self.callable2name = []
def make_wrapper(self, callable):
- self.callable2name.append((callable, callable.__name__))
if self.wrapper_second_level is None:
self.wrapper_second_level = make_wrapper_second_level(
- self.space, self.callable2name, *self.signature)
+ self.space, *self.signature)
wrapper_second_level = self.wrapper_second_level
+ name = callable.__name__
+ pname = lltype.malloc(self.A, len(name), flavor='raw', immortal=True)
+ for i in range(len(name)):
+ pname[i] = name[i]
+
def wrapper(*args):
# no GC here, not even any GC object
- args += (callable,)
- return wrapper_second_level(*args)
+ return wrapper_second_level(callable, pname, *args)
wrapper.__name__ = "wrapper for %r" % (callable, )
return wrapper
-# Make the wrapper for the cases (1) and (2)
-def make_wrapper(space, callable, gil=None):
- "NOT_RPYTHON"
- # This logic is obscure, because we try to avoid creating one
- # big wrapper() function for every callable. Instead we create
- # only one per "signature".
- argnames = callable.api_func.argnames
- argtypesw = zip(callable.api_func.argtypes,
- [_name.startswith("w_") for _name in argnames])
- error_value = getattr(callable.api_func, "error_value", CANNOT_FAIL)
- if (isinstance(callable.api_func.restype, lltype.Ptr)
- and error_value is not CANNOT_FAIL):
- assert lltype.typeOf(error_value) == callable.api_func.restype
- assert not error_value # only support error=NULL
- error_value = 0 # because NULL is not hashable
-
- if callable.api_func.result_is_ll:
- result_kind = "L"
- elif callable.api_func.result_borrowed:
- result_kind = "B" # note: 'result_borrowed' is ignored if we also
- else: # say 'result_is_ll=True' (in this case it's
- result_kind = "." # up to you to handle refcounting anyway)
-
- signature = (tuple(argtypesw),
- callable.api_func.restype,
- result_kind,
- error_value,
- gil)
-
- cache = space.fromcache(WrapperCache)
- cache.stats[1] += 1
- try:
- wrapper_gen = cache.wrapper_gens[signature]
- except KeyError:
- #print signature
- wrapper_gen = cache.wrapper_gens[signature] = WrapperGen(space,
- signature)
- cache.stats[0] += 1
- #print 'Wrapper cache [wrappers/total]:', cache.stats
- return wrapper_gen.make_wrapper(callable)
-
+@dont_inline
+def _unpack_name(pname):
+ return ''.join([pname[i] for i in range(len(pname))])
@dont_inline
def deadlock_error(funcname):
+ funcname = _unpack_name(funcname)
fatalerror_notb("GIL deadlock detected when a CPython C extension "
"module calls '%s'" % (funcname,))
@dont_inline
def no_gil_error(funcname):
+ funcname = _unpack_name(funcname)
fatalerror_notb("GIL not held when a CPython C extension "
"module calls '%s'" % (funcname,))
@dont_inline
def not_supposed_to_fail(funcname):
- raise SystemError("The function '%s' was not supposed to fail"
- % (funcname,))
+ funcname = _unpack_name(funcname)
+ print "Error in cpyext, CPython compatibility layer:"
+ print "The function", funcname, "was not supposed to fail"
+ raise SystemError
@dont_inline
def unexpected_exception(funcname, e, tb):
+ funcname = _unpack_name(funcname)
print 'Fatal error in cpyext, CPython compatibility layer,
calling',funcname
print 'Either report a bug or consider not using this particular extension'
if not we_are_translated():
@@ -784,7 +799,7 @@
pypy_debug_catch_fatal_exception()
assert False
-def make_wrapper_second_level(space, callable2name, argtypesw, restype,
+def make_wrapper_second_level(space, argtypesw, restype,
result_kind, error_value, gil):
from rpython.rlib import rgil
argtypes_enum_ui = unrolling_iterable(enumerate(argtypesw))
@@ -807,29 +822,19 @@
def invalid(err):
"NOT_RPYTHON: translation-time crash if this ends up being called"
raise ValueError(err)
- invalid.__name__ = 'invalid_%s' % (callable2name[0][1],)
- def nameof(callable):
- for c, n in callable2name:
- if c is callable:
- return n
- return '<unknown function>'
- nameof._dont_inline_ = True
-
- def wrapper_second_level(*args):
+ def wrapper_second_level(callable, pname, *args):
from pypy.module.cpyext.pyobject import make_ref, from_ref, is_pyobj
from pypy.module.cpyext.pyobject import as_pyobj
# we hope that malloc removal removes the newtuple() that is
# inserted exactly here by the varargs specializer
- callable = args[-1]
- args = args[:-1]
# see "Handling of the GIL" above (careful, we don't have the GIL here)
tid = rthread.get_or_make_ident()
_gil_auto = (gil_auto_workaround and cpyext_glob_tid_ptr[0] != tid)
if gil_acquire or _gil_auto:
if cpyext_glob_tid_ptr[0] == tid:
- deadlock_error(nameof(callable))
+ deadlock_error(pname)
rgil.acquire()
assert cpyext_glob_tid_ptr[0] == 0
elif pygilstate_ensure:
@@ -842,7 +847,7 @@
args += (pystate.PyGILState_UNLOCKED,)
else:
if cpyext_glob_tid_ptr[0] != tid:
- no_gil_error(nameof(callable))
+ no_gil_error(pname)
cpyext_glob_tid_ptr[0] = 0
rffi.stackcounter.stacks_counter += 1
@@ -859,6 +864,10 @@
if is_PyObject(typ) and is_wrapped:
assert is_pyobj(arg)
arg_conv = from_ref(space, rffi.cast(PyObject, arg))
+ elif typ == rffi.VOIDP and is_wrapped:
+ # Many macros accept a void* so that one can pass a
+ # PyObject* or a PySomeSubtype*.
+ arg_conv = from_ref(space, rffi.cast(PyObject, arg))
else:
arg_conv = arg
boxed_args += (arg_conv, )
@@ -888,7 +897,7 @@
if failed:
if error_value is CANNOT_FAIL:
- raise not_supposed_to_fail(nameof(callable))
+ raise not_supposed_to_fail(pname)
retval = error_value
elif is_PyObject(restype):
@@ -908,7 +917,7 @@
retval = rffi.cast(restype, result)
except Exception as e:
- unexpected_exception(nameof(callable), e, tb)
+ unexpected_exception(pname, e, tb)
return fatal_value
assert lltype.typeOf(retval) == restype
@@ -1019,7 +1028,7 @@
structindex = {}
for header, header_functions in FUNCTIONS_BY_HEADER.iteritems():
for name, func in header_functions.iteritems():
- if not func:
+ if not func:
# added only for the macro, not the decl
continue
restype, args = c_function_signature(db, func)
@@ -1033,7 +1042,7 @@
RPY_EXTERN struct PyPyAPI* pypyAPI = &_pypyAPI;
""" % dict(members=structmembers)
- functions = generate_decls_and_callbacks(db, export_symbols,
+ functions = generate_decls_and_callbacks(db, export_symbols,
prefix='cpyexttest')
global_objects = []
@@ -1415,7 +1424,7 @@
generate_macros(export_symbols, prefix=prefix)
- functions = generate_decls_and_callbacks(db, [], api_struct=False,
+ functions = generate_decls_and_callbacks(db, [], api_struct=False,
prefix=prefix)
code = "#include <Python.h>\n"
if use_micronumpy:
@@ -1471,7 +1480,7 @@
if not func:
continue
newname = mangle_name('PyPy', name) or name
- deco = entrypoint_lowlevel("cpyext", func.argtypes, newname,
+ deco = entrypoint_lowlevel("cpyext", func.argtypes, newname,
relax=True)
deco(func.get_wrapper(space))
diff --git a/pypy/module/cpyext/cdatetime.py b/pypy/module/cpyext/cdatetime.py
--- a/pypy/module/cpyext/cdatetime.py
+++ b/pypy/module/cpyext/cdatetime.py
@@ -178,67 +178,67 @@
# Accessors
-@cpython_api([PyDateTime_Date], rffi.INT_real, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], rffi.INT_real, error=CANNOT_FAIL)
def PyDateTime_GET_YEAR(space, w_obj):
"""Return the year, as a positive int.
"""
return space.int_w(space.getattr(w_obj, space.wrap("year")))
-@cpython_api([PyDateTime_Date], rffi.INT_real, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], rffi.INT_real, error=CANNOT_FAIL)
def PyDateTime_GET_MONTH(space, w_obj):
"""Return the month, as an int from 1 through 12.
"""
return space.int_w(space.getattr(w_obj, space.wrap("month")))
-@cpython_api([PyDateTime_Date], rffi.INT_real, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], rffi.INT_real, error=CANNOT_FAIL)
def PyDateTime_GET_DAY(space, w_obj):
"""Return the day, as an int from 1 through 31.
"""
return space.int_w(space.getattr(w_obj, space.wrap("day")))
-@cpython_api([PyDateTime_DateTime], rffi.INT_real, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], rffi.INT_real, error=CANNOT_FAIL)
def PyDateTime_DATE_GET_HOUR(space, w_obj):
"""Return the hour, as an int from 0 through 23.
"""
return space.int_w(space.getattr(w_obj, space.wrap("hour")))
-@cpython_api([PyDateTime_DateTime], rffi.INT_real, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], rffi.INT_real, error=CANNOT_FAIL)
def PyDateTime_DATE_GET_MINUTE(space, w_obj):
"""Return the minute, as an int from 0 through 59.
"""
return space.int_w(space.getattr(w_obj, space.wrap("minute")))
-@cpython_api([PyDateTime_DateTime], rffi.INT_real, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], rffi.INT_real, error=CANNOT_FAIL)
def PyDateTime_DATE_GET_SECOND(space, w_obj):
"""Return the second, as an int from 0 through 59.
"""
return space.int_w(space.getattr(w_obj, space.wrap("second")))
-@cpython_api([PyDateTime_DateTime], rffi.INT_real, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], rffi.INT_real, error=CANNOT_FAIL)
def PyDateTime_DATE_GET_MICROSECOND(space, w_obj):
"""Return the microsecond, as an int from 0 through 999999.
"""
return space.int_w(space.getattr(w_obj, space.wrap("microsecond")))
-@cpython_api([PyDateTime_Time], rffi.INT_real, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], rffi.INT_real, error=CANNOT_FAIL)
def PyDateTime_TIME_GET_HOUR(space, w_obj):
"""Return the hour, as an int from 0 through 23.
"""
return space.int_w(space.getattr(w_obj, space.wrap("hour")))
-@cpython_api([PyDateTime_Time], rffi.INT_real, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], rffi.INT_real, error=CANNOT_FAIL)
def PyDateTime_TIME_GET_MINUTE(space, w_obj):
"""Return the minute, as an int from 0 through 59.
"""
return space.int_w(space.getattr(w_obj, space.wrap("minute")))
-@cpython_api([PyDateTime_Time], rffi.INT_real, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], rffi.INT_real, error=CANNOT_FAIL)
def PyDateTime_TIME_GET_SECOND(space, w_obj):
"""Return the second, as an int from 0 through 59.
"""
return space.int_w(space.getattr(w_obj, space.wrap("second")))
-@cpython_api([PyDateTime_Time], rffi.INT_real, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], rffi.INT_real, error=CANNOT_FAIL)
def PyDateTime_TIME_GET_MICROSECOND(space, w_obj):
"""Return the microsecond, as an int from 0 through 999999.
"""
@@ -248,14 +248,14 @@
# But it does not seem possible to expose a different structure
# for types defined in a python module like lib/datetime.py.
-@cpython_api([PyDateTime_Delta], rffi.INT_real, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], rffi.INT_real, error=CANNOT_FAIL)
def PyDateTime_DELTA_GET_DAYS(space, w_obj):
return space.int_w(space.getattr(w_obj, space.wrap("days")))
-@cpython_api([PyDateTime_Delta], rffi.INT_real, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], rffi.INT_real, error=CANNOT_FAIL)
def PyDateTime_DELTA_GET_SECONDS(space, w_obj):
return space.int_w(space.getattr(w_obj, space.wrap("seconds")))
-@cpython_api([PyDateTime_Delta], rffi.INT_real, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], rffi.INT_real, error=CANNOT_FAIL)
def PyDateTime_DELTA_GET_MICROSECONDS(space, w_obj):
return space.int_w(space.getattr(w_obj, space.wrap("microseconds")))
diff --git a/pypy/module/cpyext/floatobject.py
b/pypy/module/cpyext/floatobject.py
--- a/pypy/module/cpyext/floatobject.py
+++ b/pypy/module/cpyext/floatobject.py
@@ -48,7 +48,7 @@
def PyFloat_AsDouble(space, w_obj):
return space.float_w(space.float(w_obj))
-@cpython_api([PyObject], lltype.Float, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], lltype.Float, error=CANNOT_FAIL)
def PyFloat_AS_DOUBLE(space, w_float):
"""Return a C double representation of the contents of w_float, but
without error checking."""
diff --git a/pypy/module/cpyext/include/listobject.h
b/pypy/module/cpyext/include/listobject.h
--- a/pypy/module/cpyext/include/listobject.h
+++ b/pypy/module/cpyext/include/listobject.h
@@ -1,1 +1,1 @@
-#define PyList_GET_ITEM PyList_GetItem
+#define PyList_GET_ITEM(o, i) PyList_GetItem((PyObject*)(o), (i))
diff --git a/pypy/module/cpyext/intobject.py b/pypy/module/cpyext/intobject.py
--- a/pypy/module/cpyext/intobject.py
+++ b/pypy/module/cpyext/intobject.py
@@ -104,7 +104,7 @@
num = space.bigint_w(w_int)
return num.ulonglongmask()
-@cpython_api([PyObject], lltype.Signed, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], lltype.Signed, error=CANNOT_FAIL)
def PyInt_AS_LONG(space, w_int):
"""Return the value of the object w_int. No error checking is performed."""
return space.int_w(w_int)
diff --git a/pypy/module/cpyext/listobject.py b/pypy/module/cpyext/listobject.py
--- a/pypy/module/cpyext/listobject.py
+++ b/pypy/module/cpyext/listobject.py
@@ -21,7 +21,7 @@
"""
return space.newlist([None] * len)
-@cpython_api([PyObject, Py_ssize_t, PyObject], PyObject, error=CANNOT_FAIL,
+@cpython_api([rffi.VOIDP, Py_ssize_t, PyObject], PyObject, error=CANNOT_FAIL,
result_borrowed=True)
def PyList_SET_ITEM(space, w_list, index, w_item):
"""Macro form of PyList_SetItem() without error checking. This is normally
@@ -87,7 +87,7 @@
space.call_method(space.w_list, "insert", w_list, space.wrap(index),
w_item)
return 0
-@cpython_api([PyObject], Py_ssize_t, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], Py_ssize_t, error=CANNOT_FAIL)
def PyList_GET_SIZE(space, w_list):
"""Macro form of PyList_Size() without error checking.
"""
diff --git a/pypy/module/cpyext/pyfile.py b/pypy/module/cpyext/pyfile.py
--- a/pypy/module/cpyext/pyfile.py
+++ b/pypy/module/cpyext/pyfile.py
@@ -1,10 +1,10 @@
from rpython.rtyper.lltypesystem import rffi, lltype
from pypy.module.cpyext.api import (
- cpython_api, CANNOT_FAIL, CONST_STRING, FILEP, build_type_checkers, fdopen,
- fileno)
+ cpython_api, CANNOT_FAIL, CONST_STRING, FILEP, build_type_checkers, fdopen)
from pypy.module.cpyext.pyobject import PyObject
from pypy.module.cpyext.object import Py_PRINT_RAW
-from pypy.interpreter.error import OperationError, oefmt
+from pypy.interpreter.error import (OperationError, oefmt,
+ exception_from_saved_errno)
from pypy.module._file.interp_file import W_File
PyFile_Check, PyFile_CheckExact = build_type_checkers("File", W_File)
@@ -45,16 +45,29 @@
w_mode = space.wrap(rffi.charp2str(mode))
return space.call_method(space.builtin, 'file', w_filename, w_mode)
-@cpython_api([PyObject], FILEP, error=CANNOT_FAIL)
+@cpython_api([PyObject], FILEP, error=lltype.nullptr(FILEP.TO))
def PyFile_AsFile(space, w_p):
"""Return the file object associated with p as a FILE*.
If the caller will ever use the returned FILE* object while
the GIL is released it must also call the PyFile_IncUseCount() and
PyFile_DecUseCount() functions as appropriate."""
+ if not PyFile_Check(space, w_p):
+ raise oefmt(space.w_IOError, 'first argument must be an open file')
assert isinstance(w_p, W_File)
- return fdopen(space.int_w(space.call_method(w_p, 'fileno')),
- w_p.mode)
+ try:
+ fd = space.int_w(space.call_method(w_p, 'fileno'))
+ mode = w_p.mode
+ except OperationError as e:
+ raise oefmt(space.w_IOError, 'could not call fileno')
+ if (fd < 0 or not mode or mode[0] not in ['r', 'w', 'a', 'U'] or
+ ('U' in mode and ('w' in mode or 'a' in mode))):
+ raise oefmt(space.w_IOError, 'invalid fileno or mode')
+ ret = fdopen(fd, mode)
+ if not ret:
+ raise exception_from_saved_errno(space, space.w_IOError)
+ return ret
+
@cpython_api([FILEP, CONST_STRING, CONST_STRING, rffi.VOIDP], PyObject)
def PyFile_FromFile(space, fp, name, mode, close):
diff --git a/pypy/module/cpyext/sequence.py b/pypy/module/cpyext/sequence.py
--- a/pypy/module/cpyext/sequence.py
+++ b/pypy/module/cpyext/sequence.py
@@ -54,7 +54,7 @@
except OperationError:
raise OperationError(space.w_TypeError, space.wrap(rffi.charp2str(m)))
-@cpython_api([PyObject, Py_ssize_t], PyObject, result_borrowed=True)
+@cpython_api([rffi.VOIDP, Py_ssize_t], PyObject, result_borrowed=True)
def PySequence_Fast_GET_ITEM(space, w_obj, index):
"""Return the ith element of o, assuming that o was returned by
PySequence_Fast(), o is not NULL, and that i is within bounds.
@@ -67,7 +67,7 @@
"PySequence_Fast_GET_ITEM called but object is not a list or "
"sequence")
-@cpython_api([PyObject], Py_ssize_t, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], Py_ssize_t, error=CANNOT_FAIL)
def PySequence_Fast_GET_SIZE(space, w_obj):
"""Returns the length of o, assuming that o was returned by
PySequence_Fast() and that o is not NULL. The size can also be
@@ -82,7 +82,7 @@
"PySequence_Fast_GET_SIZE called but object is not a list or "
"sequence")
-@cpython_api([PyObject], PyObjectP)
+@cpython_api([rffi.VOIDP], PyObjectP)
def PySequence_Fast_ITEMS(space, w_obj):
"""Return the underlying array of PyObject pointers. Assumes that o was
returned
by PySequence_Fast() and o is not NULL.
@@ -119,7 +119,7 @@
space.delslice(w_obj, space.wrap(start), space.wrap(end))
return 0
-@cpython_api([PyObject, Py_ssize_t], PyObject)
+@cpython_api([rffi.VOIDP, Py_ssize_t], PyObject)
def PySequence_ITEM(space, w_obj, i):
"""Return the ith element of o or NULL on failure. Macro form of
PySequence_GetItem() but without checking that
diff --git a/pypy/module/cpyext/setobject.py b/pypy/module/cpyext/setobject.py
--- a/pypy/module/cpyext/setobject.py
+++ b/pypy/module/cpyext/setobject.py
@@ -74,7 +74,7 @@
space.call_method(space.w_set, 'clear', w_set)
return 0
-@cpython_api([PyObject], Py_ssize_t, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], Py_ssize_t, error=CANNOT_FAIL)
def PySet_GET_SIZE(space, w_s):
"""Macro form of PySet_Size() without error checking."""
return space.int_w(space.len(w_s))
diff --git a/pypy/module/cpyext/test/test_bytesobject.py
b/pypy/module/cpyext/test/test_bytesobject.py
--- a/pypy/module/cpyext/test/test_bytesobject.py
+++ b/pypy/module/cpyext/test/test_bytesobject.py
@@ -288,6 +288,24 @@
# This does not test much, but at least the refcounts are checked.
assert module.test_intern_inplace('s') == 's'
+ def test_bytes_macros(self):
+ """The PyString_* macros cast, and calls expecting that build."""
+ module = self.import_extension('foo', [
+ ("test_macro_invocations", "METH_NOARGS",
+ """
+ PyObject* o = PyString_FromString("");
+ PyStringObject* u = (PyStringObject*)o;
+
+ PyString_GET_SIZE(u);
+ PyString_GET_SIZE(o);
+
+ PyString_AS_STRING(o);
+ PyString_AS_STRING(u);
+
+ return o;
+ """)])
+ assert module.test_macro_invocations() == ''
+
def test_hash_and_state(self):
module = self.import_extension('foo', [
("test_hash", "METH_VARARGS",
diff --git a/pypy/module/cpyext/test/test_datetime.py
b/pypy/module/cpyext/test/test_datetime.py
--- a/pypy/module/cpyext/test/test_datetime.py
+++ b/pypy/module/cpyext/test/test_datetime.py
@@ -117,3 +117,108 @@
datetime.timedelta,
datetime.tzinfo)
module.clear_types()
+
+ def test_macros(self):
+ module = self.import_extension('foo', [
+ ("test_date_macros", "METH_NOARGS",
+ """
+ PyObject* obj;
+ PyDateTime_Date* d;
+ PyDateTime_IMPORT;
+ if (!PyDateTimeAPI) {
+ PyErr_SetString(PyExc_RuntimeError, "No PyDateTimeAPI");
+ return NULL;
+ }
+ obj = PyDate_FromDate(2000, 6, 6);
+ d = (PyDateTime_Date*)obj;
+
+ PyDateTime_GET_YEAR(obj);
+ PyDateTime_GET_YEAR(d);
+
+ PyDateTime_GET_MONTH(obj);
+ PyDateTime_GET_MONTH(d);
+
+ PyDateTime_GET_DAY(obj);
+ PyDateTime_GET_DAY(d);
+
+ return obj;
+ """),
+ ("test_datetime_macros", "METH_NOARGS",
+ """
+ PyDateTime_IMPORT;
+ if (!PyDateTimeAPI) {
+ PyErr_SetString(PyExc_RuntimeError, "No PyDateTimeAPI");
+ return NULL;
+ }
+ PyObject* obj = PyDateTime_FromDateAndTime(2000, 6, 6, 6, 6,
6, 6);
+ PyDateTime_DateTime* dt = (PyDateTime_DateTime*)obj;
+
+ PyDateTime_GET_YEAR(obj);
+ PyDateTime_GET_YEAR(dt);
+
+ PyDateTime_GET_MONTH(obj);
+ PyDateTime_GET_MONTH(dt);
+
+ PyDateTime_GET_DAY(obj);
+ PyDateTime_GET_DAY(dt);
+
+ PyDateTime_DATE_GET_HOUR(obj);
+ PyDateTime_DATE_GET_HOUR(dt);
+
+ PyDateTime_DATE_GET_MINUTE(obj);
+ PyDateTime_DATE_GET_MINUTE(dt);
+
+ PyDateTime_DATE_GET_SECOND(obj);
+ PyDateTime_DATE_GET_SECOND(dt);
+
+ PyDateTime_DATE_GET_MICROSECOND(obj);
+ PyDateTime_DATE_GET_MICROSECOND(dt);
+
+ return obj;
+ """),
+ ("test_time_macros", "METH_NOARGS",
+ """
+ PyDateTime_IMPORT;
+ if (!PyDateTimeAPI) {
+ PyErr_SetString(PyExc_RuntimeError, "No PyDateTimeAPI");
+ return NULL;
+ }
+ PyObject* obj = PyTime_FromTime(6, 6, 6, 6);
+ PyDateTime_Time* t = (PyDateTime_Time*)obj;
+
+ PyDateTime_TIME_GET_HOUR(obj);
+ PyDateTime_TIME_GET_HOUR(t);
+
+ PyDateTime_TIME_GET_MINUTE(obj);
+ PyDateTime_TIME_GET_MINUTE(t);
+
+ PyDateTime_TIME_GET_SECOND(obj);
+ PyDateTime_TIME_GET_SECOND(t);
+
+ PyDateTime_TIME_GET_MICROSECOND(obj);
+ PyDateTime_TIME_GET_MICROSECOND(t);
+
+ return obj;
+ """),
+ ("test_delta_macros", "METH_NOARGS",
+ """
+ PyDateTime_IMPORT;
+ if (!PyDateTimeAPI) {
+ PyErr_SetString(PyExc_RuntimeError, "No PyDateTimeAPI");
+ return NULL;
+ }
+ PyObject* obj = PyDelta_FromDSU(6, 6, 6);
+ PyDateTime_Delta* delta = (PyDateTime_Delta*)obj;
+
+ PyDateTime_DELTA_GET_DAYS(obj);
+ PyDateTime_DELTA_GET_DAYS(delta);
+
+ PyDateTime_DELTA_GET_SECONDS(obj);
+ PyDateTime_DELTA_GET_SECONDS(delta);
+
+ PyDateTime_DELTA_GET_MICROSECONDS(obj);
+ PyDateTime_DELTA_GET_MICROSECONDS(delta);
+
+ return obj;
+ """),
+ ])
diff --git a/pypy/module/cpyext/test/test_floatobject.py
b/pypy/module/cpyext/test/test_floatobject.py
--- a/pypy/module/cpyext/test/test_floatobject.py
+++ b/pypy/module/cpyext/test/test_floatobject.py
@@ -77,3 +77,19 @@
neginf = module.return_neginf()
assert neginf < 0
assert math.isinf(neginf)
+
+ def test_macro_accepts_wrong_pointer_type(self):
+ import math
+
+ module = self.import_extension('foo', [
+ ("test_macros", "METH_NOARGS",
+ """
+ PyObject* o = PyFloat_FromDouble(1.0);
+ // no PyFloatObject
+ char* dumb_pointer = (char*)o;
+
+ PyFloat_AS_DOUBLE(o);
+ PyFloat_AS_DOUBLE(dumb_pointer);
+
+ Py_RETURN_NONE;"""),
+ ])
diff --git a/pypy/module/cpyext/test/test_intobject.py
b/pypy/module/cpyext/test/test_intobject.py
--- a/pypy/module/cpyext/test/test_intobject.py
+++ b/pypy/module/cpyext/test/test_intobject.py
@@ -191,3 +191,17 @@
i = mod.test_int()
assert isinstance(i, int)
assert i == 42
+
+ def test_int_macros(self):
+ mod = self.import_extension('foo', [
+ ("test_macros", "METH_NOARGS",
+ """
+ PyObject * obj = PyInt_FromLong(42);
+ PyIntObject * i = (PyIntObject*)obj;
+ PyInt_AS_LONG(obj);
+ PyInt_AS_LONG(i);
+ Py_RETURN_NONE;
+ """
+ ),
+ ])
+
diff --git a/pypy/module/cpyext/test/test_listobject.py
b/pypy/module/cpyext/test/test_listobject.py
--- a/pypy/module/cpyext/test/test_listobject.py
+++ b/pypy/module/cpyext/test/test_listobject.py
@@ -137,6 +137,33 @@
module.setlistitem(l,0)
assert l == [None, 2, 3]
+ def test_list_macros(self):
+ """The PyList_* macros cast, and calls expecting that build."""
+ module = self.import_extension('foo', [
+ ("test_macro_invocations", "METH_NOARGS",
+ """
+ PyObject* o = PyList_New(2);
+ PyListObject* l = (PyListObject*)o;
+
+
+ Py_INCREF(o);
+ PyList_SET_ITEM(o, 0, o);
+ Py_INCREF(o);
+ PyList_SET_ITEM(l, 1, o);
+
+ PyList_GET_ITEM(o, 0);
+ PyList_GET_ITEM(l, 1);
+
+ PyList_GET_SIZE(o);
+ PyList_GET_SIZE(l);
+
+ return o;
+ """
+ )
+ ])
+ x = module.test_macro_invocations()
+ assert x[0] is x[1] is x
+
def test_get_item_macro(self):
module = self.import_extension('foo', [
("test_get_item", "METH_NOARGS",
diff --git a/pypy/module/cpyext/test/test_sequence.py
b/pypy/module/cpyext/test/test_sequence.py
--- a/pypy/module/cpyext/test/test_sequence.py
+++ b/pypy/module/cpyext/test/test_sequence.py
@@ -155,6 +155,29 @@
result = api.PySequence_Index(w_gen, w_tofind)
assert result == 4
+class AppTestSetObject(AppTestCpythonExtensionBase):
+ def test_sequence_macro_cast(self):
+ module = self.import_extension('foo', [
+ ("test_macro_cast", "METH_NOARGS",
+ """
+ PyObject *o = PyList_New(0);
+ PyListObject* l;
+ PyList_Append(o, o);
+ l = (PyListObject*)o;
+
+ PySequence_Fast_GET_ITEM(o, 0);
+ PySequence_Fast_GET_ITEM(l, 0);
+
+ PySequence_Fast_GET_SIZE(o);
+ PySequence_Fast_GET_SIZE(l);
+
+ PySequence_ITEM(o, 0);
+ PySequence_ITEM(l, 0);
+
+ return o;
+ """
+ )
+ ])
class TestCPyListStrategy(BaseApiTest):
def test_getitem_setitem(self, space, api):
w_l = space.wrap([1, 2, 3, 4])
diff --git a/pypy/module/cpyext/test/test_setobject.py
b/pypy/module/cpyext/test/test_setobject.py
--- a/pypy/module/cpyext/test/test_setobject.py
+++ b/pypy/module/cpyext/test/test_setobject.py
@@ -2,6 +2,7 @@
from pypy.module.cpyext.pyobject import PyObject, PyObjectP, make_ref, from_ref
from pypy.module.cpyext.test.test_api import BaseApiTest
+from pypy.module.cpyext.test.test_cpyext import AppTestCpythonExtensionBase
from rpython.rtyper.lltypesystem import rffi, lltype
@@ -45,3 +46,20 @@
w_frozenset = space.newfrozenset([space.wrap(i) for i in [1, 2, 3, 4]])
assert api.PyAnySet_CheckExact(w_set)
assert api.PyAnySet_CheckExact(w_frozenset)
+
+class AppTestSetObject(AppTestCpythonExtensionBase):
+ def test_set_macro_cast(self):
+ module = self.import_extension('foo', [
+ ("test_macro_cast", "METH_NOARGS",
+ """
+ PyObject* o = PySet_New(NULL);
+ // no PySetObject
+ char* dumb_pointer = (char*) o;
+
+ PySet_GET_SIZE(o);
+ PySet_GET_SIZE(dumb_pointer);
+
+ return o;
+ """
+ )
+ ])
diff --git a/pypy/module/cpyext/test/test_translate.py
b/pypy/module/cpyext/test/test_translate.py
--- a/pypy/module/cpyext/test/test_translate.py
+++ b/pypy/module/cpyext/test/test_translate.py
@@ -11,11 +11,11 @@
FT = lltype.FuncType([], lltype.Signed)
FTPTR = lltype.Ptr(FT)
- def make_wrapper(space, func, gil=None):
+ def make_wrapper(self, space):
def wrapper():
- return func(space)
+ return self.callable(space)
return wrapper
- monkeypatch.setattr(pypy.module.cpyext.api, 'make_wrapper', make_wrapper)
+ monkeypatch.setattr(pypy.module.cpyext.api.ApiFunction, '_make_wrapper',
make_wrapper)
@specialize.memo()
def get_tp_function(space, typedef):
diff --git a/pypy/module/cpyext/test/test_unicodeobject.py
b/pypy/module/cpyext/test/test_unicodeobject.py
--- a/pypy/module/cpyext/test/test_unicodeobject.py
+++ b/pypy/module/cpyext/test/test_unicodeobject.py
@@ -111,6 +111,26 @@
assert isinstance(res, str)
assert res == 'caf?'
+ def test_unicode_macros(self):
+ """The PyUnicode_* macros cast, and calls expecting that build."""
+ module = self.import_extension('foo', [
+ ("test_macro_invocations", "METH_NOARGS",
+ """
+ PyObject* o = PyUnicode_FromString("");
+ PyUnicodeObject* u = (PyUnicodeObject*)o;
+
+ PyUnicode_GET_SIZE(u);
+ PyUnicode_GET_SIZE(o);
+
+ PyUnicode_GET_DATA_SIZE(u);
+ PyUnicode_GET_DATA_SIZE(o);
+
+ PyUnicode_AS_UNICODE(o);
+ PyUnicode_AS_UNICODE(u);
+ return o;
+ """)])
+ assert module.test_macro_invocations() == u''
+
class TestUnicode(BaseApiTest):
def test_unicodeobject(self, space, api):
        assert api.PyUnicode_GET_SIZE(space.wrap(u'späm')) == 4
diff --git a/pypy/module/cpyext/test/test_weakref.py
b/pypy/module/cpyext/test/test_weakref.py
--- a/pypy/module/cpyext/test/test_weakref.py
+++ b/pypy/module/cpyext/test/test_weakref.py
@@ -7,7 +7,6 @@
w_ref = api.PyWeakref_NewRef(w_obj, space.w_None)
assert w_ref is not None
assert space.is_w(api.PyWeakref_GetObject(w_ref), w_obj)
- assert space.is_w(api.PyWeakref_GET_OBJECT(w_ref), w_obj)
assert space.is_w(api.PyWeakref_LockObject(w_ref), w_obj)
w_obj = space.newtuple([])
@@ -34,3 +33,26 @@
del w_obj
import gc; gc.collect()
assert space.is_w(api.PyWeakref_LockObject(w_ref), space.w_None)
+
+
+class AppTestWeakReference(AppTestCpythonExtensionBase):
+
+ def test_weakref_macro(self):
+ module = self.import_extension('foo', [
+ ("test_macro_cast", "METH_NOARGS",
+ """
+ // PyExc_Warning is some weak-reffable PyObject*.
+ char* dumb_pointer;
+ PyObject* weakref_obj = PyWeakref_NewRef(PyExc_Warning, NULL);
+ if (!weakref_obj) return weakref_obj;
+ // No public PyWeakReference type.
+ dumb_pointer = (char*) weakref_obj;
+
+ PyWeakref_GET_OBJECT(weakref_obj);
+ PyWeakref_GET_OBJECT(dumb_pointer);
+
+ return weakref_obj;
+ """
+ )
+ ])
+ module.test_macro_cast()
diff --git a/pypy/module/cpyext/unicodeobject.py
b/pypy/module/cpyext/unicodeobject.py
--- a/pypy/module/cpyext/unicodeobject.py
+++ b/pypy/module/cpyext/unicodeobject.py
@@ -188,33 +188,33 @@
"""Get the maximum ordinal for a Unicode character."""
return runicode.UNICHR(runicode.MAXUNICODE)
-@cpython_api([PyObject], rffi.CCHARP, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], rffi.CCHARP, error=CANNOT_FAIL)
def PyUnicode_AS_DATA(space, ref):
"""Return a pointer to the internal buffer of the object. o has to be a
PyUnicodeObject (not checked)."""
return rffi.cast(rffi.CCHARP, PyUnicode_AS_UNICODE(space, ref))
-@cpython_api([PyObject], Py_ssize_t, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], Py_ssize_t, error=CANNOT_FAIL)
def PyUnicode_GET_DATA_SIZE(space, w_obj):
"""Return the size of the object's internal buffer in bytes. o has to be a
PyUnicodeObject (not checked)."""
return rffi.sizeof(lltype.UniChar) * PyUnicode_GET_SIZE(space, w_obj)
-@cpython_api([PyObject], Py_ssize_t, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], Py_ssize_t, error=CANNOT_FAIL)
def PyUnicode_GET_SIZE(space, w_obj):
"""Return the size of the object. o has to be a PyUnicodeObject (not
checked)."""
assert isinstance(w_obj, unicodeobject.W_UnicodeObject)
return space.len_w(w_obj)
-@cpython_api([PyObject], rffi.CWCHARP, error=CANNOT_FAIL)
+@cpython_api([rffi.VOIDP], rffi.CWCHARP, error=CANNOT_FAIL)
def PyUnicode_AS_UNICODE(space, ref):
"""Return a pointer to the internal Py_UNICODE buffer of the object. ref
has to be a PyUnicodeObject (not checked)."""
ref_unicode = rffi.cast(PyUnicodeObject, ref)
if not ref_unicode.c_str:
# Copy unicode buffer
- w_unicode = from_ref(space, ref)
+ w_unicode = from_ref(space, rffi.cast(PyObject, ref))
u = space.unicode_w(w_unicode)
ref_unicode.c_str = rffi.unicode2wcharp(u)
return ref_unicode.c_str
@@ -227,7 +227,7 @@
w_type = from_ref(space, rffi.cast(PyObject, ref.c_ob_type))
if not space.is_true(space.issubtype(w_type, space.w_unicode)):
raise oefmt(space.w_TypeError, "expected unicode object")
- return PyUnicode_AS_UNICODE(space, ref)
+ return PyUnicode_AS_UNICODE(space, rffi.cast(rffi.VOIDP, ref))
@cpython_api([PyObject], Py_ssize_t, error=-1)
def PyUnicode_GetSize(space, ref):
@@ -247,7 +247,7 @@
string may or may not be 0-terminated. It is the responsibility of the
caller
to make sure that the wchar_t string is 0-terminated in case this is
required by the application."""
- c_str = PyUnicode_AS_UNICODE(space, rffi.cast(PyObject, ref))
+ c_str = PyUnicode_AS_UNICODE(space, rffi.cast(rffi.VOIDP, ref))
c_length = ref.c_length
# If possible, try to copy the 0-termination as well
diff --git a/pypy/module/cpyext/weakrefobject.py
b/pypy/module/cpyext/weakrefobject.py
--- a/pypy/module/cpyext/weakrefobject.py
+++ b/pypy/module/cpyext/weakrefobject.py
@@ -1,6 +1,7 @@
from pypy.module.cpyext.api import cpython_api
from pypy.module.cpyext.pyobject import PyObject
from pypy.module._weakref.interp__weakref import W_Weakref, proxy
+from rpython.rtyper.lltypesystem import rffi
@cpython_api([PyObject, PyObject], PyObject)
def PyWeakref_NewRef(space, w_obj, w_callback):
@@ -37,7 +38,7 @@
"""
return space.call_function(w_ref) # borrowed ref
-@cpython_api([PyObject], PyObject, result_borrowed=True)
+@cpython_api([rffi.VOIDP], PyObject, result_borrowed=True)
def PyWeakref_GET_OBJECT(space, w_ref):
"""Similar to PyWeakref_GetObject(), but implemented as a macro that does
no
error checking.
diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py
--- a/pypy/module/imp/importing.py
+++ b/pypy/module/imp/importing.py
@@ -55,6 +55,14 @@
return '.' + soabi + SO
+def log_pyverbose(space, level, message):
+ if space.sys.w_initialdict is None:
+ return # sys module not initialised, avoid recursion
+ verbose = space.sys.get_flag('verbose')
+ if verbose >= level:
+ w_stderr = space.sys.get('stderr')
+ space.call_method(w_stderr, "write", space.wrap(message))
+
def file_exists(path):
"""Tests whether the given path is an existing regular file."""
return os.path.isfile(path) and case_ok(path)
@@ -537,6 +545,7 @@
path = space.str0_w(w_pathitem)
filepart = os.path.join(path, partname)
+ log_pyverbose(space, 2, "# trying %s" % (filepart,))
if os.path.isdir(filepart) and case_ok(filepart):
initfile = os.path.join(filepart, '__init__')
modtype, _, _ = find_modtype(space, initfile)
@@ -581,6 +590,8 @@
def load_c_extension(space, filename, modulename):
from pypy.module.cpyext.api import load_extension_module
+ log_pyverbose(space, 1, "import %s # from %s\n" %
+ (modulename, filename))
load_extension_module(space, filename, modulename)
# NB. cpyext.api.load_extension_module() can also delegate to _cffi_backend
@@ -881,6 +892,9 @@
"""
w = space.wrap
+ log_pyverbose(space, 1, "import %s # from %s\n" %
+ (space.str_w(w_modulename), pathname))
+
src_stat = os.fstat(fd)
cpathname = pathname + 'c'
mtime = int(src_stat[stat.ST_MTIME])
@@ -1003,6 +1017,9 @@
Load a module from a compiled file, execute it, and return its
module object.
"""
+ log_pyverbose(space, 1, "import %s # compiled from %s\n" %
+ (space.str_w(w_modulename), cpathname))
+
if magic != get_pyc_magic(space):
raise oefmt(space.w_ImportError, "Bad magic number in %s", cpathname)
#print "loading pyc file:", cpathname
diff --git a/pypy/module/imp/test/test_import.py
b/pypy/module/imp/test/test_import.py
--- a/pypy/module/imp/test/test_import.py
+++ b/pypy/module/imp/test/test_import.py
@@ -20,15 +20,13 @@
if pkgname:
p = p.join(*pkgname.split('.'))
p.ensure(dir=1)
- f = p.join("__init__.py").open('w')
- print >> f, "# package"
- f.close()
+ with p.join("__init__.py").open('w') as f:
+ print >> f, "# package"
for filename, content in entries.items():
filename += '.py'
- f = p.join(filename).open('w')
- print >> f, '#', filename
- print >> f, content
- f.close()
+ with p.join(filename).open('w') as f:
+ print >> f, '#', filename
+ print >> f, content
return p
def setup_directory_structure(space):
@@ -98,6 +96,9 @@
'a=5\nb=6\rc="""hello\r\nworld"""\r', mode='wb')
p.join('mod.py').write(
'a=15\nb=16\rc="""foo\r\nbar"""\r', mode='wb')
+ setuppkg("verbose1pkg", verbosemod='a = 1729')
+ setuppkg("verbose2pkg", verbosemod='a = 1729')
+ setuppkg("verbose0pkg", verbosemod='a = 1729')
setuppkg("test_bytecode",
a = '',
b = '',
@@ -532,9 +533,8 @@
import time
time.sleep(1)
- f = open(test_reload.__file__, "w")
- f.write("def test():\n raise NotImplementedError\n")
- f.close()
+ with open(test_reload.__file__, "w") as f:
+ f.write("def test():\n raise NotImplementedError\n")
reload(test_reload)
try:
test_reload.test()
@@ -550,9 +550,8 @@
import test_reload
import time
time.sleep(1)
- f = open(test_reload.__file__, "w")
- f.write("a = 10 // 0\n")
- f.close()
+ with open(test_reload.__file__, "w") as f:
+ f.write("a = 10 // 0\n")
# A failing reload should leave the previous module in sys.modules
raises(ZeroDivisionError, reload, test_reload)
@@ -684,7 +683,8 @@
import pkg
import os
pathname = os.path.join(os.path.dirname(pkg.__file__), 'a.py')
- module = imp.load_module('a', open(pathname),
+ with open(pathname) as fid:
+ module = imp.load_module('a', fid,
'invalid_path_name', ('.py', 'r',
imp.PY_SOURCE))
assert module.__name__ == 'a'
assert module.__file__ == 'invalid_path_name'
@@ -719,6 +719,68 @@
else:
raise AssertionError("should have failed")
+ def test_verbose_flag_1(self):
+ output = []
+ class StdErr(object):
+ def write(self, line):
+ output.append(line)
+
+ import sys
+ old_flags = sys.flags
+
+ class Flags(object):
+ verbose = 1
+ def __getattr__(self, name):
+ return getattr(old_flags, name)
+
+ sys.flags = Flags()
+ sys.stderr = StdErr()
+ try:
+ import verbose1pkg.verbosemod
+ finally:
+ reload(sys)
+ assert 'import verbose1pkg # from ' in output[-2]
+ assert 'import verbose1pkg.verbosemod # from ' in output[-1]
+
+ def test_verbose_flag_2(self):
+ output = []
+ class StdErr(object):
+ def write(self, line):
+ output.append(line)
+
+ import sys
+ old_flags = sys.flags
+
+ class Flags(object):
+ verbose = 2
+ def __getattr__(self, name):
+ return getattr(old_flags, name)
+
+ sys.flags = Flags()
+ sys.stderr = StdErr()
+ try:
+ import verbose2pkg.verbosemod
+ finally:
+ reload(sys)
+ assert any('import verbose2pkg # from ' in line
+ for line in output[:-2])
+ assert output[-2].startswith('# trying')
+ assert 'import verbose2pkg.verbosemod # from ' in output[-1]
+
+ def test_verbose_flag_0(self):
+ output = []
+ class StdErr(object):
+ def write(self, line):
+ output.append(line)
+
+ import sys
+ sys.stderr = StdErr()
+ try:
+ import verbose0pkg.verbosemod
+ finally:
+ reload(sys)
+ assert not output
+
class TestAbi:
def test_abi_tag(self):
@@ -786,8 +848,8 @@
assert ret is None
# check for empty .pyc file
- f = open(cpathname, 'wb')
- f.close()
+ with open(cpathname, 'wb') as f:
+ pass
ret = importing.check_compiled_module(space,
cpathname,
mtime)
@@ -1326,7 +1388,8 @@
assert importer is None
# an existing file
path = os.path.join(self.udir, 'test_getimporter')
- open(path, 'w').close()
+ with open(path, 'w') as f:
+ pass
importer = imp._getimporter(path)
assert isinstance(importer, imp.NullImporter)
# a non-existing path
@@ -1335,8 +1398,8 @@
assert isinstance(importer, imp.NullImporter)
# a mostly-empty zip file
path = os.path.join(self.udir, 'test_getimporter.zip')
- f = open(path, 'wb')
- f.write('PK\x03\x04\n\x00\x00\x00\x00\x00P\x9eN>\x00\x00\x00\x00\x00'
+ with open(path, 'wb') as f:
+
f.write('PK\x03\x04\n\x00\x00\x00\x00\x00P\x9eN>\x00\x00\x00\x00\x00'
'\x00\x00\x00\x00\x00\x00\x00\x05\x00\x15\x00emptyUT\t\x00'
'\x03wyYMwyYMUx\x04\x00\xf4\x01d\x00PK\x01\x02\x17\x03\n\x00'
'\x00\x00\x00\x00P\x9eN>\x00\x00\x00\x00\x00\x00\x00\x00\x00'
@@ -1344,7 +1407,6 @@
'\xa4\x81\x00\x00\x00\x00emptyUT\x05\x00\x03wyYMUx\x00\x00PK'
'\x05\x06\x00\x00\x00\x00\x01\x00\x01\x00@\x00\x00\x008\x00'
'\x00\x00\x00\x00')
- f.close()
importer = imp._getimporter(path)
import zipimport
assert isinstance(importer, zipimport.zipimporter)
diff --git a/pypy/module/micronumpy/ndarray.py
b/pypy/module/micronumpy/ndarray.py
--- a/pypy/module/micronumpy/ndarray.py
+++ b/pypy/module/micronumpy/ndarray.py
@@ -443,7 +443,7 @@
'array does not have imaginary part to set')
self.implementation.set_imag(space, self, w_value)
- def reshape(self, space, w_shape, order):
+ def reshape(self, space, w_shape, order=NPY.ANYORDER):
new_shape = get_shape_from_iterable(space, self.get_size(), w_shape)
new_impl = self.implementation.reshape(self, new_shape, order)
if new_impl is not None:
diff --git a/pypy/module/micronumpy/test/test_ufuncs.py
b/pypy/module/micronumpy/test/test_ufuncs.py
--- a/pypy/module/micronumpy/test/test_ufuncs.py
+++ b/pypy/module/micronumpy/test/test_ufuncs.py
@@ -1480,7 +1480,21 @@
def test_outer(self):
import numpy as np
- from numpy import absolute
+ c = np.multiply.outer([1, 2, 3], [4, 5, 6])
+ assert c.shape == (3, 3)
+ assert (c ==[[ 4, 5, 6],
+ [ 8, 10, 12],
+ [12, 15, 18]]).all()
+ A = np.array([[1, 2, 3], [4, 5, 6]])
+ B = np.array([[1, 2, 3, 4]])
+ c = np.multiply.outer(A, B)
+ assert c.shape == (2, 3, 1, 4)
+ assert (c == [[[[ 1, 2, 3, 4]],
+ [[ 2, 4, 6, 8]],
+ [[ 3, 6, 9, 12]]],
+ [[[ 4, 8, 12, 16]],
+ [[ 5, 10, 15, 20]],
+ [[ 6, 12, 18, 24]]]]).all()
exc = raises(ValueError, np.absolute.outer, [-1, -2])
assert exc.value[0] == 'outer product only supported for binary
functions'
diff --git a/pypy/module/micronumpy/ufuncs.py b/pypy/module/micronumpy/ufuncs.py
--- a/pypy/module/micronumpy/ufuncs.py
+++ b/pypy/module/micronumpy/ufuncs.py
@@ -363,12 +363,18 @@
out = space.call_method(obj, '__array_wrap__', out,
space.w_None)
return out
- def descr_outer(self, space, __args__):
- return self._outer(space, __args__)
-
- def _outer(self, space, __args__):
- raise oefmt(space.w_ValueError,
+ def descr_outer(self, space, args_w):
+ if self.nin != 2:
+ raise oefmt(space.w_ValueError,
"outer product only supported for binary functions")
+ if len(args_w) != 2:
+ raise oefmt(space.w_ValueError,
+ "exactly two arguments expected")
+ args = [convert_to_array(space, w_obj) for w_obj in args_w]
+ w_outshape = [space.wrap(i) for i in args[0].get_shape() +
[1]*args[1].ndims()]
+ args0 = args[0].reshape(space, space.newtuple(w_outshape))
+ return self.descr_call(space, Arguments.frompacked(space,
+ space.newlist([args0,
args[1]])))
def parse_kwargs(self, space, kwds_w):
w_casting = kwds_w.pop('casting', None)
diff --git a/pypy/tool/release/repackage.sh b/pypy/tool/release/repackage.sh
--- a/pypy/tool/release/repackage.sh
+++ b/pypy/tool/release/repackage.sh
@@ -1,26 +1,33 @@
# Edit these appropriately before running this script
maj=5
min=1
-rev=1
+rev=2
branchname=release-$maj.x # ==OR== release-$maj.$min.x
tagname=release-$maj.$min.$rev # ==OR== release-$maj.$min
+echo checking hg log -r $branchname
hg log -r $branchname || exit 1
+echo checking hg log -r $tagname
hg log -r $tagname || exit 1
# This script will download latest builds from the buildmaster, rename the top
# level directory, and repackage ready to be uploaded to bitbucket. It will
also
# download source, assuming a tag for the release already exists, and
repackage them.
# The script should be run in an empty directory, i.e. /tmp/release_xxx
-
for plat in linux linux64 linux-armhf-raspbian linux-armhf-raring linux-armel
osx64 s390x
do
+ echo downloading package for $plat
wget
http://buildbot.pypy.org/nightly/$branchname/pypy-c-jit-latest-$plat.tar.bz2
tar -xf pypy-c-jit-latest-$plat.tar.bz2
rm pypy-c-jit-latest-$plat.tar.bz2
- mv pypy-c-jit-*-$plat pypy-$maj.$min.$rev-$plat
- tar --owner=root --group=root --numeric-owner -cvjf
pypy-$maj.$min.$rev-$plat.tar.bz2 pypy-$maj.$min.$rev-$plat
- rm -rf pypy-$maj.$min.$rev-$plat
+ plat_final=$plat
+ if [ $plat = linux ]; then
+ plat_final=linux32
+ fi
+ mv pypy-c-jit-*-$plat pypy-$maj.$min.$rev-$plat_final
+ echo packaging $plat_final
+ tar --owner=root --group=root --numeric-owner -cvjf
pypy-$maj.$min.$rev-$plat_final.tar.bz2 pypy-$maj.$min.$rev-$plat_final
+ rm -rf pypy-$maj.$min.$rev-$plat_final
done
plat=win32
diff --git a/pypy/tool/test/test_tab.py b/pypy/tool/test/test_tab.py
--- a/pypy/tool/test/test_tab.py
+++ b/pypy/tool/test/test_tab.py
@@ -7,7 +7,11 @@
ROOT = os.path.abspath(os.path.join(pypydir, '..'))
RPYTHONDIR = os.path.join(ROOT, "rpython")
-EXCLUDE = {'/virt_test/lib/python2.7/site-packages/setuptools'}
+
+EXCLUDE = {'/virt_test'}
+# ^^^ don't look inside this: it is created by virtualenv on buildslaves.
+# It contains third-party installations that may include tabs in their
+# .py files.
def test_no_tabs():
diff --git a/rpython/jit/backend/zarch/regalloc.py
b/rpython/jit/backend/zarch/regalloc.py
--- a/rpython/jit/backend/zarch/regalloc.py
+++ b/rpython/jit/backend/zarch/regalloc.py
@@ -312,13 +312,21 @@
even, odd = r.r2, r.r3
old_even_var = reverse_mapping.get(even, None)
old_odd_var = reverse_mapping.get(odd, None)
+
+ # forbid r2 and r3 to be in free regs!
+ self.free_regs = [fr for fr in self.free_regs \
+ if fr is not even and \
+ fr is not odd]
+
if old_even_var:
if old_even_var in forbidden_vars:
self._relocate_forbidden_variable(even, old_even_var,
reverse_mapping,
forbidden_vars, odd)
else:
+ # old even var is not forbidden, sync it and be done with
it
self._sync_var(old_even_var)
del self.reg_bindings[old_even_var]
+ del reverse_mapping[odd]
if old_odd_var:
if old_odd_var in forbidden_vars:
self._relocate_forbidden_variable(odd, old_odd_var,
reverse_mapping,
@@ -326,10 +334,8 @@
else:
self._sync_var(old_odd_var)
del self.reg_bindings[old_odd_var]
+ del reverse_mapping[odd]
- self.free_regs = [fr for fr in self.free_regs \
- if fr is not even and \
- fr is not odd]
self.reg_bindings[even_var] = even
self.reg_bindings[odd_var] = odd
return even, odd
@@ -342,10 +348,11 @@
self.assembler.regalloc_mov(reg, candidate)
self.reg_bindings[var] = candidate
reverse_mapping[candidate] = var
+ return # we found a location for that forbidden var!
for candidate in r.MANAGED_REGS:
# move register of var to another register
- # thus it is not allowed to bei either reg or forbidden_reg
+ # it is NOT allowed to be a reg or forbidden_reg
if candidate is reg or candidate is forbidden_reg:
continue
# neither can we allow to move it to a register of another
forbidden variable
@@ -354,11 +361,11 @@
if candidate_var is not None:
self._sync_var(candidate_var)
del self.reg_bindings[candidate_var]
+ del reverse_mapping[candidate]
self.assembler.regalloc_mov(reg, candidate)
assert var is not None
self.reg_bindings[var] = candidate
reverse_mapping[candidate] = var
- self.free_regs.append(reg)
break
else:
raise NoVariableToSpill
diff --git a/rpython/memory/gc/env.py b/rpython/memory/gc/env.py
--- a/rpython/memory/gc/env.py
+++ b/rpython/memory/gc/env.py
@@ -210,7 +210,7 @@
"Warning: cannot find your CPU L2 cache size in /proc/cpuinfo")
return -1
-def get_L2cache_linux2_cpuinfo_s390x(filename="/proc/cpuinfo", label='cache3'):
+def get_L2cache_linux2_cpuinfo_s390x(filename="/proc/cpuinfo", label='cache2'):
debug_start("gc-hardware")
L2cache = sys.maxint
try:
@@ -233,29 +233,19 @@
start = _findend(data, '\n' + label, linepos)
if start < 0:
break # done
- linepos = _findend(data, '\n', start)
- if linepos < 0:
- break # no end-of-line??
- # *** data[start:linepos] == " : level=2 type=Instruction
scope=Private size=2048K ..."
- start = _skipspace(data, start)
- if data[start] != ':':
+ start = _findend(data, 'size=', start)
+ if start < 0:
+ break
+ end = _findend(data, ' ', start) - 1
+ if end < 0:
+ break
+ linepos = end
+ size = data[start:end]
+ last_char = len(size)-1
+ assert 0 <= last_char < len(size)
+ if size[last_char] not in ('K', 'k'): # assume kilobytes for now
continue
- # *** data[start:linepos] == ": level=2 type=Instruction
scope=Private size=2048K ..."
- start = _skipspace(data, start + 1)
- # *** data[start:linepos] == "level=2 type=Instruction
scope=Private size=2048K ..."
- start += 44
- end = start
- while '0' <= data[end] <= '9':
- end += 1
- # *** data[start:end] == "2048"
- if start == end:
- continue
- number = int(data[start:end])
- # *** data[end:linepos] == " KB\n"
- end = _skipspace(data, end)
- if data[end] not in ('K', 'k'): # assume kilobytes for now
- continue
- number = number * 1024
+ number = int(size[:last_char])* 1024
# for now we look for the smallest of the L2 caches of the CPUs
if number < L2cache:
L2cache = number
diff --git a/rpython/memory/gc/test/test_env.py
b/rpython/memory/gc/test/test_env.py
--- a/rpython/memory/gc/test/test_env.py
+++ b/rpython/memory/gc/test/test_env.py
@@ -162,21 +162,31 @@
result = env.get_L2cache_linux2_cpuinfo(str(filepath))
assert result == 3072 * 1024
-def test_estimate_best_nursery_size_linux2_s390x():
+def test_estimate_nursery_s390x():
filepath = udir.join('estimate_best_nursery_size_linux2')
filepath.write("""\
vendor_id : IBM/S390
# processors : 2
bogomips per cpu: 20325.00
-features : esan3 zarch stfle msa ldisp eimm dfp etf3eh highgprs
-cache0 : level=1 type=Data scope=Private size=128K line_size=256
associativity=8
-cache1 : level=1 type=Instruction scope=Private size=96K
line_size=256 associativity=6
+...
cache2 : level=2 type=Data scope=Private size=2048K line_size=256
associativity=8
cache3 : level=2 type=Instruction scope=Private size=2048K
line_size=256 associativity=8
-cache4 : level=3 type=Unified scope=Shared size=65536K line_size=256
associativity=16
-cache5 : level=4 type=Unified scope=Shared size=491520K line_size=256
associativity=30
-processor 0: version = FF, identification = 026A77, machine = 2964
-processor 1: version = FF, identification = 026A77, machine = 2964
+...
""")
result = env.get_L2cache_linux2_cpuinfo_s390x(str(filepath))
assert result == 2048 * 1024
+
+ filepath = udir.join('estimate_best_nursery_size_linux3')
+ filepath.write("""\
+vendor_id : IBM/S390
+# processors : 2
+bogomips per cpu: 9398.00
+...
+cache2 : level=2 type=Unified scope=Private size=1536K line_size=256
associativity=12
+cache3 : level=3 type=Unified scope=Shared size=24576K line_size=256
associativity=12
+...
+""")
+ result = env.get_L2cache_linux2_cpuinfo_s390x(str(filepath),
label='cache3')
+ assert result == 24576 * 1024
+ result = env.get_L2cache_linux2_cpuinfo_s390x(str(filepath),
label='cache2')
+ assert result == 1536 * 1024
diff --git a/rpython/rlib/rawrefcount.py b/rpython/rlib/rawrefcount.py
--- a/rpython/rlib/rawrefcount.py
+++ b/rpython/rlib/rawrefcount.py
@@ -27,13 +27,13 @@
"""NOT_RPYTHON: set up rawrefcount with the GC. This is only used
for tests; it should not be called at all during translation.
"""
- global _p_list, _o_list, _adr2pypy, _pypy2ob, _ob_set
+ global _p_list, _o_list, _adr2pypy, _pypy2ob, _pypy2ob_rev
global _d_list, _dealloc_trigger_callback
_p_list = []
_o_list = []
_adr2pypy = [None]
_pypy2ob = {}
- _ob_set = set()
+ _pypy2ob_rev = {}
_d_list = []
_dealloc_trigger_callback = dealloc_trigger_callback
@@ -41,23 +41,22 @@
"NOT_RPYTHON: a link where the PyPy object contains some or all the data"
#print 'create_link_pypy\n\t%s\n\t%s' % (p, ob)
assert p not in _pypy2ob
- assert ob._obj not in _ob_set
+ assert ob._obj not in _pypy2ob_rev
assert not ob.c_ob_pypy_link
ob.c_ob_pypy_link = _build_pypy_link(p)
_pypy2ob[p] = ob
+ _pypy2ob_rev[ob._obj] = p
_p_list.append(ob)
- _ob_set.add(ob._obj)
def create_link_pyobj(p, ob):
"""NOT_RPYTHON: a link where the PyObject contains all the data.
from_obj() will not work on this 'p'."""
#print 'create_link_pyobj\n\t%s\n\t%s' % (p, ob)
assert p not in _pypy2ob
- assert ob._obj not in _ob_set
+ assert ob._obj not in _pypy2ob_rev
assert not ob.c_ob_pypy_link
ob.c_ob_pypy_link = _build_pypy_link(p)
_o_list.append(ob)
- _ob_set.add(ob._obj)
def from_obj(OB_PTR_TYPE, p):
"NOT_RPYTHON"
@@ -65,6 +64,7 @@
if ob is None:
return lltype.nullptr(OB_PTR_TYPE.TO)
assert lltype.typeOf(ob) == OB_PTR_TYPE
+ assert _pypy2ob_rev[ob._obj] is p
return ob
def to_obj(Class, ob):
@@ -111,8 +111,10 @@
new_p_list.append(ob)
else:
p = detach(ob, wr_p_list)
- del _pypy2ob[p]
- del p
+ ob_test = _pypy2ob.pop(p)
+ p_test = _pypy2ob_rev.pop(ob_test._obj)
+ assert p_test is p
+ del p, p_test
ob = None
_p_list = Ellipsis
@@ -156,6 +158,10 @@
p = attach(ob, wr, _p_list)
if p is not None:
_pypy2ob[p] = ob
+ _pypy2ob_rev.clear() # rebuild this dict from scratch
+ for p, ob in _pypy2ob.items():
+ assert ob._obj not in _pypy2ob_rev
+ _pypy2ob_rev[ob._obj] = p
_o_list = []
for ob, wr in wr_o_list:
attach(ob, wr, _o_list)
diff --git a/rpython/tool/algo/test/test_regalloc.py
b/rpython/tool/algo/test/test_regalloc.py
new file mode 100644
--- /dev/null
+++ b/rpython/tool/algo/test/test_regalloc.py
@@ -0,0 +1,60 @@
+from rpython.rtyper.test.test_llinterp import gengraph
+from rpython.rtyper.lltypesystem import lltype
+from rpython.tool.algo.regalloc import perform_register_allocation
+from rpython.flowspace.model import Variable
+from rpython.conftest import option
+
+
+def is_int(v):
+ return v.concretetype == lltype.Signed
+
+def check_valid(graph, regalloc, consider_var):
+ if getattr(option, 'view', False):
+ graph.show()
+ num_renamings = 0
+ for block in graph.iterblocks():
+ inputs = [v for v in block.inputargs if consider_var(v)]
+ colors = [regalloc.getcolor(v) for v in inputs]
+ print inputs, ':', colors
+ assert len(inputs) == len(set(colors))
+ in_use = dict(zip(colors, inputs))
+ for op in block.operations:
+ for v in op.args:
+ if isinstance(v, Variable) and consider_var(v):
+ assert in_use[regalloc.getcolor(v)] is v
+ if consider_var(op.result):
+ in_use[regalloc.getcolor(op.result)] = op.result
+ for link in block.exits:
+ for i, v in enumerate(link.args):
+ if consider_var(v):
+ assert in_use[regalloc.getcolor(v)] is v
+ w = link.target.inputargs[i]
+ if regalloc.getcolor(v) is not regalloc.getcolor(w):
+ print '\trenaming %s:%d -> %s:%d' % (
+ v, regalloc.getcolor(v), w, regalloc.getcolor(w))
+ num_renamings += 1
+ return num_renamings
+
+
+def test_loop_1():
+ def f(a, b):
+ while a > 0:
+ b += a
+ a -= 1
+ return b
+ t, rtyper, graph = gengraph(f, [int, int], viewbefore=False)
+ regalloc = perform_register_allocation(graph, is_int)
+ num_renamings = check_valid(graph, regalloc, is_int)
+ assert num_renamings == 0
+
+def test_loop_2():
+ def f(a, b):
+ while a > 0:
+ b += a
+ if b < 10:
+ a, b = b, a
+ a -= 1
+ return b
+ t, rtyper, graph = gengraph(f, [int, int], viewbefore=False)
+ regalloc = perform_register_allocation(graph, is_int)
+ check_valid(graph, regalloc, is_int)
diff --git a/rpython/translator/c/src/int.h b/rpython/translator/c/src/int.h
--- a/rpython/translator/c/src/int.h
+++ b/rpython/translator/c/src/int.h
@@ -53,7 +53,21 @@
/* addition, subtraction */
#define OP_INT_ADD(x,y,r) r = (x) + (y)
+#define OP_INT_SUB(x,y,r) r = (x) - (y)
+#define OP_INT_MUL(x,y,r) r = (x) * (y)
+
+#ifdef __GNUC__
+# if __GNUC__ >= 5
+# define HAVE_BUILTIN_OVERFLOW
+# elif defined(__has_builtin) /* clang */
+# if __has_builtin(__builtin_mul_overflow)
+# define HAVE_BUILTIN_OVERFLOW
+# endif
+# endif
+#endif
+
+#ifndef HAVE_BUILTIN_OVERFLOW
/* cast to avoid undefined behaviour on overflow */
#define OP_INT_ADD_OVF(x,y,r) \
r = (Signed)((Unsigned)x + y); \
@@ -63,14 +77,10 @@
r = (Signed)((Unsigned)x + y); \
if ((r&~x) < 0) FAIL_OVF("integer addition")
-#define OP_INT_SUB(x,y,r) r = (x) - (y)
-
#define OP_INT_SUB_OVF(x,y,r) \
r = (Signed)((Unsigned)x - y); \
if ((r^x) < 0 && (r^~y) < 0) FAIL_OVF("integer subtraction")
-#define OP_INT_MUL(x,y,r) r = (x) * (y)
-
#if SIZEOF_LONG * 2 <= SIZEOF_LONG_LONG && !defined(_WIN64)
#define OP_INT_MUL_OVF(x,y,r) \
{ \
@@ -83,6 +93,17 @@
r = op_llong_mul_ovf(x, y) /* long == long long */
#endif
+#else /* HAVE_BUILTIN_OVERFLOW */
+#define OP_INT_ADD_NONNEG_OVF(x,y,r) OP_INT_ADD_OVF(x,y,r)
+#define OP_INT_ADD_OVF(x,y,r) \
+ if (__builtin_add_overflow(x, y, &r)) FAIL_OVF("integer addition")
+#define OP_INT_SUB_OVF(x,y,r) \
+ if (__builtin_sub_overflow(x, y, &r)) FAIL_OVF("integer subtraction")
+#define OP_INT_MUL_OVF(x,y,r) \
+ if (__builtin_mul_overflow(x, y, &r)) FAIL_OVF("integer multiplication")
+#endif
+
+
/* shifting */
/* NB. shifting has same limitations as C: the shift count must be
_______________________________________________
pypy-commit mailing list
[email protected]
https://mail.python.org/mailman/listinfo/pypy-commit