Author: Manuel Jacob <[email protected]>
Branch: py3k
Changeset: r77667:7cf68865e552
Date: 2015-05-29 01:59 +0200
http://bitbucket.org/pypy/pypy/changeset/7cf68865e552/
Log: hg merge 5acade5a80c5
This is part of a series of commits to merge default into the py3k
branch. The merge is very large, so it's easier when split into
smaller pieces.
diff too long, truncating to 2000 out of 11603 lines
diff --git a/LICENSE b/LICENSE
--- a/LICENSE
+++ b/LICENSE
@@ -420,3 +420,10 @@
the terms of the GPL license version 2 or any later version. Thus the
_gdbm module, provided in the file lib_pypy/_gdbm.py, is redistributed
under the terms of the GPL license as well.
+
+License for 'pypy/module/_vmprof/src'
+--------------------------------------
+
+The code is based on gperftools. You may see a copy of the License for it at
+
+ https://code.google.com/p/gperftools/source/browse/COPYING
diff --git a/lib-python/2.7/socket.py b/lib-python/2.7/socket.py
--- a/lib-python/2.7/socket.py
+++ b/lib-python/2.7/socket.py
@@ -145,6 +145,34 @@
name = hostname
return name
+class RefCountingWarning(UserWarning):
+ pass
+
+def _do_reuse_or_drop(socket, methname):
+ try:
+ method = getattr(socket, methname)
+ except (AttributeError, TypeError):
+ warnings.warn("""'%s' object has no _reuse/_drop methods
+{{
+ You make use (or a library you are using makes use) of the internal
+ classes '_socketobject' and '_fileobject' in socket.py, initializing
+ them with custom objects. On PyPy, these custom objects need two
+ extra methods, _reuse() and _drop(), that maintain an explicit
+ reference counter. When _drop() has been called as many times as
+ _reuse(), then the object should be freed.
+
+ Without these methods, you get the warning here. This is to
+ prevent the following situation: if your (or the library's) code
+ relies on reference counting for prompt closing, then on PyPy, the
+ __del__ method will be called later than on CPython. You can
+ easily end up in a situation where you open and close a lot of
+ (high-level) '_socketobject' or '_fileobject', but the (low-level)
+ custom objects will accumulate before their __del__ are called.
+ You quickly risk running out of file descriptors, for example.
+}}""" % (socket.__class__.__name__,), RefCountingWarning, stacklevel=3)
+ else:
+ method()
+
_socketmethods = (
'bind', 'connect', 'connect_ex', 'fileno', 'listen',
@@ -182,19 +210,7 @@
if _sock is None:
_sock = _realsocket(family, type, proto)
else:
- # PyPy note about refcounting: implemented with _reuse()/_drop()
- # on the class '_socket.socket'. Python 3 did it differently
- # with a reference counter on this class 'socket._socketobject'
- # instead, but it is a less compatible change.
-
- # Note that a few libraries (like eventlet) poke at the
- # private implementation of socket.py, passing custom
- # objects to _socketobject(). These libraries need the
- # following fix for use on PyPy: the custom objects need
- # methods _reuse() and _drop() that maintains an explicit
- # reference counter, starting at 0. When it drops back to
- # zero, close() must be called.
- _sock._reuse()
+ _do_reuse_or_drop(_sock, '_reuse')
self._sock = _sock
@@ -228,13 +244,13 @@
def close(self):
s = self._sock
self._sock = _closedsocket()
- s._drop()
+ _do_reuse_or_drop(s, '_drop')
close.__doc__ = _realsocket.close.__doc__
def accept(self):
sock, addr = self._sock.accept()
sockobj = _socketobject(_sock=sock)
- sock._drop() # already a copy in the _socketobject()
+ _do_reuse_or_drop(sock, '_drop') # already a copy in the _socketobject()
return sockobj, addr
accept.__doc__ = _realsocket.accept.__doc__
@@ -290,14 +306,7 @@
"_close"]
def __init__(self, sock, mode='rb', bufsize=-1, close=False):
- # Note that a few libraries (like eventlet) poke at the
- # private implementation of socket.py, passing custom
- # objects to _fileobject(). These libraries need the
- # following fix for use on PyPy: the custom objects need
- # methods _reuse() and _drop() that maintains an explicit
- # reference counter, starting at 0. When it drops back to
- # zero, close() must be called.
- sock._reuse()
+ _do_reuse_or_drop(sock, '_reuse')
self._sock = sock
self.mode = mode # Not actually used in this version
if bufsize < 0:
@@ -338,7 +347,7 @@
if self._close:
s.close()
else:
- s._drop()
+ _do_reuse_or_drop(s, '_drop')
def __del__(self):
try:
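For readers hitting the new RefCountingWarning: the _reuse()/_drop() protocol that _do_reuse_or_drop() probes for is small. A minimal sketch of a custom object that could be passed to _socketobject()/_fileobject() without triggering the warning (the wrapper class below is hypothetical, not part of this changeset):

    class RefCountedSock(object):
        """Hypothetical wrapper satisfying the _reuse()/_drop() protocol."""

        def __init__(self, real_sock):
            self._real_sock = real_sock
            self._refcount = 0           # starts at 0, as the warning describes

        def _reuse(self):
            # a _socketobject or _fileobject starts holding a reference to us
            self._refcount += 1

        def _drop(self):
            # when the count drops back to zero, close the real socket promptly
            self._refcount -= 1
            if self._refcount == 0:
                self._real_sock.close()

        def __getattr__(self, name):
            # delegate recv/send/fileno/... to the wrapped socket
            return getattr(self._real_sock, name)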
diff --git a/lib_pypy/_ctypes/function.py b/lib_pypy/_ctypes/function.py
--- a/lib_pypy/_ctypes/function.py
+++ b/lib_pypy/_ctypes/function.py
@@ -275,7 +275,11 @@
if argtypes:
args = [argtype._CData_retval(argtype.from_address(arg)._buffer)
for argtype, arg in zip(argtypes, args)]
- return to_call(*args)
+ try:
+ return to_call(*args)
+ except SystemExit, e:
+ handle_system_exit(e)
+ raise
return f
def __call__(self, *args, **kwargs):
@@ -304,7 +308,11 @@
except (UnicodeError, TypeError, ValueError) as e:
raise ArgumentError(str(e))
try:
- res = self.callable(*newargs)
+ try:
+ res = self.callable(*newargs)
+ except SystemExit, e:
+ handle_system_exit(e)
+ raise
except:
exc_info = sys.exc_info()
traceback.print_tb(exc_info[2], file=sys.stderr)
@@ -715,3 +723,22 @@
make_fastpath_subclass.memo[CFuncPtr] = CFuncPtrFast
return CFuncPtrFast
make_fastpath_subclass.memo = {}
+
+
+def handle_system_exit(e):
+ # issue #1194: if we get SystemExit here, then exit the interpreter.
+ # Highly obscure imho but some people seem to depend on it.
+ if sys.flags.inspect:
+ return # Don't exit if -i flag was given.
+ else:
+ code = e.code
+ if isinstance(code, int):
+ exitcode = code
+ else:
+ f = getattr(sys, 'stderr', None)
+ if f is None:
+ f = sys.__stderr__
+ print >> f, code
+ exitcode = 1
+
+ _rawffi.exit(exitcode)
diff --git a/lib_pypy/_functools.py b/lib_pypy/_functools.py
--- a/lib_pypy/_functools.py
+++ b/lib_pypy/_functools.py
@@ -32,16 +32,16 @@
partial(func, *args, **keywords) - new function with partial application
of the given arguments and keywords.
"""
-
- def __init__(self, *args, **keywords):
- if not args:
- raise TypeError('__init__() takes at least 2 arguments (1 given)')
- func, args = args[0], args[1:]
+ def __init__(*args, **keywords):
+ if len(args) < 2:
+ raise TypeError('__init__() takes at least 2 arguments (%d given)'
+ % len(args))
+ self, func, args = args[0], args[1], args[2:]
if not callable(func):
raise TypeError("the first argument must be callable")
self._func = func
self._args = args
- self._keywords = keywords or None
+ self._keywords = keywords
def __delattr__(self, key):
if key == '__dict__':
@@ -61,9 +61,9 @@
return self._keywords
def __call__(self, *fargs, **fkeywords):
- if self.keywords is not None:
- fkeywords = dict(self.keywords, **fkeywords)
- return self.func(*(self.args + fargs), **fkeywords)
+ if self._keywords:
+ fkeywords = dict(self._keywords, **fkeywords)
+ return self._func(*(self._args + fargs), **fkeywords)
def __repr__(self):
cls = type(self)
@@ -84,10 +84,13 @@
('_func', '_args', '_keywords'))
if len(d) == 0:
d = None
- return (type(self), (self.func,),
- (self.func, self.args, self.keywords, d))
+ return (type(self), (self._func,),
+ (self._func, self._args, self._keywords, d))
def __setstate__(self, state):
- self._func, self._args, self._keywords, d = state
+ func, args, keywords, d = state
if d is not None:
self.__dict__.update(d)
+ self._func = func
+ self._args = args
+ self._keywords = keywords
diff --git a/lib_pypy/_gdbm.py b/lib_pypy/_gdbm.py
--- a/lib_pypy/_gdbm.py
+++ b/lib_pypy/_gdbm.py
@@ -1,4 +1,6 @@
import cffi, os, sys
+import thread
+_lock = thread.allocate_lock()
ffi = cffi.FFI()
ffi.cdef('''
@@ -40,6 +42,7 @@
try:
verify_code = '''
+ #include <stdlib.h>
#include "gdbm.h"
static datum pygdbm_fetch(GDBM_FILE gdbm_file, char *dptr, int dsize) {
@@ -87,59 +90,71 @@
return {'dptr': ffi.new("char[]", key), 'dsize': len(key)}
class gdbm(object):
- ll_dbm = None
+ __ll_dbm = None
+
+ # All public methods need to acquire the lock; all private methods
+ # assume the lock is already held. Thus public methods cannot call
+ # other public methods.
def __init__(self, filename, iflags, mode):
- res = lib.gdbm_open(filename, 0, iflags, mode, ffi.NULL)
- self.size = -1
- if not res:
- self._raise_from_errno()
- self.ll_dbm = res
+ with _lock:
+ res = lib.gdbm_open(filename, 0, iflags, mode, ffi.NULL)
+ self.__size = -1
+ if not res:
+ self.__raise_from_errno()
+ self.__ll_dbm = res
def close(self):
- if self.ll_dbm:
- lib.gdbm_close(self.ll_dbm)
- self.ll_dbm = None
+ with _lock:
+ if self.__ll_dbm:
+ lib.gdbm_close(self.__ll_dbm)
+ self.__ll_dbm = None
- def _raise_from_errno(self):
+ def __raise_from_errno(self):
if ffi.errno:
raise error(ffi.errno, os.strerror(ffi.errno))
raise error(lib.gdbm_errno, lib.gdbm_strerror(lib.gdbm_errno))
def __len__(self):
- if self.size < 0:
- self.size = len(self.keys())
- return self.size
+ with _lock:
+ if self.__size < 0:
+ self.__size = len(self.__keys())
+ return self.__size
def __setitem__(self, key, value):
- self._check_closed()
- self._size = -1
- r = lib.gdbm_store(self.ll_dbm, _fromstr(key), _fromstr(value),
- lib.GDBM_REPLACE)
- if r < 0:
- self._raise_from_errno()
+ with _lock:
+ self.__check_closed()
+ self.__size = -1
+ r = lib.gdbm_store(self.__ll_dbm, _fromstr(key), _fromstr(value),
+ lib.GDBM_REPLACE)
+ if r < 0:
+ self.__raise_from_errno()
def __delitem__(self, key):
- self._check_closed()
- res = lib.gdbm_delete(self.ll_dbm, _fromstr(key))
- if res < 0:
- raise KeyError(key)
+ with _lock:
+ self.__check_closed()
+ self.__size = -1
+ res = lib.gdbm_delete(self.__ll_dbm, _fromstr(key))
+ if res < 0:
+ raise KeyError(key)
def __contains__(self, key):
- self._check_closed()
- key = _checkstr(key)
- return lib.pygdbm_exists(self.ll_dbm, key, len(key))
+ with _lock:
+ self.__check_closed()
+ key = _checkstr(key)
+ return lib.pygdbm_exists(self.__ll_dbm, key, len(key))
has_key = __contains__
def get(self, key, default=None):
- self._check_closed()
- key = _checkstr(key)
- drec = lib.pygdbm_fetch(self.ll_dbm, key, len(key))
- if not drec.dptr:
- return default
- res = bytes(ffi.buffer(drec.dptr, drec.dsize))
- lib.free(drec.dptr)
- return res
+ with _lock:
+ self.__check_closed()
+ key = _checkstr(key)
+ drec = lib.pygdbm_fetch(self.__ll_dbm, key, len(key))
+ if not drec.dptr:
+ return default
+ res = bytes(ffi.buffer(drec.dptr, drec.dsize))
+ lib.free(drec.dptr)
+ return res
def __getitem__(self, key):
value = self.get(key)
@@ -147,47 +162,55 @@
raise KeyError(key)
return value
- def keys(self):
- self._check_closed()
+ def __keys(self):
+ self.__check_closed()
l = []
- key = lib.gdbm_firstkey(self.ll_dbm)
+ key = lib.gdbm_firstkey(self.__ll_dbm)
while key.dptr:
l.append(bytes(ffi.buffer(key.dptr, key.dsize)))
- nextkey = lib.gdbm_nextkey(self.ll_dbm, key)
+ nextkey = lib.gdbm_nextkey(self.__ll_dbm, key)
lib.free(key.dptr)
key = nextkey
return l
+ def keys(self):
+ with _lock:
+ return self.__keys()
+
def firstkey(self):
- self._check_closed()
- key = lib.gdbm_firstkey(self.ll_dbm)
- if key.dptr:
- res = bytes(ffi.buffer(key.dptr, key.dsize))
- lib.free(key.dptr)
- return res
+ with _lock:
+ self.__check_closed()
+ key = lib.gdbm_firstkey(self.__ll_dbm)
+ if key.dptr:
+ res = bytes(ffi.buffer(key.dptr, key.dsize))
+ lib.free(key.dptr)
+ return res
def nextkey(self, key):
- self._check_closed()
- key = lib.gdbm_nextkey(self.ll_dbm, _fromstr(key))
- if key.dptr:
- res = bytes(ffi.buffer(key.dptr, key.dsize))
- lib.free(key.dptr)
- return res
+ with _lock:
+ self.__check_closed()
+ key = lib.gdbm_nextkey(self.__ll_dbm, _fromstr(key))
+ if key.dptr:
+ res = bytes(ffi.buffer(key.dptr, key.dsize))
+ lib.free(key.dptr)
+ return res
def reorganize(self):
- self._check_closed()
- if lib.gdbm_reorganize(self.ll_dbm) < 0:
- self._raise_from_errno()
+ with _lock:
+ self.__check_closed()
+ if lib.gdbm_reorganize(self.__ll_dbm) < 0:
+ self.__raise_from_errno()
- def _check_closed(self):
- if not self.ll_dbm:
+ def __check_closed(self):
+ if not self.__ll_dbm:
raise error(0, "GDBM object has already been closed")
__del__ = close
def sync(self):
- self._check_closed()
- lib.gdbm_sync(self.ll_dbm)
+ with _lock:
+ self.__check_closed()
+ lib.gdbm_sync(self.__ll_dbm)
def setdefault(self, key, default=None):
value = self.get(key)
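The locking convention spelled out at the top of the gdbm class (public methods take the module-level _lock, name-mangled private methods assume it is already held, so public methods never call other public methods) is the standard way to stay safe with a non-reentrant lock. A stripped-down, runnable sketch of the same pattern, using made-up names and threading.Lock() in place of thread.allocate_lock():

    import threading

    _lock = threading.Lock()             # non-reentrant, like thread.allocate_lock()

    class Store(object):
        def __init__(self):
            self._data = {}

        def __keys(self):                # private: caller must already hold _lock
            return list(self._data)

        def keys(self):                  # public: takes the lock, calls only privates
            with _lock:
                return self.__keys()

        def __len__(self):
            with _lock:
                # calling self.keys() here would deadlock on the non-reentrant
                # lock, so the private variant is used instead
                return len(self.__keys())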
diff --git a/lib_pypy/greenlet.egg-info b/lib_pypy/greenlet.egg-info
--- a/lib_pypy/greenlet.egg-info
+++ b/lib_pypy/greenlet.egg-info
@@ -1,6 +1,6 @@
Metadata-Version: 1.0
Name: greenlet
-Version: 0.4.5
+Version: 0.4.6
Summary: Lightweight in-process concurrent programming
Home-page: https://github.com/python-greenlet/greenlet
Author: Ralf Schmitt (for CPython), PyPy team
diff --git a/lib_pypy/greenlet.py b/lib_pypy/greenlet.py
--- a/lib_pypy/greenlet.py
+++ b/lib_pypy/greenlet.py
@@ -2,7 +2,7 @@
import __pypy__
import _continuation
-__version__ = "0.4.5"
+__version__ = "0.4.6"
# ____________________________________________________________
# Exceptions
diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py
--- a/pypy/config/pypyoption.py
+++ b/pypy/config/pypyoption.py
@@ -39,6 +39,10 @@
"_csv", "_pypyjson", "_posixsubprocess", # "cppyy", "micronumpy"
])
+if sys.platform.startswith('linux') and sys.maxint > 2147483647:
+ if 0: # XXX disabled until we fix the absurd .so mess
+ working_modules.add('_vmprof')
+
translation_modules = default_modules.copy()
translation_modules.update([
"fcntl", "time", "select", "signal", "_rawffi", "zlib", "struct",
@@ -101,6 +105,7 @@
"_hashlib" : ["pypy.module._ssl.interp_ssl"],
"_minimal_curses": ["pypy.module._minimal_curses.fficurses"],
"_continuation": ["rpython.rlib.rstacklet"],
+ "_vmprof" : ["pypy.module._vmprof.interp_vmprof"],
}
def get_module_validator(modname):
diff --git a/pypy/doc/cpython_differences.rst b/pypy/doc/cpython_differences.rst
--- a/pypy/doc/cpython_differences.rst
+++ b/pypy/doc/cpython_differences.rst
@@ -320,6 +320,13 @@
http://bugs.python.org/issue14621, some of us believe it has no
purpose in CPython either.
+* You can't store non-string keys in type objects. For example::
+
+ class A(object):
+ locals()[42] = 3
+
+ won't work.
+
* ``sys.setrecursionlimit(n)`` sets the limit only approximately,
by setting the usable stack space to ``n * 768`` bytes. On Linux,
depending on the compiler settings, the default of 768KB is enough
@@ -361,8 +368,13 @@
opposed to a dict proxy like in CPython. Mutating the dict will change the
type and vice versa. For builtin types, a dictionary will be returned that
cannot be changed (but still looks and behaves like a normal dictionary).
+
+* some functions and attributes of the ``gc`` module behave in a
+ slightly different way: for example, ``gc.enable`` and
+ ``gc.disable`` are supported, but instead of enabling and disabling
+ the GC, they just enable and disable the execution of finalizers.
* PyPy prints a random line from past #pypy IRC topics at startup in
- interactive mode. In a released version, this behaviour is supressed, but
+ interactive mode. In a released version, this behaviour is suppressed, but
setting the environment variable PYPY_IRC_TOPIC will bring it back. Note that
downstream package providers have been known to totally disable this feature.
diff --git a/pypy/doc/embedding.rst b/pypy/doc/embedding.rst
--- a/pypy/doc/embedding.rst
+++ b/pypy/doc/embedding.rst
@@ -51,6 +51,9 @@
otherwise return 0. You should really do your own error handling in the
source. It'll acquire the GIL.
+ Note: this is meant to be called *only once* or a few times at most. See
+ the `more complete example`_ below.
+
.. function:: int pypy_execute_source_ptr(char* source, void* ptr);
.. note:: Not available in PyPy <= 2.2.1
@@ -65,8 +68,9 @@
Note that this function is not thread-safe itself, so you need to guard it
with a mutex.
-Simple example
---------------
+
+Minimal example
+---------------
Note that this API is a lot more minimal than say CPython C API, so at first
it's obvious to think that you can't do much. However, the trick is to do
@@ -78,10 +82,10 @@
.. code-block:: c
- #include "include/PyPy.h"
+ #include "PyPy.h"
#include <stdio.h>
- const char source[] = "print 'hello from pypy'";
+ static char source[] = "print 'hello from pypy'";
int main(void)
{
@@ -103,154 +107,115 @@
If we save it as ``x.c`` now, compile it and run it (on linux) with::
- fijal@hermann:/opt/pypy$ gcc -o x x.c -lpypy-c -L.
- fijal@hermann:/opt/pypy$ LD_LIBRARY_PATH=. ./x
+ $ gcc -g -o x x.c -lpypy-c -L/opt/pypy/bin -I/opt/pypy/include
+ $ LD_LIBRARY_PATH=/opt/pypy/bin ./x
hello from pypy
-on OSX it is necessary to set the rpath of the binary if one wants to link to it::
+.. note:: If the compilation fails because of missing PyPy.h header file,
+ you are running PyPy <= 2.2.1. Get it here__.
+
+.. __: https://bitbucket.org/pypy/pypy/raw/c4cd6eca9358066571500ac82aaacfdaa3889e8c/include/PyPy.h
+
+On OSX it is necessary to set the rpath of the binary if one wants to link to it,
+with a command like::
gcc -o x x.c -lpypy-c -L. -Wl,-rpath -Wl,@executable_path
./x
hello from pypy
-Worked!
-.. note:: If the compilation fails because of missing PyPy.h header file,
- you are running PyPy <= 2.2.1, please see the section `Missing PyPy.h`_.
-
-Missing PyPy.h
---------------
-
-.. note:: PyPy.h is in the nightly builds and goes to new PyPy releases (>2.2.1).
-
-For PyPy <= 2.2.1, you can download PyPy.h from PyPy repository (it has been added in commit c4cd6ec):
-
-.. code-block:: bash
-
- cd /opt/pypy/include
- wget https://bitbucket.org/pypy/pypy/raw/c4cd6eca9358066571500ac82aaacfdaa3889e8c/include/PyPy.h
-
-
-More advanced example
+More complete example
---------------------
.. note:: This example depends on pypy_execute_source_ptr which is not available
- in PyPy <= 2.2.1. You might want to see the alternative example
- below.
+ in PyPy <= 2.2.1.
Typically we need something more to do than simply execute source. The following
is a fully fledged example, please consult cffi documentation for details.
It's a bit longish, but it captures a gist what can be done with the PyPy
embedding interface:
+.. code-block:: python
+
+ # file "interface.py"
+
+ import cffi
+
+ ffi = cffi.FFI()
+ ffi.cdef('''
+ struct API {
+ double (*add_numbers)(double x, double y);
+ };
+ ''')
+
+ # Better define callbacks at module scope, it's important to
+ # keep this object alive.
+ @ffi.callback("double (double, double)")
+ def add_numbers(x, y):
+ return x + y
+
+ def fill_api(ptr):
+ global api
+ api = ffi.cast("struct API*", ptr)
+ api.add_numbers = add_numbers
+
.. code-block:: c
- #include "include/PyPy.h"
+ /* C example */
+ #include "PyPy.h"
#include <stdio.h>
- char source[] = "from cffi import FFI\n\
- ffi = FFI()\n\
- @ffi.callback('int(int)')\n\
- def func(a):\n\
- print 'Got from C %d' % a\n\
- return a * 2\n\
- ffi.cdef('int callback(int (*func)(int));')\n\
- c_func = ffi.cast('int(*)(int(*)(int))', c_argument)\n\
- c_func(func)\n\
- print 'finished the Python part'\n\
- ";
+ struct API {
+ double (*add_numbers)(double x, double y);
+ };
- int callback(int (*func)(int))
+ struct API api; /* global var */
+
+ int initialize_api(void)
{
- printf("Calling to Python, result: %d\n", func(3));
- }
-
- int main()
- {
+ static char source[] =
+ "import sys; sys.path.insert(0, '.'); "
+ "import interface; interface.fill_api(c_argument)";
int res;
- void *lib, *func;
rpython_startup_code();
res = pypy_setup_home("/opt/pypy/bin/libpypy-c.so", 1);
if (res) {
- printf("Error setting pypy home!\n");
+ fprintf(stderr, "Error setting pypy home!\n");
+ return -1;
+ }
+ res = pypy_execute_source_ptr(source, &api);
+ if (res) {
+ fprintf(stderr, "Error calling pypy_execute_source_ptr!\n");
+ return -1;
+ }
+ return 0;
+ }
+
+ int main(void)
+ {
+ if (initialize_api() < 0)
return 1;
- }
- res = pypy_execute_source_ptr(source, (void*)callback);
- if (res) {
- printf("Error calling pypy_execute_source_ptr!\n");
- }
- return res;
+
+ printf("sum: %f\n", api.add_numbers(12.3, 45.6));
+
+ return 0;
}
you can compile and run it with::
- fijal@hermann:/opt/pypy$ gcc -g -o x x.c -lpypy-c -L.
- fijal@hermann:/opt/pypy$ LD_LIBRARY_PATH=. ./x
- Got from C 3
- Calling to Python, result: 6
- finished the Python part
+ $ gcc -g -o x x.c -lpypy-c -L/opt/pypy/bin -I/opt/pypy/include
+ $ LD_LIBRARY_PATH=/opt/pypy/bin ./x
+ sum: 57.900000
-As you can see, we successfully managed to call Python from C and C from
-Python. Now having one callback might not be enough, so what typically happens
-is that we would pass a struct full of callbacks to ``pypy_execute_source_ptr``
-and fill the structure from Python side for the future use.
+As you can see, what we did is create a ``struct API`` that contains
+the custom API that we need in our particular case. This struct is
+filled by Python to contain a function pointer that is then called
+from the C side. It is also possible to have other function
+pointers that are filled by the C side and called by the Python side,
+or even non-function-pointer fields: basically, the two sides
+communicate via this single C structure that defines your API.
-Alternative example
--------------------
-
-As ``pypy_execute_source_ptr`` is not available in PyPy 2.2.1, you might want to try
-an alternative approach which relies on -export-dynamic flag to the GNU linker.
-The downside to this approach is that it is platform dependent.
-
-.. code-block:: c
-
- #include "include/PyPy.h"
- #include <stdio.h>
-
- char source[] = "from cffi import FFI\n\
- ffi = FFI()\n\
- @ffi.callback('int(int)')\n\
- def func(a):\n\
- print 'Got from C %d' % a\n\
- return a * 2\n\
- ffi.cdef('int callback(int (*func)(int));')\n\
- lib = ffi.verify('int callback(int (*func)(int));')\n\
- lib.callback(func)\n\
- print 'finished the Python part'\n\
- ";
-
- int callback(int (*func)(int))
- {
- printf("Calling to Python, result: %d\n", func(3));
- }
-
- int main()
- {
- int res;
- void *lib, *func;
-
- rpython_startup_code();
- res = pypy_setup_home("/opt/pypy/bin/libpypy-c.so", 1);
- if (res) {
- printf("Error setting pypy home!\n");
- return 1;
- }
- res = pypy_execute_source(source);
- if (res) {
- printf("Error calling pypy_execute_source!\n");
- }
- return res;
- }
-
-
-Make sure to pass -export-dynamic flag when compiling::
-
- $ gcc -g -o x x.c -lpypy-c -L. -export-dynamic
- $ LD_LIBRARY_PATH=. ./x
- Got from C 3
- Calling to Python, result: 6
- finished the Python part
Finding pypy_home
-----------------
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -59,6 +59,7 @@
exactly like `f(a, b)`.
.. branch: issue2018
+
branch issue2018:
Allow prebuilt rpython dict with function values
@@ -66,4 +67,50 @@
.. Merged but then backed out, hopefully it will return as vmprof2
.. branch: object-dtype2
+
+branch object-dtype2:
Extend numpy dtypes to allow using objects with associated garbage collection hook
+
+.. branch: vmprof2
+
+branch vmprof2:
+Add backend support for vmprof - a lightweight statistical profiler -
+to linux64, see client at https://vmprof.readthedocs.org
+
+.. branch: jit_hint_docs
+
+branch jit_hint_docs:
+Add more detail to @jit.elidable and @jit.promote in rpython/rlib/jit.py
+
+.. branch: remove-frame-debug-attrs
+
+branch remove-frame-debug-attrs:
+Remove the debug attributes from frames only used for tracing and replace
+them with a debug object that is created on-demand
+
+.. branch: can_cast
+
+branch can_cast:
+Implement np.can_cast, np.min_scalar_type and missing dtype comparison operations.
+
+.. branch: numpy-fixes
+
+branch numpy-fixes:
+Fix some errors related to object dtype, non-contiguous arrays, implement parts of
+__array_interface__, __array_priority__, __array_wrap__
+
+.. branch: cells-local-stack
+
+branch cells-local-stack:
+Unify the PyFrame.cells and PyFrame.locals_stack_w lists, making frame objects
+1 or 3 words smaller.
+
+.. branch: pythonoptimize-env
+
+branch pythonoptimize-env:
+Implement PYTHONOPTIMIZE environment variable, fixing issue #2044
+
+.. branch: numpy-flags
+
+branch numpy-flags:
+Finish implementation of ndarray.flags, including str() and repr()
diff --git a/pypy/goal/pypy.ico b/pypy/goal/pypy.ico
new file mode 100644
index 0000000000000000000000000000000000000000..09d07dcc5a783200f440c68c0987926a80d6b667
GIT binary patch
[cut]
diff --git a/pypy/goal/targetpypystandalone.py b/pypy/goal/targetpypystandalone.py
--- a/pypy/goal/targetpypystandalone.py
+++ b/pypy/goal/targetpypystandalone.py
@@ -245,6 +245,7 @@
config.translation.suggest(check_str_without_nul=True)
config.translation.suggest(shared=True)
+ config.translation.suggest(icon=os.path.join(this_dir, 'pypy.ico'))
if config.translation.shared:
if config.translation.output is not None:
raise Exception("Cannot use the --output option with PyPy "
diff --git a/pypy/interpreter/app_main.py b/pypy/interpreter/app_main.py
--- a/pypy/interpreter/app_main.py
+++ b/pypy/interpreter/app_main.py
@@ -13,7 +13,7 @@
-i : inspect interactively after running script; forces a prompt even
if stdin does not appear to be a terminal; also PYTHONINSPECT=x
-m mod : run library module as a script (terminates option list)
--O : skip assert statements
+-O : skip assert statements; also PYTHONOPTIMIZE=x
-OO : remove docstrings when importing modules in addition to -O
-q : don't print version and copyright messages on interactive startup
-R : ignored (see http://bugs.python.org/issue14621)
@@ -419,6 +419,21 @@
return function(options, funcarg, iterargv)
+def parse_env(name, key, options):
+ ''' Modify options inplace if name exists in os.environ
+ '''
+ import os
+ v = os.getenv(name)
+ if v:
+ options[key] = max(1, options[key])
+ try:
+ newval = int(v)
+ except ValueError:
+ pass
+ else:
+ newval = max(1, newval)
+ options[key] = max(options[key], newval)
+
def parse_command_line(argv):
import os
options = default_options.copy()
@@ -461,17 +476,15 @@
sys.argv[:] = argv
if not options["ignore_environment"]:
- if os.getenv('PYTHONDEBUG'):
- options["debug"] = 1
+ parse_env('PYTHONDEBUG', "debug", options)
if os.getenv('PYTHONDONTWRITEBYTECODE'):
options["dont_write_bytecode"] = 1
if os.getenv('PYTHONNOUSERSITE'):
options["no_user_site"] = 1
if os.getenv('PYTHONUNBUFFERED'):
options["unbuffered"] = 1
- if os.getenv('PYTHONVERBOSE'):
- options["verbose"] = 1
-
+ parse_env('PYTHONVERBOSE', "verbose", options)
+ parse_env('PYTHONOPTIMIZE', "optimize", options)
if (options["interactive"] or
(not options["ignore_environment"] and os.getenv('PYTHONINSPECT'))):
options["inspect"] = 1
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -12,7 +12,7 @@
INT_MIN, INT_MAX, UINT_MAX, USHRT_MAX
from pypy.interpreter.executioncontext import (ExecutionContext, ActionFlag,
- UserDelAction)
+ UserDelAction, CodeUniqueIds)
from pypy.interpreter.error import OperationError, new_exception_class, oefmt
from pypy.interpreter.argument import Arguments
from pypy.interpreter.miscutils import ThreadLocals, make_weak_value_dictionary
@@ -395,6 +395,7 @@
self.actionflag = ActionFlag() # changed by the signal module
self.check_signal_action = None # changed by the signal module
self.user_del_action = UserDelAction(self)
+ self.code_unique_ids = CodeUniqueIds()
self._code_of_sys_exc_info = None
# can be overridden to a subclass
@@ -684,6 +685,16 @@
assert ec is not None
return ec
+ def register_code_callback(self, callback):
+ cui = self.code_unique_ids
+ cui.code_callback = callback
+
+ def register_code_object(self, pycode):
+ cui = self.code_unique_ids
+ if cui.code_callback is None:
+ return
+ cui.code_callback(self, pycode)
+
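register_code_callback() and register_code_object() form a small hook API: one callback is stored on the space, and PyCode.__init__ (see the pycode.py hunk below) reports every new code object to it. A toy, runnable restatement of the wiring, where Space and CodeUniqueIds are simplified stand-ins for the real interpreter classes:

    class CodeUniqueIds(object):         # simplified copy of the class added below
        def __init__(self):
            self.code_unique_id = 0x7000000000000000
            self.code_callback = None

    class Space(object):                 # toy stand-in for the real ObjSpace
        def __init__(self):
            self.code_unique_ids = CodeUniqueIds()

        def register_code_callback(self, callback):
            self.code_unique_ids.code_callback = callback

        def register_code_object(self, pycode):
            cui = self.code_unique_ids
            if cui.code_callback is None:
                return
            cui.code_callback(self, pycode)

    space = Space()
    seen = []
    space.register_code_callback(lambda space, code: seen.append(code))
    space.register_code_object("fake code object")   # a PyCode in the real thing
    print(seen)                                      # ['fake code object']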
def _freeze_(self):
return True
@@ -1093,7 +1104,7 @@
def call_valuestack(self, w_func, nargs, frame):
from pypy.interpreter.function import Function, Method, is_builtin_code
- if frame.is_being_profiled and is_builtin_code(w_func):
+ if frame.get_is_being_profiled() and is_builtin_code(w_func):
# XXX: this code is copied&pasted :-( from the slow path below
# call_valuestack().
args = frame.make_arguments(nargs)
diff --git a/pypy/interpreter/executioncontext.py b/pypy/interpreter/executioncontext.py
--- a/pypy/interpreter/executioncontext.py
+++ b/pypy/interpreter/executioncontext.py
@@ -96,7 +96,7 @@
def _c_call_return_trace(self, frame, w_func, args, event):
if self.profilefunc is None:
- frame.is_being_profiled = False
+ frame.getorcreatedebug().is_being_profiled = False
else:
# undo the effect of the CALL_METHOD bytecode, which would be
# that even on a built-in method call like '[].append()',
@@ -114,7 +114,7 @@
def c_exception_trace(self, frame, w_exc):
"Profile function called upon OperationError."
if self.profilefunc is None:
- frame.is_being_profiled = False
+ frame.getorcreatedebug().is_being_profiled = False
else:
self._trace(frame, 'c_exception', w_exc)
@@ -123,7 +123,7 @@
if self.gettrace() is not None or self.profilefunc is not None:
self._trace(frame, 'call', self.space.w_None)
if self.profilefunc:
- frame.is_being_profiled = True
+ frame.getorcreatedebug().is_being_profiled = True
def return_trace(self, frame, w_retval):
"Trace the return from a function"
@@ -145,7 +145,7 @@
Like bytecode_trace() but doesn't invoke any other events besides the
trace function.
"""
- if (frame.w_f_trace is None or self.is_tracing or
+ if (frame.get_w_f_trace() is None or self.is_tracing or
self.gettrace() is None):
return
self.run_trace_func(frame)
@@ -154,8 +154,9 @@
@jit.unroll_safe
def run_trace_func(self, frame):
code = frame.pycode
- if frame.instr_lb <= frame.last_instr < frame.instr_ub:
- if frame.last_instr < frame.instr_prev_plus_one:
+ d = frame.getorcreatedebug()
+ if d.instr_lb <= frame.last_instr < d.instr_ub:
+ if frame.last_instr < d.instr_prev_plus_one:
# We jumped backwards in the same line.
self._trace(frame, 'line', self.space.w_None)
else:
@@ -170,7 +171,7 @@
break
addr += c
if c:
- frame.instr_lb = addr
+ d.instr_lb = addr
line += ord(lineno[p + 1])
p += 2
@@ -185,15 +186,15 @@
if ord(lineno[p + 1]):
break
p += 2
- frame.instr_ub = addr
+ d.instr_ub = addr
else:
- frame.instr_ub = sys.maxint
+ d.instr_ub = sys.maxint
- if frame.instr_lb == frame.last_instr: # At start of line!
- frame.f_lineno = line
+ if d.instr_lb == frame.last_instr: # At start of line!
+ d.f_lineno = line
self._trace(frame, 'line', self.space.w_None)
- frame.instr_prev_plus_one = frame.last_instr + 1
+ d.instr_prev_plus_one = frame.last_instr + 1
def bytecode_trace_after_exception(self, frame):
"Like bytecode_trace(), but without increasing the ticker."
@@ -291,7 +292,7 @@
frame = self.gettopframe_nohidden()
while frame:
if is_being_profiled:
- frame.is_being_profiled = True
+ frame.getorcreatedebug().is_being_profiled = True
frame = self.getnextframe_nohidden(frame)
def call_tracing(self, w_func, w_args):
@@ -312,7 +313,7 @@
if event == 'call':
w_callback = self.gettrace()
else:
- w_callback = frame.w_f_trace
+ w_callback = frame.get_w_f_trace()
if w_callback is not None and event != "leaveframe":
if operr is not None:
@@ -323,15 +324,16 @@
frame.fast2locals()
self.is_tracing += 1
try:
+ d = frame.getorcreatedebug()
try:
w_result = space.call_function(w_callback,
space.wrap(frame), space.wrap(event), w_arg)
if space.is_w(w_result, space.w_None):
- frame.w_f_trace = None
+ d.w_f_trace = None
else:
- frame.w_f_trace = w_result
+ d.w_f_trace = w_result
except:
self.settrace(space.w_None)
- frame.w_f_trace = None
+ d.w_f_trace = None
raise
finally:
self.is_tracing -= 1
@@ -586,3 +588,11 @@
# there is no list of length n: if n is large, then the GC
# will run several times while walking the list, but it will
# see lower and lower memory usage, with no lower bound of n.
+
+class CodeUniqueIds(object):
+ def __init__(self):
+ if sys.maxint == 2147483647:
+ self.code_unique_id = 0 # XXX this is wrong, it won't work on 32bit
+ else:
+ self.code_unique_id = 0x7000000000000000
+ self.code_callback = None
diff --git a/pypy/interpreter/function.py b/pypy/interpreter/function.py
--- a/pypy/interpreter/function.py
+++ b/pypy/interpreter/function.py
@@ -108,7 +108,7 @@
self)
for i in funccallunrolling:
if i < nargs:
- new_frame.locals_stack_w[i] = args_w[i]
+ new_frame.locals_cells_stack_w[i] = args_w[i]
return new_frame.run()
elif nargs >= 1 and fast_natural_arity == Code.PASSTHROUGHARGS1:
assert isinstance(code, gateway.BuiltinCodePassThroughArguments1)
@@ -174,7 +174,7 @@
self)
for i in xrange(nargs):
w_arg = frame.peekvalue(nargs-1-i)
- new_frame.locals_stack_w[i] = w_arg
+ new_frame.locals_cells_stack_w[i] = w_arg
return new_frame.run()
@@ -185,13 +185,13 @@
self)
for i in xrange(nargs):
w_arg = frame.peekvalue(nargs-1-i)
- new_frame.locals_stack_w[i] = w_arg
+ new_frame.locals_cells_stack_w[i] = w_arg
ndefs = len(self.defs_w)
start = ndefs - defs_to_load
i = nargs
for j in xrange(start, ndefs):
- new_frame.locals_stack_w[i] = self.defs_w[j]
+ new_frame.locals_cells_stack_w[i] = self.defs_w[j]
i += 1
return new_frame.run()
diff --git a/pypy/interpreter/pycode.py b/pypy/interpreter/pycode.py
--- a/pypy/interpreter/pycode.py
+++ b/pypy/interpreter/pycode.py
@@ -71,7 +71,8 @@
"CPython-style code objects."
_immutable_ = True
_immutable_fields_ = ["co_consts_w[*]", "co_names_w[*]", "co_varnames[*]",
- "co_freevars[*]", "co_cellvars[*]", "_args_as_cellvars[*]"]
+ "co_freevars[*]", "co_cellvars[*]",
+ "_args_as_cellvars[*]"]
def __init__(self, space, argcount, kwonlyargcount, nlocals, stacksize, flags,
code, consts, names, varnames, filename,
@@ -104,6 +105,7 @@
self.magic = magic
self._signature = cpython_code_signature(self)
self._initialize()
+ space.register_code_object(self)
def _initialize(self):
if self.co_cellvars:
@@ -146,6 +148,15 @@
from pypy.objspace.std.mapdict import init_mapdict_cache
init_mapdict_cache(self)
+ cui = self.space.code_unique_ids
+ self._unique_id = cui.code_unique_id
+ cui.code_unique_id += 4 # so we have two bits that we can mark stuff
+ # with
+
+ def _get_full_name(self):
+ return "py:%s:%d:%s" % (self.co_name, self.co_firstlineno,
+ self.co_filename)
+
def _cleanup_(self):
if (self.magic == cpython_magic and
'__pypy__' not in sys.builtin_module_names):
@@ -222,7 +233,7 @@
# speed hack
fresh_frame = jit.hint(frame, access_directly=True,
fresh_virtualizable=True)
- args.parse_into_scope(None, fresh_frame.locals_stack_w, func.name,
+ args.parse_into_scope(None, fresh_frame.locals_cells_stack_w, func.name,
sig, func.defs_w, func.w_kw_defs)
fresh_frame.init_cells()
return frame.run()
@@ -234,7 +245,7 @@
# speed hack
fresh_frame = jit.hint(frame, access_directly=True,
fresh_virtualizable=True)
- args.parse_into_scope(w_obj, fresh_frame.locals_stack_w, func.name,
+ args.parse_into_scope(w_obj, fresh_frame.locals_cells_stack_w, func.name,
sig, func.defs_w, func.w_kw_defs)
fresh_frame.init_cells()
return frame.run()
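The unique-id counter in _initialize() advances by 4 per code object, so the two low bits of every id stay zero and can later serve as tag bits (the "two bits that we can mark stuff with" comment), and _get_full_name() renders the "py:name:line:file" label. A hypothetical illustration of that arithmetic and naming, outside the interpreter:

    next_id = 0x7000000000000000         # 64-bit starting value, as in CodeUniqueIds

    def allocate_code_id():
        global next_id
        uid = next_id
        next_id += 4                     # low two bits stay 0, usable as tag bits
        return uid

    def full_name(co_name, co_firstlineno, co_filename):
        # same format as PyCode._get_full_name() above
        return "py:%s:%d:%s" % (co_name, co_firstlineno, co_filename)

    uid = allocate_code_id()
    MARK = 0x1                           # hypothetical marker stored in a low bit
    tagged = uid | MARK
    assert tagged & ~0x3 == uid          # stripping the two tag bits recovers the id
    print(full_name("f", 10, "example.py"))   # py:f:10:example.py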
diff --git a/pypy/interpreter/pyframe.py b/pypy/interpreter/pyframe.py
--- a/pypy/interpreter/pyframe.py
+++ b/pypy/interpreter/pyframe.py
@@ -23,6 +23,19 @@
globals()[op] = stdlib_opcode.opmap[op]
HAVE_ARGUMENT = stdlib_opcode.HAVE_ARGUMENT
+class FrameDebugData(object):
+ """ A small object that holds debug data for tracing
+ """
+ w_f_trace = None
+ instr_lb = 0
+ instr_ub = 0
+ instr_prev_plus_one = 0
+ f_lineno = 0 # current lineno for tracing
+ is_being_profiled = False
+ w_locals = None
+
+ def __init__(self, pycode):
+ self.f_lineno = pycode.co_firstlineno
class PyFrame(W_Root):
"""Represents a frame for a regular Python function
@@ -31,7 +44,8 @@
Public fields:
* 'space' is the object space this frame is running in
* 'code' is the PyCode object this frame runs
- * 'w_locals' is the locals dictionary to use
+ * 'w_locals' is the locals dictionary to use, if needed, stored on a
+ debug object
* 'w_globals' is the attached globals dictionary
* 'builtin' is the attached built-in module
* 'valuestack_w', 'blockstack', control the interpretation
@@ -49,13 +63,25 @@
last_instr = -1
last_exception = None
f_backref = jit.vref_None
- w_f_trace = None
- # For tracing
- instr_lb = 0
- instr_ub = 0
- instr_prev_plus_one = 0
- is_being_profiled = False
+
escaped = False # see mark_as_escaped()
+ debugdata = None
+
+ w_globals = None
+ pycode = None # code object executed by that frame
+ locals_cells_stack_w = None # the list of all locals, cells and the valuestack
+ valuestackdepth = 0 # number of items on valuestack
+ lastblock = None
+
+ # other fields:
+
+ # builtin - builtin cache, only if honor__builtins__ is True
+ # defaults to False
+
+ # there is also self.space which is removed by the annotator
+
+ # additionally JIT uses vable_token field that is representing
+ # frame current virtualizable state as seen by the JIT
def __init__(self, space, code, w_globals, outer_func):
if not we_are_translated():
@@ -65,12 +91,15 @@
assert isinstance(code, pycode.PyCode)
self.space = space
self.w_globals = w_globals
- self.w_locals = None
self.pycode = code
- self.locals_stack_w = [None] * (code.co_nlocals + code.co_stacksize)
- self.valuestackdepth = code.co_nlocals
- self.lastblock = None
- make_sure_not_resized(self.locals_stack_w)
+ ncellvars = len(code.co_cellvars)
+ nfreevars = len(code.co_freevars)
+ size = code.co_nlocals + ncellvars + nfreevars + code.co_stacksize
+ # the layout of this list is as follows:
+ # | local vars | cells | stack |
+ self.locals_cells_stack_w = [None] * size
+ self.valuestackdepth = code.co_nlocals + ncellvars + nfreevars
+ make_sure_not_resized(self.locals_cells_stack_w)
check_nonneg(self.valuestackdepth)
#
if space.config.objspace.honor__builtins__:
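The new flat locals_cells_stack_w list replaces the separate cells list and locals_stack_w, so the index arithmetic deserves spelling out. A small sketch of the "| local vars | cells | stack |" layout with made-up sizes (not taken from any real code object):

    # | local vars | cells (cellvars + freevars) | value stack |
    co_nlocals, ncellvars, nfreevars, co_stacksize = 3, 1, 1, 4   # made-up sizes

    size = co_nlocals + ncellvars + nfreevars + co_stacksize      # 9 slots in total
    locals_cells_stack_w = [None] * size

    # slots 0..2 -> fast locals, slots 3..4 -> Cell objects, slots 5..8 -> stack
    stackstart = co_nlocals + ncellvars + nfreevars
    valuestackdepth = stackstart         # empty value stack: depth == stackstart

    def cell_index(varindex):
        # mirrors PyFrame._getcell(): cells live right after the locals
        return varindex + co_nlocals

    def check_stack_index(index):
        # mirrors PyFrame._check_stack_index(): stack slots never dip below stackstart
        return index >= stackstart

    assert cell_index(0) == 3
    assert check_stack_index(valuestackdepth)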
@@ -78,7 +107,32 @@
# regular functions always have CO_OPTIMIZED and CO_NEWLOCALS.
# class bodies only have CO_NEWLOCALS.
self.initialize_frame_scopes(outer_func, code)
- self.f_lineno = code.co_firstlineno
+
+ def getdebug(self):
+ return self.debugdata
+
+ def getorcreatedebug(self):
+ if self.debugdata is None:
+ self.debugdata = FrameDebugData(self.pycode)
+ return self.debugdata
+
+ def get_w_f_trace(self):
+ d = self.getdebug()
+ if d is None:
+ return None
+ return d.w_f_trace
+
+ def get_is_being_profiled(self):
+ d = self.getdebug()
+ if d is None:
+ return False
+ return d.is_being_profiled
+
+ def get_w_locals(self):
+ d = self.getdebug()
+ if d is None:
+ return None
+ return d.w_locals
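The accessors above implement a lazy debug object: getdebug() returns None while nothing has needed tracing data, and getorcreatedebug() allocates the FrameDebugData on first use, so an ordinary frame pays one None field instead of the whole group of rarely-used tracing fields. A generic, standalone sketch of the same idea with illustrative names:

    class DebugData(object):             # rarely-needed fields, split off the frame
        w_f_trace = None
        is_being_profiled = False

    class Frame(object):
        debugdata = None                 # stays None for most frames

        def getdebug(self):
            return self.debugdata

        def getorcreatedebug(self):
            if self.debugdata is None:
                self.debugdata = DebugData()
            return self.debugdata

        def get_is_being_profiled(self):
            d = self.getdebug()          # the read path never allocates
            return False if d is None else d.is_being_profiled

    f = Frame()
    assert f.get_is_being_profiled() is False and f.debugdata is None
    f.getorcreatedebug().is_being_profiled = True    # write path allocates lazily
    assert f.get_is_being_profiled() is True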
def __repr__(self):
# NOT_RPYTHON: useful in tracebacks
@@ -86,6 +140,11 @@
self.__class__.__module__, self.__class__.__name__,
self.pycode, self.get_last_lineno())
+ def _getcell(self, varindex):
+ cell = self.locals_cells_stack_w[varindex + self.pycode.co_nlocals]
+ assert isinstance(cell, Cell)
+ return cell
+
def mark_as_escaped(self):
"""
Must be called on frames that are exposed to applevel, e.g. by
@@ -131,8 +190,6 @@
else:
return self.space.builtin
- _NO_CELLS = []
-
@jit.unroll_safe
def initialize_frame_scopes(self, outer_func, code):
# regular functions always have CO_OPTIMIZED and CO_NEWLOCALS.
@@ -142,17 +199,16 @@
flags = code.co_flags
if not (flags & pycode.CO_OPTIMIZED):
if flags & pycode.CO_NEWLOCALS:
- self.w_locals = self.space.newdict(module=True)
+ self.getorcreatedebug().w_locals = self.space.newdict(module=True)
else:
assert self.w_globals is not None
- self.w_locals = self.w_globals
+ self.getorcreatedebug().w_locals = self.w_globals
ncellvars = len(code.co_cellvars)
nfreevars = len(code.co_freevars)
if not nfreevars:
if not ncellvars:
- self.cells = self._NO_CELLS
- return # no self.cells needed - fast path
+ return # no cells needed - fast path
elif outer_func is None:
space = self.space
raise OperationError(space.w_TypeError,
@@ -165,11 +221,13 @@
if closure_size != nfreevars:
raise ValueError("code object received a closure with "
"an unexpected number of free variables")
- self.cells = [None] * (ncellvars + nfreevars)
+ index = code.co_nlocals
for i in range(ncellvars):
- self.cells[i] = Cell()
+ self.locals_cells_stack_w[index] = Cell()
+ index += 1
for i in range(nfreevars):
- self.cells[i + ncellvars] = outer_func.closure[i]
+ self.locals_cells_stack_w[index] = outer_func.closure[i]
+ index += 1
def run(self):
"""Start this frame's execution."""
@@ -233,14 +291,24 @@
# stack manipulation helpers
def pushvalue(self, w_object):
depth = self.valuestackdepth
- self.locals_stack_w[depth] = w_object
+ self.locals_cells_stack_w[depth] = w_object
self.valuestackdepth = depth + 1
+ def _check_stack_index(self, index):
+ # will be completely removed by the optimizer if only used in an assert
+ # and if asserts are disabled
+ code = self.pycode
+ ncellvars = len(code.co_cellvars)
+ nfreevars = len(code.co_freevars)
+ stackstart = code.co_nlocals + ncellvars + nfreevars
+ return index >= stackstart
+
def popvalue(self):
depth = self.valuestackdepth - 1
- assert depth >= self.pycode.co_nlocals, "pop from empty value stack"
- w_object = self.locals_stack_w[depth]
- self.locals_stack_w[depth] = None
+ assert self._check_stack_index(depth)
+ assert depth >= 0
+ w_object = self.locals_cells_stack_w[depth]
+ self.locals_cells_stack_w[depth] = None
self.valuestackdepth = depth
return w_object
@@ -266,25 +334,26 @@
def peekvalues(self, n):
values_w = [None] * n
base = self.valuestackdepth - n
- assert base >= self.pycode.co_nlocals
+ assert self._check_stack_index(base)
+ assert base >= 0
while True:
n -= 1
if n < 0:
break
- values_w[n] = self.locals_stack_w[base+n]
+ values_w[n] = self.locals_cells_stack_w[base+n]
return values_w
@jit.unroll_safe
def dropvalues(self, n):
n = hint(n, promote=True)
finaldepth = self.valuestackdepth - n
- assert finaldepth >= self.pycode.co_nlocals, (
- "stack underflow in dropvalues()")
+ assert self._check_stack_index(finaldepth)
+ assert finaldepth >= 0
while True:
n -= 1
if n < 0:
break
- self.locals_stack_w[finaldepth+n] = None
+ self.locals_cells_stack_w[finaldepth+n] = None
self.valuestackdepth = finaldepth
@jit.unroll_safe
@@ -311,34 +380,27 @@
# Contrast this with CPython where it's PEEK(-1).
index_from_top = hint(index_from_top, promote=True)
index = self.valuestackdepth + ~index_from_top
- assert index >= self.pycode.co_nlocals, (
- "peek past the bottom of the stack")
- return self.locals_stack_w[index]
+ assert self._check_stack_index(index)
+ assert index >= 0
+ return self.locals_cells_stack_w[index]
def settopvalue(self, w_object, index_from_top=0):
index_from_top = hint(index_from_top, promote=True)
index = self.valuestackdepth + ~index_from_top
- assert index >= self.pycode.co_nlocals, (
- "settop past the bottom of the stack")
- self.locals_stack_w[index] = w_object
+ assert self._check_stack_index(index)
+ assert index >= 0
+ self.locals_cells_stack_w[index] = w_object
@jit.unroll_safe
def dropvaluesuntil(self, finaldepth):
depth = self.valuestackdepth - 1
finaldepth = hint(finaldepth, promote=True)
+ assert finaldepth >= 0
while depth >= finaldepth:
- self.locals_stack_w[depth] = None
+ self.locals_cells_stack_w[depth] = None
depth -= 1
self.valuestackdepth = finaldepth
- def save_locals_stack(self):
- return self.locals_stack_w[:self.valuestackdepth]
-
- def restore_locals_stack(self, items_w):
- self.locals_stack_w[:len(items_w)] = items_w
- self.init_cells()
- self.dropvaluesuntil(len(items_w))
-
def make_arguments(self, nargs):
return Arguments(self.space, self.peekvalues(nargs))
@@ -361,24 +423,16 @@
w = space.wrap
nt = space.newtuple
- cells = self.cells
- if cells is None:
- w_cells = space.w_None
- else:
- w_cells = space.newlist([space.wrap(cell) for cell in cells])
-
- if self.w_f_trace is None:
+ if self.get_w_f_trace() is None:
f_lineno = self.get_last_lineno()
else:
- f_lineno = self.f_lineno
+ f_lineno = self.getorcreatedebug().f_lineno
nlocals = self.pycode.co_nlocals
- values_w = self.locals_stack_w[nlocals:self.valuestackdepth]
- w_valuestack = maker.slp_into_tuple_with_nulls(space, values_w)
+ values_w = self.locals_cells_stack_w
+ w_locals_cells_stack = maker.slp_into_tuple_with_nulls(space, values_w)
w_blockstack = nt([block._get_state_(space) for block in
self.get_blocklist()])
- w_fastlocals = maker.slp_into_tuple_with_nulls(
- space, self.locals_stack_w[:nlocals])
if self.last_exception is None:
w_exc_value = space.w_None
w_tb = space.w_None
@@ -386,11 +440,12 @@
w_exc_value = self.last_exception.get_w_value(space)
w_tb = w(self.last_exception.get_traceback())
+ d = self.getorcreatedebug()
tup_state = [
w(self.f_backref()),
w(self.get_builtin()),
w(self.pycode),
- w_valuestack,
+ w_locals_cells_stack,
w_blockstack,
w_exc_value, # last_exception
w_tb, #
@@ -398,16 +453,15 @@
w(self.last_instr),
w(self.frame_finished_execution),
w(f_lineno),
- w_fastlocals,
space.w_None, #XXX placeholder for f_locals
#f_restricted requires no additional data!
- space.w_None, ## self.w_f_trace, ignore for now
+ space.w_None,
- w(self.instr_lb), #do we need these three (that are for tracing)
- w(self.instr_ub),
- w(self.instr_prev_plus_one),
- w_cells,
+ w(d.instr_lb),
+ w(d.instr_ub),
+ w(d.instr_prev_plus_one),
+ w(self.valuestackdepth),
]
return nt(tup_state)
@@ -416,24 +470,20 @@
from pypy.module._pickle_support import maker # helper fns
from pypy.interpreter.pycode import PyCode
from pypy.interpreter.module import Module
- args_w = space.unpackiterable(w_args, 18)
- w_f_back, w_builtin, w_pycode, w_valuestack, w_blockstack, w_exc_value, w_tb,\
- w_globals, w_last_instr, w_finished, w_f_lineno, w_fastlocals, w_f_locals, \
- w_f_trace, w_instr_lb, w_instr_ub, w_instr_prev_plus_one, w_cells = args_w
+ args_w = space.unpackiterable(w_args, 17)
+ w_f_back, w_builtin, w_pycode, w_locals_cells_stack, w_blockstack, w_exc_value, w_tb,\
+ w_globals, w_last_instr, w_finished, w_f_lineno, w_f_locals, \
+ w_f_trace, w_instr_lb, w_instr_ub, w_instr_prev_plus_one, w_stackdepth = args_w
new_frame = self
pycode = space.interp_w(PyCode, w_pycode)
- if space.is_w(w_cells, space.w_None):
- closure = None
- cellvars = []
- else:
- from pypy.interpreter.nestedscope import Cell
- cells_w = space.unpackiterable(w_cells)
- cells = [space.interp_w(Cell, w_cell) for w_cell in cells_w]
- ncellvars = len(pycode.co_cellvars)
- cellvars = cells[:ncellvars]
- closure = cells[ncellvars:]
+ values_w = maker.slp_from_tuple_with_nulls(space, w_locals_cells_stack)
+ nfreevars = len(pycode.co_freevars)
+ closure = None
+ if nfreevars:
+ base = pycode.co_nlocals + len(pycode.co_cellvars)
+ closure = values_w[base: base + nfreevars]
# do not use the instance's __init__ but the base's, because we set
# everything like cells from here
@@ -451,9 +501,12 @@
assert space.interp_w(Module, w_builtin) is space.builtin
new_frame.set_blocklist([unpickle_block(space, w_blk)
for w_blk in space.unpackiterable(w_blockstack)])
- values_w = maker.slp_from_tuple_with_nulls(space, w_valuestack)
- for w_value in values_w:
- new_frame.pushvalue(w_value)
+ self.locals_cells_stack_w = values_w[:]
+ valuestackdepth = space.int_w(w_stackdepth)
+ if not self._check_stack_index(valuestackdepth):
+ raise OperationError(space.w_ValueError, space.wrap("invalid stackdepth"))
+ assert valuestackdepth >= 0
+ self.valuestackdepth = valuestackdepth
if space.is_w(w_exc_value, space.w_None):
new_frame.last_exception = None
else:
@@ -464,20 +517,17 @@
)
new_frame.last_instr = space.int_w(w_last_instr)
new_frame.frame_finished_execution = space.is_true(w_finished)
- new_frame.f_lineno = space.int_w(w_f_lineno)
- fastlocals_w = maker.slp_from_tuple_with_nulls(space, w_fastlocals)
- new_frame.locals_stack_w[:len(fastlocals_w)] = fastlocals_w
+ d = new_frame.getorcreatedebug()
+ d.f_lineno = space.int_w(w_f_lineno)
if space.is_w(w_f_trace, space.w_None):
- new_frame.w_f_trace = None
+ d.w_f_trace = None
else:
- new_frame.w_f_trace = w_f_trace
+ d.w_f_trace = w_f_trace
- new_frame.instr_lb = space.int_w(w_instr_lb) #the three for tracing
- new_frame.instr_ub = space.int_w(w_instr_ub)
- new_frame.instr_prev_plus_one = space.int_w(w_instr_prev_plus_one)
-
- self._setcellvars(cellvars)
+ d.instr_lb = space.int_w(w_instr_lb) #the three for tracing
+ d.instr_ub = space.int_w(w_instr_ub)
+ d.instr_prev_plus_one = space.int_w(w_instr_prev_plus_one)
def hide(self):
return self.pycode.hidden_applevel
@@ -492,10 +542,10 @@
scope_len = len(scope_w)
if scope_len > self.pycode.co_nlocals:
raise ValueError, "new fastscope is longer than the allocated area"
- # don't assign directly to 'locals_stack_w[:scope_len]' to be
+ # don't assign directly to 'locals_cells_stack_w[:scope_len]' to be
# virtualizable-friendly
for i in range(scope_len):
- self.locals_stack_w[i] = scope_w[i]
+ self.locals_cells_stack_w[i] = scope_w[i]
self.init_cells()
def getdictscope(self):
@@ -503,30 +553,31 @@
Get the locals as a dictionary
"""
self.fast2locals()
- return self.w_locals
+ return self.debugdata.w_locals
def setdictscope(self, w_locals):
"""
Initialize the locals from a dictionary.
"""
- self.w_locals = w_locals
+ self.getorcreatedebug().w_locals = w_locals
self.locals2fast()
@jit.unroll_safe
def fast2locals(self):
# Copy values from the fastlocals to self.w_locals
- if self.w_locals is None:
- self.w_locals = self.space.newdict()
+ d = self.getorcreatedebug()
+ if d.w_locals is None:
+ d.w_locals = self.space.newdict()
varnames = self.getcode().getvarnames()
for i in range(min(len(varnames), self.getcode().co_nlocals)):
name = varnames[i]
- w_value = self.locals_stack_w[i]
+ w_value = self.locals_cells_stack_w[i]
if w_value is not None:
- self.space.setitem_str(self.w_locals, name, w_value)
+ self.space.setitem_str(d.w_locals, name, w_value)
else:
w_name = self.space.wrap(name.decode('utf-8'))
try:
- self.space.delitem(self.w_locals, w_name)
+ self.space.delitem(d.w_locals, w_name)
except OperationError as e:
if not e.match(self.space, self.space.w_KeyError):
raise
@@ -539,19 +590,20 @@
freevarnames = freevarnames + self.pycode.co_freevars
for i in range(len(freevarnames)):
name = freevarnames[i]
- cell = self.cells[i]
+ cell = self._getcell(i)
try:
w_value = cell.get()
except ValueError:
pass
else:
- self.space.setitem_str(self.w_locals, name, w_value)
+ self.space.setitem_str(d.w_locals, name, w_value)
@jit.unroll_safe
def locals2fast(self):
# Copy values from self.w_locals to the fastlocals
- assert self.w_locals is not None
+ w_locals = self.getorcreatedebug().w_locals
+ assert w_locals is not None
varnames = self.getcode().getvarnames()
numlocals = self.getcode().co_nlocals
@@ -559,7 +611,7 @@
for i in range(min(len(varnames), numlocals)):
name = varnames[i]
- w_value = self.space.finditem_str(self.w_locals, name)
+ w_value = self.space.finditem_str(w_locals, name)
if w_value is not None:
new_fastlocals_w[i] = w_value
@@ -577,32 +629,29 @@
# into the locals dict used by the class.
for i in range(len(freevarnames)):
name = freevarnames[i]
- cell = self.cells[i]
- w_value = self.space.finditem_str(self.w_locals, name)
+ cell = self._getcell(i)
+ w_value = self.space.finditem_str(w_locals, name)
if w_value is not None:
cell.set(w_value)
@jit.unroll_safe
def init_cells(self):
"""
- Initialize cellvars from self.locals_stack_w.
+ Initialize cellvars from self.locals_cells_stack_w.
"""
args_to_copy = self.pycode._args_as_cellvars
+ index = self.pycode.co_nlocals
for i in range(len(args_to_copy)):
argnum = args_to_copy[i]
if argnum >= 0:
- self.cells[i].set(self.locals_stack_w[argnum])
+ cell = self.locals_cells_stack_w[index]
+ assert isinstance(cell, Cell)
+ cell.set(self.locals_cells_stack_w[argnum])
+ index += 1
def getclosure(self):
return None
- def _setcellvars(self, cellvars):
- ncellvars = len(self.pycode.co_cellvars)
- if len(cellvars) != ncellvars:
- raise OperationError(self.space.w_TypeError,
- self.space.wrap("bad cellvars"))
- self.cells[:ncellvars] = cellvars
-
def fget_code(self, space):
return space.wrap(self.getcode())
@@ -613,10 +662,10 @@
def fget_f_lineno(self, space):
"Returns the line number of the instruction currently being executed."
- if self.w_f_trace is None:
+ if self.get_w_f_trace() is None:
return space.wrap(self.get_last_lineno())
else:
- return space.wrap(self.f_lineno)
+ return space.wrap(self.getorcreatedebug().f_lineno)
def fset_f_lineno(self, space, w_new_lineno):
"Returns the line number of the instruction currently being executed."
@@ -626,7 +675,7 @@
raise OperationError(space.w_ValueError,
space.wrap("lineno must be an integer"))
- if self.w_f_trace is None:
+ if self.get_w_f_trace() is None:
raise OperationError(space.w_ValueError,
space.wrap("f_lineno can only be set by a trace function."))
@@ -745,7 +794,7 @@
block.cleanup(self)
f_iblock -= 1
- self.f_lineno = new_lineno
+ self.getorcreatedebug().f_lineno = new_lineno
self.last_instr = new_lasti
def get_last_lineno(self):
@@ -763,17 +812,18 @@
return self.space.wrap(self.last_instr)
def fget_f_trace(self, space):
- return self.w_f_trace
+ return self.get_w_f_trace()
def fset_f_trace(self, space, w_trace):
if space.is_w(w_trace, space.w_None):
- self.w_f_trace = None
+ self.getorcreatedebug().w_f_trace = None
else:
- self.w_f_trace = w_trace
- self.f_lineno = self.get_last_lineno()
+ d = self.getorcreatedebug()
+ d.w_f_trace = w_trace
+ d.f_lineno = self.get_last_lineno()
def fdel_f_trace(self, space):
- self.w_f_trace = None
+ self.getorcreatedebug().w_f_trace = None
def fget_f_exc_type(self, space):
if self.last_exception is not None:
diff --git a/pypy/interpreter/pyopcode.py b/pypy/interpreter/pyopcode.py
--- a/pypy/interpreter/pyopcode.py
+++ b/pypy/interpreter/pyopcode.py
@@ -114,14 +114,14 @@
# dispatch_bytecode(), causing the real exception to be
# raised after the exception handler block was popped.
try:
- trace = self.w_f_trace
+ trace = self.get_w_f_trace()
if trace is not None:
- self.w_f_trace = None
+ self.getorcreatedebug().w_f_trace = None
try:
ec.bytecode_trace_after_exception(self)
finally:
if trace is not None:
- self.w_f_trace = trace
+ self.getorcreatedebug().w_f_trace = trace
except OperationError, e:
operr = e
pytraceback.record_application_traceback(
@@ -455,7 +455,7 @@
def LOAD_FAST(self, varindex, next_instr):
# access a local variable directly
- w_value = self.locals_stack_w[varindex]
+ w_value = self.locals_cells_stack_w[varindex]
if w_value is None:
self._load_fast_failed(varindex)
self.pushvalue(w_value)
@@ -475,7 +475,7 @@
def STORE_FAST(self, varindex, next_instr):
w_newvalue = self.popvalue()
assert w_newvalue is not None
- self.locals_stack_w[varindex] = w_newvalue
+ self.locals_cells_stack_w[varindex] = w_newvalue
def getfreevarname(self, index):
freevarnames = self.pycode.co_cellvars + self.pycode.co_freevars
@@ -487,7 +487,7 @@
def LOAD_DEREF(self, varindex, next_instr):
# nested scopes: access a variable through its cell object
- cell = self.cells[varindex]
+ cell = self._getcell(varindex)
try:
w_value = cell.get()
except ValueError:
@@ -498,11 +498,11 @@
def STORE_DEREF(self, varindex, next_instr):
# nested scopes: access a variable through its cell object
w_newvalue = self.popvalue()
- cell = self.cells[varindex]
+ cell = self._getcell(varindex)
cell.set(w_newvalue)
def DELETE_DEREF(self, varindex, next_instr):
- cell = self.cells[varindex]
+ cell = self._getcell(varindex)
try:
cell.get()
except ValueError:
@@ -523,7 +523,7 @@
def LOAD_CLOSURE(self, varindex, next_instr):
# nested scopes: access the cell object
- cell = self.cells[varindex]
+ cell = self._getcell(varindex)
w_value = self.space.wrap(cell)
self.pushvalue(w_value)
@@ -684,10 +684,10 @@
raise operror
def LOAD_LOCALS(self, oparg, next_instr):
- self.pushvalue(self.w_locals)
+ self.pushvalue(self.getorcreatedebug().w_locals)
def STORE_LOCALS(self, oparg, next_instr):
- self.w_locals = self.popvalue()
+ self.getorcreatedebug().w_locals = self.popvalue()
def exec_(self, w_prog, w_globals, w_locals):
"""The builtins.exec function."""
@@ -709,8 +709,8 @@
space.call_method(w_globals, 'setdefault', space.wrap('__builtins__'),
space.wrap(self.get_builtin()))
- plain = (self.w_locals is not None and
- space.is_w(w_locals, self.w_locals))
+ plain = (self.get_w_locals() is not None and
+ space.is_w(w_locals, self.get_w_locals()))
if plain:
w_locals = self.getdictscope()
code.exec_code(space, w_globals, w_locals)
@@ -761,12 +761,13 @@
def STORE_NAME(self, varindex, next_instr):
varname = self.getname_u(varindex)
w_newvalue = self.popvalue()
- self.space.setitem_str(self.w_locals, varname, w_newvalue)
+ self.space.setitem_str(self.getorcreatedebug().w_locals, varname,
+ w_newvalue)
def DELETE_NAME(self, varindex, next_instr):
w_varname = self.getname_w(varindex)
try:
- self.space.delitem(self.w_locals, w_varname)
+ self.space.delitem(self.getorcreatedebug().w_locals, w_varname)
except OperationError, e:
# catch KeyErrors and turn them into NameErrors
if not e.match(self.space, self.space.w_KeyError):
@@ -834,8 +835,9 @@
def LOAD_NAME(self, nameindex, next_instr):
w_varname = self.getname_w(nameindex)
varname = self.space.identifier_w(w_varname)
- if self.w_locals is not self.w_globals:
- w_value = self.space.finditem_str(self.w_locals, varname)
+ if self.getorcreatedebug().w_locals is not self.w_globals:
+ w_value = self.space.finditem_str(self.getorcreatedebug().w_locals,
+ varname)
if w_value is not None:
self.pushvalue(w_value)
return
@@ -868,12 +870,12 @@
LOAD_GLOBAL._always_inline_ = True
def DELETE_FAST(self, varindex, next_instr):
- if self.locals_stack_w[varindex] is None:
+ if self.locals_cells_stack_w[varindex] is None:
varname = self.getlocalvarname(varindex)
raise oefmt(self.space.w_UnboundLocalError,
"local variable '%s' referenced before assignment",
varname)
- self.locals_stack_w[varindex] = None
+ self.locals_cells_stack_w[varindex] = None
def BUILD_TUPLE(self, itemcount, next_instr):
items = self.popvalues(itemcount)
@@ -971,7 +973,11 @@
if w_import is None:
raise OperationError(space.w_ImportError,
space.wrap("__import__ not found"))
- w_locals = self.w_locals
+ d = self.getdebug()
+ if d is None:
+ w_locals = None
+ else:
+ w_locals = d.w_locals
if w_locals is None: # CPython does this
w_locals = space.w_None
w_globals = self.w_globals
@@ -1148,7 +1154,7 @@
args = self.argument_factory(arguments, keywords, keywords_w, w_star,
w_starstar)
w_function = self.popvalue()
- if self.is_being_profiled and function.is_builtin_code(w_function):
+ if self.get_is_being_profiled() and function.is_builtin_code(w_function):
w_result = self.space.call_args_and_c_profile(self, w_function,
args)
else:
diff --git a/pypy/interpreter/test/test_app_main.py b/pypy/interpreter/test/test_app_main.py
--- a/pypy/interpreter/test/test_app_main.py
+++ b/pypy/interpreter/test/test_app_main.py
@@ -181,6 +181,11 @@
self.check([], {'PYTHONNOUSERSITE': '1'}, sys_argv=[''], run_stdin=True, no_user_site=1)
self.check([], {'PYTHONUNBUFFERED': '1'}, sys_argv=[''], run_stdin=True, unbuffered=1)
self.check([], {'PYTHONVERBOSE': '1'}, sys_argv=[''], run_stdin=True, verbose=1)
+ self.check([], {'PYTHONOPTIMIZE': '1'}, sys_argv=[''], run_stdin=True, optimize=1)
+ self.check([], {'PYTHONOPTIMIZE': '0'}, sys_argv=[''], run_stdin=True, optimize=1)
+ self.check([], {'PYTHONOPTIMIZE': '10'}, sys_argv=[''], run_stdin=True, optimize=10)
+ self.check(['-O'], {'PYTHONOPTIMIZE': '10'}, sys_argv=[''], run_stdin=True, optimize=10)
+ self.check(['-OOO'], {'PYTHONOPTIMIZE': 'abc'}, sys_argv=[''], run_stdin=True, optimize=3)
def test_sysflags(self):
flags = (
diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py
--- a/pypy/interpreter/typedef.py
+++ b/pypy/interpreter/typedef.py
@@ -548,7 +548,7 @@
__objclass__ = GetSetProperty(GetSetProperty.descr_get_objclass),
__doc__ = interp_attrproperty('doc', cls=GetSetProperty),
)
-GetSetProperty.typedef.acceptable_as_base_class = False
+assert not GetSetProperty.typedef.acceptable_as_base_class # no __new__
class Member(W_Root):
@@ -602,7 +602,7 @@
__name__ = interp_attrproperty('name', cls=Member),
__objclass__ = interp_attrproperty_w('w_cls', cls=Member),
)
-Member.typedef.acceptable_as_base_class = False
+assert not Member.typedef.acceptable_as_base_class # no __new__
# ____________________________________________________________
@@ -722,7 +722,7 @@
co_flags = GetSetProperty(fget_co_flags, cls=Code),
co_consts = GetSetProperty(fget_co_consts, cls=Code),
)
-Code.typedef.acceptable_as_base_class = False
+assert not Code.typedef.acceptable_as_base_class # no __new__
BuiltinCode.typedef = TypeDef('builtin-code',
__reduce__ = interp2app(BuiltinCode.descr__reduce__),
@@ -733,7 +733,7 @@
co_flags = GetSetProperty(fget_co_flags, cls=BuiltinCode),
co_consts = GetSetProperty(fget_co_consts, cls=BuiltinCode),
)
-BuiltinCode.typedef.acceptable_as_base_class = False
+assert not BuiltinCode.typedef.acceptable_as_base_class # no __new__
PyCode.typedef = TypeDef('code',
@@ -779,7 +779,7 @@
f_locals = GetSetProperty(PyFrame.fget_getdictscope),
f_globals = interp_attrproperty_w('w_globals', cls=PyFrame),
)
-PyFrame.typedef.acceptable_as_base_class = False
+assert not PyFrame.typedef.acceptable_as_base_class # no __new__