Author: Armin Rigo <[email protected]>
Branch: py3.5-scandir
Changeset: r86704:fc302e9992ff
Date: 2016-08-29 17:13 +0200
http://bitbucket.org/pypy/pypy/changeset/fc302e9992ff/
Log: hg merge py3.5
diff too long, truncating to 2000 out of 6469 lines
diff --git a/.hgtags b/.hgtags
--- a/.hgtags
+++ b/.hgtags
@@ -27,3 +27,6 @@
40497617ae91caa1a394d8be6f9cd2de31cb0628 release-pypy3.3-v5.2
c09c19272c990a0611b17569a0085ad1ab00c8ff release-pypy2.7-v5.3
7e8df3df96417c16c2d55b41352ec82c9c69c978 release-pypy2.7-v5.3.1
+68bb3510d8212ae9efb687e12e58c09d29e74f87 release-pypy2.7-v5.4.0
+68bb3510d8212ae9efb687e12e58c09d29e74f87 release-pypy2.7-v5.4.0
+77392ad263504df011ccfcabf6a62e21d04086d0 release-pypy2.7-v5.4.0
diff --git a/lib-python/3/importlib/_bootstrap_external.py
b/lib-python/3/importlib/_bootstrap_external.py
--- a/lib-python/3/importlib/_bootstrap_external.py
+++ b/lib-python/3/importlib/_bootstrap_external.py
@@ -228,7 +228,14 @@
# longer be understood by older implementations of the eval loop (usually
# due to the addition of new opcodes).
-MAGIC_NUMBER = (3350).to_bytes(2, 'little') + b'\r\n'
+# MAGIC_NUMBER = (3350).to_bytes(2, 'little') + b'\r\n'
+#
+# PyPy change: the MAGIC_NUMBER is defined in
+# pypy/interpreter/pycode.py, 'default_magic'. It is based on a number
+# different than CPython's, always < 3000. We get the 4-bytes string
+# here via a hack: MAGIC_NUMBER is set in the module from
+# module/_frozen_importlib/__init__.py before the module is executed.
+
_RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c
_PYCACHE = '__pycache__'
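
As a rough sketch of the MAGIC_NUMBER hack described in the comment above: a
small magic integer (always below 3000 in PyPy, defined as 'default_magic' in
pypy/interpreter/pycode.py) is packed into the same 4-byte form that
_RAW_MAGIC_NUMBER later reads back. The value 240 below is a made-up
placeholder, not PyPy's real magic.

    # illustrative only: how a small default_magic becomes MAGIC_NUMBER
    default_magic = 240    # placeholder; the real value lives in pycode.py
    MAGIC_NUMBER = default_magic.to_bytes(2, 'little') + b'\r\n'
    _RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little')
    assert _RAW_MAGIC_NUMBER & 0xFFFF == default_magic
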
diff --git a/lib_pypy/_ctypes/basics.py b/lib_pypy/_ctypes/basics.py
--- a/lib_pypy/_ctypes/basics.py
+++ b/lib_pypy/_ctypes/basics.py
@@ -166,8 +166,8 @@
else:
return self.value
- def __buffer__(self):
- return memoryview(self._buffer)
+ def __buffer__(self, flags):
+ return buffer(self._buffer)
def _get_b_base(self):
try:
diff --git a/lib_pypy/_curses.py b/lib_pypy/_curses.py
--- a/lib_pypy/_curses.py
+++ b/lib_pypy/_curses.py
@@ -554,6 +554,9 @@
def putwin(self, filep):
# filestar = ffi.new("FILE *", filep)
return _check_ERR(lib.putwin(self._win, filep), "putwin")
+ # XXX CPython 3.5 says: We have to simulate this by writing to
+ # a temporary FILE*, then reading back, then writing to the
+ # argument stream.
def redrawln(self, beg, num):
return _check_ERR(lib.wredrawln(self._win, beg, num), "redrawln")
@@ -704,6 +707,7 @@
def getwin(filep):
+ # XXX CPython 3.5: there's logic to use a temp file instead
return Window(_check_NULL(lib.getwin(filep)))
diff --git a/lib_pypy/_pypy_interact.py b/lib_pypy/_pypy_interact.py
--- a/lib_pypy/_pypy_interact.py
+++ b/lib_pypy/_pypy_interact.py
@@ -49,9 +49,11 @@
if mainmodule is None:
import __main__ as mainmodule
console = code.InteractiveConsole(mainmodule.__dict__, filename='<stdin>')
- # some parts of code.py are copied here because it seems to be impossible
+ # some parts of code.py are copied here because it was impossible
# to start an interactive console without printing at least one line
- # of banner
+ # of banner. This was fixed in 3.4; but then from 3.6 it prints a
+ # line when exiting. This can be disabled too---by passing an argument
+ # that doesn't exist in <= 3.5. So, too much mess: just copy the code.
more = 0
while 1:
try:
diff --git a/lib_pypy/cffi/recompiler.py b/lib_pypy/cffi/recompiler.py
--- a/lib_pypy/cffi/recompiler.py
+++ b/lib_pypy/cffi/recompiler.py
@@ -515,7 +515,7 @@
tovar, errcode)
return
#
- elif isinstance(tp, (model.StructOrUnion, model.EnumType)):
+ elif isinstance(tp, model.StructOrUnionOrEnum):
# a struct (not a struct pointer) as a function argument
self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)'
% (tovar, self._gettypenum(tp), fromvar))
@@ -572,7 +572,7 @@
elif isinstance(tp, model.ArrayType):
return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
var, self._gettypenum(model.PointerType(tp.item)))
- elif isinstance(tp, model.StructType):
+ elif isinstance(tp, model.StructOrUnion):
if tp.fldnames is None:
raise TypeError("'%s' is used as %s, but is opaque" % (
tp._get_c_name(), context))
diff --git a/lib_pypy/cffi/vengine_cpy.py b/lib_pypy/cffi/vengine_cpy.py
--- a/lib_pypy/cffi/vengine_cpy.py
+++ b/lib_pypy/cffi/vengine_cpy.py
@@ -308,7 +308,7 @@
elif isinstance(tp, model.ArrayType):
return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % (
var, self._gettypenum(model.PointerType(tp.item)))
- elif isinstance(tp, model.StructType):
+ elif isinstance(tp, model.StructOrUnion):
if tp.fldnames is None:
raise TypeError("'%s' is used as %s, but is opaque" % (
tp._get_c_name(), context))
diff --git a/pypy/doc/conf.py b/pypy/doc/conf.py
--- a/pypy/doc/conf.py
+++ b/pypy/doc/conf.py
@@ -58,16 +58,16 @@
# General information about the project.
project = u'PyPy'
-copyright = u'2015, The PyPy Project'
+copyright = u'2016, The PyPy Project'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
-version = '4.0'
+version = '5.4'
# The full version, including alpha/beta/rc tags.
-release = '4.0.0'
+release = '5.4.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/pypy/doc/index-of-release-notes.rst
b/pypy/doc/index-of-release-notes.rst
--- a/pypy/doc/index-of-release-notes.rst
+++ b/pypy/doc/index-of-release-notes.rst
@@ -6,6 +6,7 @@
.. toctree::
+ release-pypy2.7-v5.4.0.rst
release-pypy2.7-v5.3.1.rst
release-pypy2.7-v5.3.0.rst
release-5.1.1.rst
diff --git a/pypy/doc/index-of-whatsnew.rst b/pypy/doc/index-of-whatsnew.rst
--- a/pypy/doc/index-of-whatsnew.rst
+++ b/pypy/doc/index-of-whatsnew.rst
@@ -7,6 +7,7 @@
.. toctree::
whatsnew-head.rst
+ whatsnew-pypy2-5.4.0.rst
whatsnew-pypy2-5.3.1.rst
whatsnew-pypy2-5.3.0.rst
whatsnew-5.1.0.rst
diff --git a/pypy/doc/project-ideas.rst b/pypy/doc/project-ideas.rst
--- a/pypy/doc/project-ideas.rst
+++ b/pypy/doc/project-ideas.rst
@@ -57,7 +57,7 @@
--------------
Our cpyext C-API compatibility layer can now run upstream NumPy unmodified.
-Release PyPy2.7-v5.3 still fails about 200 of the ~6000 test in the NumPy
+Release PyPy2.7-v5.4 still fails about 60 of the ~6000 tests in the NumPy
test suite. We could use help analyzing the failures and fixing them either
as patches to upstream NumPy, or as fixes to PyPy.
diff --git a/pypy/doc/release-pypy2.7-v5.4.0.rst
b/pypy/doc/release-pypy2.7-v5.4.0.rst
new file mode 100644
--- /dev/null
+++ b/pypy/doc/release-pypy2.7-v5.4.0.rst
@@ -0,0 +1,219 @@
+============
+PyPy2.7 v5.4
+============
+
+We have released PyPy2.7 v5.4, a little under two months after PyPy2.7 v5.3.
+This new PyPy2.7 release includes further improvements to our C-API
+compatibility layer (cpyext), enabling us to pass over 99% of the upstream
+numpy `test suite`_. We updated built-in cffi_ support to version 1.8,
+which now supports the "limited API" mode for C extensions on
+CPython >=3.2.
+
+We improved tooling for the PyPy JIT_, and expanded VMProf
+support to OpenBSD and DragonFly BSD.
+
+As always, this release fixed many issues and bugs raised by the
+growing community of PyPy users.
+
+XXXXX MORE ???
+
+You can download the PyPy2.7 v5.4 release here:
+
+ http://pypy.org/download.html
+
+We would like to thank our donors for the continued support of the PyPy
+project.
+
+We would also like to thank our contributors and
+encourage new people to join the project. PyPy has many
+layers and we need help with all of them: `PyPy`_ and `RPython`_ documentation
+improvements, tweaking popular `modules`_ to run on pypy, or general `help`_
+with making RPython's JIT even better.
+
+.. _`test suite`: https://bitbucket.org/pypy/pypy/wiki/Adventures%20in%20cpyext%20compatibility
+.. _cffi: https://cffi.readthedocs.org
+.. _JIT: https://morepypy.blogspot.com.au/2016/08/pypy-tooling-upgrade-jitviewer-and.html
+.. _`PyPy`: http://doc.pypy.org
+.. _`RPython`: https://rpython.readthedocs.org
+.. _`modules`: http://doc.pypy.org/en/latest/project-ideas.html#make-more-python-modules-pypy-friendly
+.. _`help`: http://doc.pypy.org/en/latest/project-ideas.html
+
+What is PyPy?
+=============
+
+PyPy is a very compliant Python interpreter, almost a drop-in replacement for
+CPython 2.7. It's fast (`PyPy and CPython 2.7.x`_ performance comparison)
+due to its integrated tracing JIT compiler.
+
+We also welcome developers of other `dynamic languages`_ to see what RPython
+can do for them.
+
+This release supports:
+
+ * **x86** machines on most common operating systems
+ (Linux 32/64 bits, Mac OS X 64 bits, Windows 32 bits, OpenBSD, FreeBSD)
+
+ * newer **ARM** hardware (ARMv6 or ARMv7, with VFPv3) running Linux,
+
+ * big- and little-endian variants of **PPC64** running Linux,
+
+ * **s390x** running Linux
+
+.. _`PyPy and CPython 2.7.x`: http://speed.pypy.org
+.. _`dynamic languages`: http://pypyjs.org
+
+Other Highlights (since 5.3 released in June 2016)
+=========================================================
+
+* New features:
+
+ * Add `sys.{get,set}dlopenflags`
+
+ * Improve CPython compatibility of 'is' for small and empty strings
+
+ * Support for rgc.FinalizerQueue in the Boehm garbage collector
+
+ * (RPython) support spawnv(), whose C-level name is `_spawnv` on Windows
+
+ * Fill in more slots when creating a PyTypeObject from a W_TypeObject,
+ like `__hex__`, `__sub__`, `__pow__`
+
+ * Copy CPython's logic more closely for `isinstance()` and
+ `issubclass()` as well as `type.__instancecheck__()` and
+ `type.__subclasscheck__()`
+
+ * Expose the name of CDLL objects
+
+ * Rewrite the win32 dependencies of `subprocess` to use cffi
+ instead of ctypes
+
+ * Improve the `JIT logging`_ facilities
+
+ * (RPython) make int * string work
+
+ * Allocate all RPython strings with one extra byte, normally
+ unused. This now allows `ffi.from_buffer(string)` in CFFI with
+ no copy
+
+ * Add a new command-line option `-X track-resources` that will
+ produce a `ResourceWarning` when the GC closes a file or socket.
+ The traceback for the place where the file or socket was allocated
+ is given as well, which aids finding places where `close()` is
+ missing
+
+ * Add missing `PyObject_Realloc`, `PySequence_GetSlice`
+
+ * `type.__dict__` now returns a `dict_proxy` object, like on CPython.
+ Previously it returned what looked like a regular dict object (but
+ it was already read-only)
+
+ * (RPython) add `rposix.{get,set}_inheritable()`, needed by Python 3.5
+
+ * (RPython) add `rposix_scandir` portably, needed for Python 3.5
+
+ * Support for memoryview attributes (format, itemsize, ...) which also
+ adds support for `PyMemoryView_FromObject`
+
+* Bug Fixes
+
+ * Reject `mkdir()` in read-only sandbox filesystems
+
+ * Add include guards to pymem.h to enable c++ compilation
+
+ * Fix build breakage on OpenBSD and FreeBSD
+
+ * Support OpenBSD and DragonFly BSD in VMProf
+
+ * Fix `bytearray('').replace('a', 'ab')` for empty strings
+
+ * Sync internal state before calling `PyFile_AsFile()`
+
+ * Allow writing to a char* from `PyString_AsString()` until it is
+ forced, also refactor `PyStringObject` to look like CPython's
+ and allow subclassing `PyString_Type` and `PyUnicode_Type`
+
+ * RPython rffi's socket(2) wrapper did not preserve errno
+
+ * Refactor `PyTupleObject` to look like CPython's and allow
+ subclassing `PyTuple_Type`
+
+ * Allow C-level assignment to a function pointer in a C-API
+ user-defined type after calling PyType_Ready, by retrieving
+ a pointer to the function via offsets
+ rather than storing the function pointer itself
+
+ * Use `madvise(MADV_FREE)`, or if that doesn't exist
+ `MADV_DONTNEED` on freed arenas to release memory back to the
+ OS for resource monitoring
+
+ * Fix overflow detection in conversion of float to 64-bit integer
+ in timeout argument to various thread/threading primitives
+
+ * Fix win32 outputting `\r\r\n` in some cases
+
+ * Make `hash(-1)` return -2, as CPython does, and fix all the
+ ancillary places where this matters
+
+ * Issues reported with our previous release were resolved_ after
+ reports from users on our issue tracker at
+ https://bitbucket.org/pypy/pypy/issues or on IRC at #pypy
+
+ * Fix `PyNumber_Check()` to behave more like CPython
+
+ * (VMProf) Try hard to not miss any Python-level frame in the
+ captured stacks, even if there is metainterp or blackhole interp
+ involved. Also fix the stacklet (greenlet) support
+
+ * Fix a critical JIT bug where `raw_malloc` -equivalent functions
+ lost the additional flags
+
+ * Fix the mapdict cache for subclasses of builtin types that
+ provide a dict
+
+* Performance improvements:
+
+ * Add a before_call()-like equivalent before a few operations like
+ `malloc_nursery`, to move values from registers into other registers
+ instead of to the stack.
+
+ * More tightly pack the stack when calling with `release gil`
+
+ * Support `int_floordiv()`, `int_mod()` in the JIT more efficiently
+ and add `rarithmetic.int_c_div()`, `rarithmetic.int_c_mod()` as
+ explicit interfaces. Clarify that `int_floordiv()` does python-style
+ rounding, unlike `llop.int_floordiv()`.
+
+ * Use `ll_assert` (more often) in incminimark
+
+ * (Testing) Simplify handling of interp-level tests and make it
+ more forward-compatible. Don't use interp-level RPython
+ machinery to test building app-level extensions in cpyext
+
+ * Constant-fold `ffi.offsetof("structname", "fieldname")` in cffi
+ backend
+
+ * Avoid a case in the JIT, where successive guard failures in
+ the same Python function end up as successive levels of
+ RPython functions, eventually exhausting the stack, while at
+ app-level the traceback is very short
+
+ * Check for NULL returns from calls to raw-malloc and raise,
+ rather than using a guard
+
+ * Improve `socket.recvfrom()` so that it copies less if possible
+
+ * When generating C code, inline `goto` to blocks with only one
+ predecessor, generating fewer lines of code
+
+ * When running the final backend-optimization phase before emitting
+ C code, constant-fold calls to we_are_jitted to return False. This
+ makes the generated C code a few percent smaller
+
+ * Refactor the `uid_t/gid_t` handling in `rlib.rposix` and in
+ `interp_posix.py`, based on the clean-up of CPython 2.7.x
+
+.. _`JIT logging`: https://morepypy.blogspot.com/2016/08/pypy-tooling-upgrade-jitviewer-and.html
+.. _resolved: http://doc.pypy.org/en/latest/whatsnew-5.4.0.html
+
+Please update, and continue to help us make PyPy better.
+
+Cheers
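
As a small, hypothetical illustration of the `-X track-resources` option
described in the notes above (the exact warning text may differ):

    # run with:  pypy -X track-resources leak.py
    # When the GC, rather than the program, ends up closing the file, a
    # ResourceWarning is emitted together with the traceback of the place
    # where the file was opened.
    import gc

    def read_first_line(path):
        f = open(path)           # never closed explicitly
        return f.readline()

    read_first_line('/etc/hostname')   # any existing file works here
    gc.collect()                       # the forgotten file is closed here
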
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -1,146 +1,8 @@
-=========================
-What's new in PyPy2.7 5.3+
-=========================
+==========================
+What's new in PyPy2.7 5.4+
+==========================
-.. this is a revision shortly after release-pypy2.7-v5.3
-.. startrev: 873218a739f1
+.. this is a revision shortly after release-pypy2.7-v5.4
+.. startrev: 522736f816dc
-.. 418b05f95db5
-Improve CPython compatibility for ``is``. Now code like ``if x is ():``
-works the same way as it does on CPython. See http://pypy.readthedocs.io/en/latest/cpython_differences.html#object-identity-of-primitive-values-is-and-id .
-.. pull request #455
-Add sys.{get,set}dlopenflags, for cpyext extensions.
-
-.. branch: fix-gen-dfa
-
-Resolves an issue with the generator script to build the dfa for Python syntax.
-
-.. branch: z196-support
-
-Fixes a critical issue in the register allocator and extends support on s390x.
-PyPy runs and translates on the s390x revisions z10 (released February 2008, experimental)
-and z196 (released August 2010) in addition to zEC12 and z13.
-To target e.g. z196 on a zEC12 machine supply CFLAGS="-march=z196" to your shell environment.
-
-.. branch: s390x-5.3-catchup
-
-Implement the backend related changes for s390x.
-
-.. branch: incminimark-ll_assert
-.. branch: vmprof-openbsd
-
-.. branch: testing-cleanup
-
-Simplify handling of interp-level tests and make it more forward-
-compatible.
-
-.. branch: pyfile-tell
-Sync w_file with the c-level FILE* before returning FILE* in PyFile_AsFile
-
-.. branch: rw-PyString_AS_STRING
-Allow rw access to the char* returned from PyString_AS_STRING, also refactor
-PyStringObject to look like cpython's and allow subclassing PyString_Type and
-PyUnicode_Type
-
-.. branch: save_socket_errno
-
-Bug fix: if ``socket.socket()`` failed, the ``socket.error`` did not show
-the errno of the failing system call, but instead some random previous
-errno.
-
-.. branch: PyTuple_Type-subclass
-
-Refactor PyTupleObject to look like cpython's and allow subclassing
-PyTuple_Type
-
-.. branch: call-via-pyobj
-
-Use offsets from PyTypeObject to find actual c function to call rather than
-fixed functions, allows function override after PyType_Ready is called
-
-.. branch: issue2335
-
-Avoid exhausting the stack in the JIT due to successive guard
-failures in the same Python function ending up as successive levels of
-RPython functions, while at app-level the traceback is very short
-
-.. branch: use-madv-free
-
-Try harder to return memory to the OS. See e.g. issue #2336. Note that it does
-not show up as a reduction of the VIRT column in ``top``, and the RES
-column might also not show the reduction, particularly on Linux >= 4.5 or
-on OS/X: it uses MADV_FREE, which only marks the pages as returnable to
-the OS if the memory is low.
-
-.. branch: cpyext-slotdefs2
-
-Fill in more slots when creating a PyTypeObject from a W_TypeObject
-More slots are still TBD, like tp_print and richcmp
-
-.. branch: json-surrogates
-
-Align json module decode with the cpython's impl, fixes issue 2345
-
-.. branch: issue2343
-
-Copy CPython's logic more closely for handling of ``__instancecheck__()``
-and ``__subclasscheck__()``. Fixes issue 2343.
-
-.. branch: msvcrt-cffi
-
-Rewrite the Win32 dependencies of 'subprocess' to use cffi instead
-of ctypes. This avoids importing ctypes in many small programs and
-scripts, which in turn avoids enabling threads (because ctypes
-creates callbacks at import time, and callbacks need threads).
-
-.. branch: new-jit-log
-
-The new logging facility that integrates with and adds features to vmprof.com.
-
-.. branch: jitlog-32bit
-
-Resolve issues to use the new logging facility on a 32bit system
-
-.. branch: ep2016sprint
-
-Trying harder to make hash(-1) return -2, like it does on CPython
-
-.. branch: jitlog-exact-source-lines
-
-Log exact line positions in debug merge points.
-
-.. branch: null_byte_after_str
-
-Allocate all RPython strings with one extra byte, normally unused.
-It is used to hold a final zero in case we need some ``char *``
-representation of the string, together with checks like ``not
-can_move()`` or object pinning. Main new thing that this allows:
-``ffi.from_buffer(string)`` in CFFI. Additionally, and most
-importantly, CFFI calls that take directly a string as argument don't
-copy the string any more---this is like CFFI on CPython.
-
-.. branch: resource_warning
-
-Add a new command line option -X track-resources which will produce
-ResourceWarnings when the GC closes unclosed files and sockets.
-
-.. branch: cpyext-realloc
-
-Implement PyObject_Realloc
-
-.. branch: inline-blocks
-
-Improve a little bit the readability of the generated C code
-
-.. branch: improve-vmprof-testing
-
-Improved vmprof support: now tries hard to not miss any Python-level
-frame in the captured stacks, even if there is the metainterp or
-blackhole interp involved. Also fix the stacklet (greenlet) support.
-
-.. branch: py2-mappingproxy
-
-``type.__dict__`` now returns a ``dict_proxy`` object, like on CPython.
-Previously it returned what looked like a regular dict object (but it
-was already read-only).
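
A minimal sketch of the zero-copy case that the null_byte_after_str entry
above enables, assuming cffi 1.8 as shipped with this release:

    import cffi
    ffi = cffi.FFI()
    data = b"some immutable byte string"
    buf = ffi.from_buffer(data)     # on PyPy 5.4 this no longer copies 'data'
    assert len(buf) == len(data)
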
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-pypy2-5.4.0.rst
copy from pypy/doc/whatsnew-head.rst
copy to pypy/doc/whatsnew-pypy2-5.4.0.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-pypy2-5.4.0.rst
@@ -1,5 +1,5 @@
=========================
-What's new in PyPy2.7 5.3+
+What's new in PyPy2.7 5.4
=========================
.. this is a revision shortly after release-pypy2.7-v5.3
@@ -144,3 +144,22 @@
``type.__dict__`` now returns a ``dict_proxy`` object, like on CPython.
Previously it returned what looked like a regular dict object (but it
was already read-only).
+
+
+.. branch: const-fold-we-are-jitted
+
+Reduce the size of the generated C code by constant-folding ``we_are_jitted``
+in non-jitcode.
+
+.. branch: memoryview-attributes
+
+Support for memoryview attributes (format, itemsize, ...).
+Extends the cpyext emulation layer.
+
+.. branch: redirect-assembler-jitlog
+
+Log more information to properly rebuild the redirected traces in jitviewer.
+
+.. branch: cpyext-subclass
+
+Copy Py_TPFLAGS_CHECKTYPES, Py_TPFLAGS_HAVE_INPLACEOPS when inheriting
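
A quick app-level illustration of the attributes that the
memoryview-attributes branch above refers to (these are standard CPython 2.7
semantics; the branch additionally exposes them through cpyext via
PyMemoryView_FromObject):

    m = memoryview(b"abcd")
    assert m.format == 'B'      # unsigned bytes
    assert m.itemsize == 1
    assert m.ndim == 1
    assert m.shape == (4,)
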
diff --git a/pypy/interpreter/astcompiler/ast.py
b/pypy/interpreter/astcompiler/ast.py
--- a/pypy/interpreter/astcompiler/ast.py
+++ b/pypy/interpreter/astcompiler/ast.py
@@ -432,7 +432,7 @@
_body = [stmt.from_object(space, w_item) for w_item in body_w]
decorator_list_w = space.unpackiterable(w_decorator_list)
_decorator_list = [expr.from_object(space, w_item) for w_item in
decorator_list_w]
- _returns = expr.from_object(space, w_returns) if w_returns is not None
else None
+ _returns = expr.from_object(space, w_returns)
_lineno = space.int_w(w_lineno)
_col_offset = space.int_w(w_col_offset)
return FunctionDef(_name, _args, _body, _decorator_list, _returns,
_lineno, _col_offset)
@@ -508,7 +508,7 @@
_body = [stmt.from_object(space, w_item) for w_item in body_w]
decorator_list_w = space.unpackiterable(w_decorator_list)
_decorator_list = [expr.from_object(space, w_item) for w_item in
decorator_list_w]
- _returns = expr.from_object(space, w_returns) if w_returns is not None
else None
+ _returns = expr.from_object(space, w_returns)
_lineno = space.int_w(w_lineno)
_col_offset = space.int_w(w_col_offset)
return AsyncFunctionDef(_name, _args, _body, _decorator_list,
_returns, _lineno, _col_offset)
@@ -630,7 +630,7 @@
w_value = get_field(space, w_node, 'value', True)
w_lineno = get_field(space, w_node, 'lineno', False)
w_col_offset = get_field(space, w_node, 'col_offset', False)
- _value = expr.from_object(space, w_value) if w_value is not None else
None
+ _value = expr.from_object(space, w_value)
_lineno = space.int_w(w_lineno)
_col_offset = space.int_w(w_col_offset)
return Return(_value, _lineno, _col_offset)
@@ -1190,8 +1190,8 @@
w_cause = get_field(space, w_node, 'cause', True)
w_lineno = get_field(space, w_node, 'lineno', False)
w_col_offset = get_field(space, w_node, 'col_offset', False)
- _exc = expr.from_object(space, w_exc) if w_exc is not None else None
- _cause = expr.from_object(space, w_cause) if w_cause is not None else
None
+ _exc = expr.from_object(space, w_exc)
+ _cause = expr.from_object(space, w_cause)
_lineno = space.int_w(w_lineno)
_col_offset = space.int_w(w_col_offset)
return Raise(_exc, _cause, _lineno, _col_offset)
@@ -1314,7 +1314,7 @@
_test = expr.from_object(space, w_test)
if _test is None:
raise_required_value(space, w_node, 'test')
- _msg = expr.from_object(space, w_msg) if w_msg is not None else None
+ _msg = expr.from_object(space, w_msg)
_lineno = space.int_w(w_lineno)
_col_offset = space.int_w(w_col_offset)
return Assert(_test, _msg, _lineno, _col_offset)
@@ -2312,7 +2312,7 @@
w_value = get_field(space, w_node, 'value', True)
w_lineno = get_field(space, w_node, 'lineno', False)
w_col_offset = get_field(space, w_node, 'col_offset', False)
- _value = expr.from_object(space, w_value) if w_value is not None else
None
+ _value = expr.from_object(space, w_value)
_lineno = space.int_w(w_lineno)
_col_offset = space.int_w(w_col_offset)
return Yield(_value, _lineno, _col_offset)
@@ -3101,9 +3101,9 @@
w_lower = get_field(space, w_node, 'lower', True)
w_upper = get_field(space, w_node, 'upper', True)
w_step = get_field(space, w_node, 'step', True)
- _lower = expr.from_object(space, w_lower) if w_lower is not None else
None
- _upper = expr.from_object(space, w_upper) if w_upper is not None else
None
- _step = expr.from_object(space, w_step) if w_step is not None else None
+ _lower = expr.from_object(space, w_lower)
+ _upper = expr.from_object(space, w_upper)
+ _step = expr.from_object(space, w_step)
return Slice(_lower, _upper, _step)
State.ast_type('Slice', 'slice', ['lower', 'upper', 'step'])
@@ -3583,7 +3583,7 @@
w_body = get_field(space, w_node, 'body', False)
w_lineno = get_field(space, w_node, 'lineno', False)
w_col_offset = get_field(space, w_node, 'col_offset', False)
- _type = expr.from_object(space, w_type) if w_type is not None else None
+ _type = expr.from_object(space, w_type)
_name = space.str_or_None_w(w_name)
body_w = space.unpackiterable(w_body)
_body = [stmt.from_object(space, w_item) for w_item in body_w]
@@ -3664,12 +3664,12 @@
w_defaults = get_field(space, w_node, 'defaults', False)
args_w = space.unpackiterable(w_args)
_args = [arg.from_object(space, w_item) for w_item in args_w]
- _vararg = arg.from_object(space, w_vararg) if w_vararg is not None
else None
+ _vararg = arg.from_object(space, w_vararg) if not space.is_w(w_vararg,
space.w_None) else None
kwonlyargs_w = space.unpackiterable(w_kwonlyargs)
_kwonlyargs = [arg.from_object(space, w_item) for w_item in
kwonlyargs_w]
kw_defaults_w = space.unpackiterable(w_kw_defaults)
_kw_defaults = [expr.from_object(space, w_item) for w_item in
kw_defaults_w]
- _kwarg = arg.from_object(space, w_kwarg) if w_kwarg is not None else
None
+ _kwarg = arg.from_object(space, w_kwarg) if not space.is_w(w_kwarg,
space.w_None) else None
defaults_w = space.unpackiterable(w_defaults)
_defaults = [expr.from_object(space, w_item) for w_item in defaults_w]
return arguments(_args, _vararg, _kwonlyargs, _kw_defaults, _kwarg,
_defaults)
@@ -3705,7 +3705,7 @@
_arg = space.identifier_w(w_arg)
if _arg is None:
raise_required_value(space, w_node, 'arg')
- _annotation = expr.from_object(space, w_annotation) if w_annotation is
not None else None
+ _annotation = expr.from_object(space, w_annotation)
return arg(_arg, _annotation)
State.ast_type('arg', 'AST', ['arg', 'annotation'])
@@ -3805,7 +3805,7 @@
_context_expr = expr.from_object(space, w_context_expr)
if _context_expr is None:
raise_required_value(space, w_node, 'context_expr')
- _optional_vars = expr.from_object(space, w_optional_vars) if
w_optional_vars is not None else None
+ _optional_vars = expr.from_object(space, w_optional_vars)
return withitem(_context_expr, _optional_vars)
State.ast_type('withitem', 'AST', ['context_expr', 'optional_vars'])
diff --git a/pypy/interpreter/astcompiler/astbuilder.py
b/pypy/interpreter/astcompiler/astbuilder.py
--- a/pypy/interpreter/astcompiler/astbuilder.py
+++ b/pypy/interpreter/astcompiler/astbuilder.py
@@ -4,12 +4,20 @@
from pypy.interpreter.pyparser.pygram import syms, tokens
from pypy.interpreter.pyparser.error import SyntaxError
from pypy.interpreter.pyparser import parsestring
-from rpython.rlib.objectmodel import always_inline
+from rpython.rlib.objectmodel import always_inline, we_are_translated
def ast_from_node(space, node, compile_info):
"""Turn a parse tree, node, to AST."""
- return ASTBuilder(space, node, compile_info).build_ast()
+ ast = ASTBuilder(space, node, compile_info).build_ast()
+ #
+ # When we are not translated, we send this ast to validate_ast.
+ # The goal is to check that validate_ast doesn't crash on valid
+ # asts, at least.
+ if not we_are_translated():
+ from pypy.interpreter.astcompiler import validate
+ validate.validate_ast(space, ast)
+ return ast
augassign_operator_map = {
diff --git a/pypy/interpreter/astcompiler/codegen.py
b/pypy/interpreter/astcompiler/codegen.py
--- a/pypy/interpreter/astcompiler/codegen.py
+++ b/pypy/interpreter/astcompiler/codegen.py
@@ -1234,8 +1234,7 @@
if d.values:
for i in range(len(d.values)):
key = d.keys[i]
- if key is None:
- is_unpacking = True
+ is_unpacking = key is None
if elements == 0xFFFF or (elements and is_unpacking):
self.emit_op_arg(ops.BUILD_MAP, elements)
containers += 1
@@ -1244,8 +1243,14 @@
d.values[i].walkabout(self)
containers += 1
else:
+ # TODO: key.walkabout has to be called before d.values.walkabout;
+ # that would fix the error "keywords must be strings".
+ # For some reason the keys and values seem to be in reverse order
+ # in some cases, so another error has to be fixed in order for
+ # this to work, otherwise it breaks everything.
+ # After fix: remove dirty fixes in pyopcode
d.values[i].walkabout(self)
- d.keys[i].walkabout(self)
+ key.walkabout(self)
elements += 1
if elements or containers == 0:
self.emit_op_arg(ops.BUILD_MAP, elements)
@@ -1257,7 +1262,7 @@
oparg = min(containers, 255)
self.emit_op_arg(ops.BUILD_MAP_UNPACK, oparg)
containers -= (oparg - 1)
- is_unpacking = 0
+ is_unpacking = False
def visit_Set(self, s):
self._visit_starunpack(s, s.elts, ops.BUILD_SET, ops.BUILD_SET,
ops.BUILD_SET_UNPACK)
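
For reference, the dict-display-with-unpacking construct handled above can be
inspected on any 3.5-level interpreter; roughly, literal key/value groups
become BUILD_MAP and those maps plus the unpacked mappings are merged with
BUILD_MAP_UNPACK:

    # requires a 3.5-level interpreter for the ** syntax in dict displays
    import dis
    dis.dis(compile("{'a': 1, **extra, 'b': 2}", "<example>", "eval"))
    # expect BUILD_MAP for the literal parts and a final BUILD_MAP_UNPACK
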
diff --git a/pypy/interpreter/astcompiler/test/test_compiler.py
b/pypy/interpreter/astcompiler/test/test_compiler.py
--- a/pypy/interpreter/astcompiler/test/test_compiler.py
+++ b/pypy/interpreter/astcompiler/test/test_compiler.py
@@ -16,12 +16,14 @@
return codegen.compile_ast(space, ast, info)
def generate_function_code(expr, space):
+ from pypy.interpreter.astcompiler.ast import FunctionDef
p = pyparse.PythonParser(space)
info = pyparse.CompileInfo("<test>", 'exec')
cst = p.parse_source(expr, info)
ast = astbuilder.ast_from_node(space, cst, info)
function_ast = optimize.optimize_ast(space, ast.body[0], info)
function_ast = ast.body[0]
+ assert isinstance(function_ast, FunctionDef)
symbols = symtable.SymtableBuilder(space, ast, info)
generator = codegen.FunctionCodeGenerator(
space, 'function', function_ast, 1, symbols, info, qualname='function')
@@ -864,9 +866,10 @@
with a: pass
with a: pass
with a: pass
+ with a: pass
"""
code = compile_with_astcompiler(source, 'exec', self.space)
- assert code.co_stacksize == 5
+ assert code.co_stacksize == 6 # i.e. <= 7, there is no systematic leak
def test_stackeffect_bug5(self):
source = """if 1:
@@ -1345,7 +1348,9 @@
assert ops.BINARY_POWER not in counts
def test_call_function_var(self):
- source = """call(*me)"""
+ source = """def f():
+ call(*me)
+ """
code, blocks = generate_function_code(source, self.space)
# there is a stack computation error
assert blocks[0].instructions[3].arg == 0
diff --git a/pypy/interpreter/astcompiler/tools/asdl_py.py
b/pypy/interpreter/astcompiler/tools/asdl_py.py
--- a/pypy/interpreter/astcompiler/tools/asdl_py.py
+++ b/pypy/interpreter/astcompiler/tools/asdl_py.py
@@ -160,7 +160,17 @@
else:
extractor = "%s.from_object(space, %s)" % (field.type, value)
if field.opt:
- extractor += " if %s is not None else None" % (value,)
+ if field.type == 'expr':
+ # the expr.from_object() method should accept w_None and
+ # return None; nothing more to do here
+ pass
+ elif field.type == 'arg':
+ # the method arg.from_object() doesn't accept w_None
+ extractor += (
+ ' if not space.is_w(%s, space.w_None) else None'
+ % (value,))
+ else:
+ raise NotImplementedError(field.type)
return extractor
def get_field_converter(self, field):
diff --git a/pypy/interpreter/astcompiler/validate.py
b/pypy/interpreter/astcompiler/validate.py
--- a/pypy/interpreter/astcompiler/validate.py
+++ b/pypy/interpreter/astcompiler/validate.py
@@ -176,6 +176,13 @@
if node.returns:
self._validate_expr(node.returns)
+ def visit_AsyncFunctionDef(self, node):
+ self._validate_body(node.body, "AsyncFunctionDef")
+ node.args.walkabout(self)
+ self._validate_exprs(node.decorator_list)
+ if node.returns:
+ self._validate_expr(node.returns)
+
def visit_keyword(self, node):
self._validate_expr(node.value)
@@ -193,6 +200,9 @@
if node.value:
self._validate_expr(node.value)
+ def visit_Await(self, node):
+ self._validate_expr(node.value)
+
def visit_Delete(self, node):
self._validate_nonempty_seq(node.targets, "targets", "Delete")
self._validate_exprs(node.targets, ast.Del)
@@ -212,6 +222,12 @@
self._validate_body(node.body, "For")
self._validate_stmts(node.orelse)
+ def visit_AsyncFor(self, node):
+ self._validate_expr(node.target, ast.Store)
+ self._validate_expr(node.iter)
+ self._validate_body(node.body, "AsyncFor")
+ self._validate_stmts(node.orelse)
+
def visit_While(self, node):
self._validate_expr(node.test)
self._validate_body(node.body, "While")
@@ -232,6 +248,11 @@
self.visit_sequence(node.items)
self._validate_body(node.body, "With")
+ def visit_AsyncWith(self, node):
+ self._validate_nonempty_seq(node.items, "items", "AsyncWith")
+ self.visit_sequence(node.items)
+ self._validate_body(node.body, "AsyncWith")
+
def visit_Raise(self, node):
if node.exc:
self._validate_expr(node.exc)
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -837,13 +837,18 @@
self.interned_strings.set(u, w_s1)
return w_s1
- def is_interned_str(self, s):
- """Assumes an identifier (utf-8 encoded str)"""
+ def get_interned_str(self, s):
+ """Assumes an identifier (utf-8 encoded str). Returns None if
+ the identifier is not interned, or not a valid utf-8 string at all.
+ """
# interface for marshal_impl
if not we_are_translated():
assert type(s) is str
- u = s.decode('utf-8')
- return self.interned_strings.get(u) is not None
+ try:
+ u = s.decode('utf-8')
+ except UnicodeDecodeError:
+ return None
+ return self.interned_strings.get(u) # may be None
def descr_self_interp_w(self, RequiredClass, w_obj):
if not isinstance(w_obj, RequiredClass):
@@ -942,8 +947,8 @@
idx += 1
if idx < expected_length:
raise oefmt(self.w_ValueError,
- "need more than %d value%s to unpack",
- idx, "" if idx == 1 else "s")
+ "not enough values to unpack (expected %d, got %d)",
+ expected_length, idx)
return items
def unpackiterable_unroll(self, w_iterable, expected_length):
diff --git a/pypy/interpreter/pycode.py b/pypy/interpreter/pycode.py
--- a/pypy/interpreter/pycode.py
+++ b/pypy/interpreter/pycode.py
@@ -35,9 +35,10 @@
# we compute the magic number in a similar way to CPython, but we use a
# different value for the highest 16 bits. Bump pypy_incremental_magic every
-# time you make pyc files incompatible
+# time you make pyc files incompatible. This value ends up in the frozen
+# importlib, via MAGIC_NUMBER in module/_frozen_importlib/__init__.
-pypy_incremental_magic = 64 # bump it by 16
+pypy_incremental_magic = 80 # bump it by 16
assert pypy_incremental_magic % 16 == 0
assert pypy_incremental_magic < 3000 # the magic number of Python 3. There are
# no known magic numbers below this value
diff --git a/pypy/interpreter/pyopcode.py b/pypy/interpreter/pyopcode.py
--- a/pypy/interpreter/pyopcode.py
+++ b/pypy/interpreter/pyopcode.py
@@ -820,13 +820,9 @@
itemcount = len(items)
count = left + right
if count > itemcount:
- if count == 1:
- plural = ''
- else:
- plural = 's'
raise oefmt(self.space.w_ValueError,
- "need more than %d value%s to unpack",
- itemcount, plural)
+ "not enough values to unpack (expected at least %d,
got %d)",
+ count, itemcount)
right = itemcount - right
assert right >= 0
# push values in reverse order
@@ -1202,7 +1198,7 @@
self.settopvalue(self.space.w_None)
@jit.unroll_safe
- def call_function(self, oparg, w_star=None, w_starstar=None):
+ def call_function(self, oparg, w_starstar=None, has_vararg=False):
n_arguments = oparg & 0xff
n_keywords = (oparg>>8) & 0xff
if n_keywords:
@@ -1214,20 +1210,16 @@
break
w_value = self.popvalue()
w_key = self.popvalue()
- # temporary (dirty) fix: if star-arg occurs after kwarg,
- # arg order is reversed on stack
- from pypy.objspace.std.listobject import W_ListObject
- if isinstance(w_key, W_ListObject):
- w_key_temp = w_key
- w_key = w_value
- w_value = w_star
- w_star = w_key_temp
key = self.space.identifier_w(w_key)
keywords[n_keywords] = key
keywords_w[n_keywords] = w_value
else:
keywords = None
keywords_w = None
+ if has_vararg:
+ w_star = self.popvalue()
+ else:
+ w_star = None
arguments = self.popvalues(n_arguments)
args = self.argument_factory(arguments, keywords, keywords_w, w_star,
w_starstar)
@@ -1256,17 +1248,15 @@
self.call_function(oparg)
def CALL_FUNCTION_VAR(self, oparg, next_instr):
- w_varargs = self.popvalue()
- self.call_function(oparg, w_varargs)
+ self.call_function(oparg, has_vararg=True)
def CALL_FUNCTION_KW(self, oparg, next_instr):
w_varkw = self.popvalue()
- self.call_function(oparg, None, w_varkw)
+ self.call_function(oparg, w_varkw)
def CALL_FUNCTION_VAR_KW(self, oparg, next_instr):
w_varkw = self.popvalue()
- w_varargs = self.popvalue()
- self.call_function(oparg, w_varargs, w_varkw)
+ self.call_function(oparg, w_varkw, has_vararg=True)
@jit.unroll_safe
def _make_function(self, oparg, freevars=None):
diff --git a/pypy/interpreter/test/test_interpreter.py
b/pypy/interpreter/test/test_interpreter.py
--- a/pypy/interpreter/test/test_interpreter.py
+++ b/pypy/interpreter/test/test_interpreter.py
@@ -214,6 +214,15 @@
assert self.codetest(code, 'g', [12, {}]) == ()
assert self.codetest(code, 'g', [12, {3:1}]) == (3,)
+ def test_star_arg_after_keyword_arg(self):
+ code = '''
+ def f(a, b):
+ return a - b
+ def g(a, b):
+ return f(b=b, *(a,))
+ '''
+ assert self.codetest(code, 'g', [40, 2]) == 38
+
def test_closure(self):
code = '''
def f(x, y):
@@ -458,7 +467,7 @@
try:
a, *b, c, d, e = Seq()
except ValueError as e:
- assert str(e) == "need more than 3 values to unpack"
+ assert str(e) == "not enough values to unpack (expected at least
4, got 3)"
else:
assert False, "Expected ValueError"
"""
diff --git a/pypy/module/_file/readinto.py b/pypy/module/_file/readinto.py
deleted file mode 100644
--- a/pypy/module/_file/readinto.py
+++ /dev/null
@@ -1,82 +0,0 @@
-import sys, errno
-from rpython.rlib import rposix
-from rpython.rlib.objectmodel import keepalive_until_here
-from rpython.rtyper.lltypesystem import lltype, rffi
-from pypy.module._file.interp_file import is_wouldblock_error, signal_checker
-
-_WIN32 = sys.platform.startswith('win')
-UNDERSCORE_ON_WIN32 = '_' if _WIN32 else ''
-
-os_read = rffi.llexternal(UNDERSCORE_ON_WIN32 + 'read',
- [rffi.INT, rffi.CCHARP, rffi.SIZE_T],
- rffi.SSIZE_T, save_err=rffi.RFFI_SAVE_ERRNO)
-
-
-def direct_readinto(self, w_rwbuffer):
- rwbuffer = self.space.writebuf_w(w_rwbuffer)
- stream = self.getstream()
- size = rwbuffer.getlength()
- target_address = lltype.nullptr(rffi.CCHARP.TO)
- fd = -1
- target_pos = 0
-
- if size > 64:
- try:
- target_address = rwbuffer.get_raw_address()
- except ValueError:
- pass
- else:
- fd = stream.try_to_find_file_descriptor()
-
- if fd < 0 or not target_address:
- # fall-back
- MAX_PART = 1024 * 1024 # 1 MB
- while size > MAX_PART:
- data = self.direct_read(MAX_PART)
- rwbuffer.setslice(target_pos, data)
- target_pos += len(data)
- size -= len(data)
- if len(data) != MAX_PART:
- break
- else:
- data = self.direct_read(size)
- rwbuffer.setslice(target_pos, data)
- target_pos += len(data)
-
- else:
- # optimized case: reading more than 64 bytes into a rwbuffer
- # with a valid raw address
- self.check_readable()
-
- # first "read" the part that is already sitting in buffers, if any
- initial_size = min(size, stream.count_buffered_bytes())
- if initial_size > 0:
- data = stream.read(initial_size)
- rwbuffer.setslice(target_pos, data)
- target_pos += len(data)
- size -= len(data)
-
- # then call os_read() to get the rest
- if size > 0:
- stream.flush()
- while True:
- got = os_read(fd, rffi.ptradd(target_address, target_pos),
size)
- got = rffi.cast(lltype.Signed, got)
- if got > 0:
- target_pos += got
- size -= got
- if size <= 0:
- break
- elif got == 0:
- break
- else:
- err = rposix.get_saved_errno()
- if err == errno.EINTR:
- signal_checker(self.space)()
- continue
- if is_wouldblock_error(err) and target_pos > 0:
- break
- raise OSError(err, "read error")
- keepalive_until_here(rwbuffer)
-
- return self.space.wrap(target_pos)
diff --git a/pypy/module/_frozen_importlib/__init__.py
b/pypy/module/_frozen_importlib/__init__.py
--- a/pypy/module/_frozen_importlib/__init__.py
+++ b/pypy/module/_frozen_importlib/__init__.py
@@ -19,7 +19,8 @@
with open(os.path.join(lib_python, 'importlib', name + '.py')) as fp:
source = fp.read()
pathname = "<frozen importlib.%s>" % name
- code_w = Module._cached_compile(space, source, pathname, 'exec', 0)
+ code_w = Module._cached_compile(space, name, source,
+ pathname, 'exec', 0)
space.setitem(w_dict, space.wrap('__name__'), w_name)
space.setitem(w_dict, space.wrap('__builtins__'),
space.wrap(space.builtin))
@@ -27,10 +28,15 @@
def install(self):
"""NOT_RPYTHON"""
+ from pypy.module.imp import interp_imp
+
super(Module, self).install()
space = self.space
# "import importlib/_boostrap_external.py"
w_mod = Module(space, space.wrap("_frozen_importlib_external"))
+ # hack: inject MAGIC_NUMBER into this module's dict
+ space.setattr(w_mod, space.wrap('MAGIC_NUMBER'),
+ interp_imp.get_magic(space))
self._compile_bootstrap_module(
space, '_bootstrap_external', w_mod.w_name, w_mod.w_dict)
space.sys.setmodule(w_mod)
@@ -43,11 +49,13 @@
self.w_import = space.wrap(interp_import.import_with_frames_removed)
@staticmethod
- def _cached_compile(space, source, *args):
+ def _cached_compile(space, name, source, *args):
from rpython.config.translationoption import CACHE_DIR
from pypy.module.marshal import interp_marshal
+ from pypy.interpreter.pycode import default_magic
- cachename = os.path.join(CACHE_DIR, 'frozen_importlib_bootstrap')
+ cachename = os.path.join(CACHE_DIR, 'frozen_importlib_%d%s' % (
+ default_magic, name))
try:
if space.config.translating:
raise IOError("don't use the cache when translating pypy")
diff --git a/pypy/module/_io/interp_fileio.py b/pypy/module/_io/interp_fileio.py
--- a/pypy/module/_io/interp_fileio.py
+++ b/pypy/module/_io/interp_fileio.py
@@ -4,6 +4,7 @@
OperationError, oefmt, wrap_oserror, wrap_oserror2)
from rpython.rlib.rarithmetic import r_longlong
from rpython.rlib.rstring import StringBuilder
+from rpython.rlib import rposix
from os import O_RDONLY, O_WRONLY, O_RDWR, O_CREAT, O_TRUNC, O_EXCL
import sys, os, stat, errno
from pypy.module._io.interp_iobase import W_RawIOBase, convert_size
@@ -29,6 +30,7 @@
O_BINARY = getattr(os, "O_BINARY", 0)
O_APPEND = getattr(os, "O_APPEND", 0)
+_open_inhcache = rposix.SetNonInheritableCache()
def _bad_mode(space):
raise oefmt(space.w_ValueError,
@@ -139,6 +141,7 @@
@unwrap_spec(mode=str, closefd=int)
def descr_init(self, space, w_name, mode='r', closefd=True, w_opener=None):
+ self._close(space)
if space.isinstance_w(w_name, space.w_float):
raise oefmt(space.w_TypeError,
"integer argument expected, got float")
@@ -153,6 +156,8 @@
raise oefmt(space.w_ValueError, "negative file descriptor")
self.readable, self.writable, self.created, self.appending, flags =
decode_mode(space, mode)
+ if rposix.O_CLOEXEC is not None:
+ flags |= rposix.O_CLOEXEC
fd_is_own = False
try:
@@ -171,8 +176,7 @@
raise oefmt(space.w_ValueError,
"Cannot use closefd=False with file name")
- from pypy.module.posix.interp_posix import (
- dispatch_filename, rposix)
+ from pypy.module.posix.interp_posix import dispatch_filename
try:
self.fd = dispatch_filename(rposix.open)(
space, w_name, flags, 0666)
@@ -181,6 +185,11 @@
exception_name='w_IOError')
finally:
fd_is_own = True
+ if not rposix._WIN32:
+ try:
+ _open_inhcache.set_non_inheritable(self.fd)
+ except OSError as e:
+ raise wrap_oserror2(space, e, w_name)
else:
w_fd = space.call_function(w_opener, w_name, space.wrap(flags))
try:
@@ -192,6 +201,11 @@
"expected integer from opener")
finally:
fd_is_own = True
+ if not rposix._WIN32:
+ try:
+ rposix.set_inheritable(self.fd, False)
+ except OSError as e:
+ raise wrap_oserror2(space, e, w_name)
self._dircheck(space, w_name)
space.setattr(self, space.wrap("name"), w_name)
diff --git a/pypy/module/_io/test/test_fileio.py
b/pypy/module/_io/test/test_fileio.py
--- a/pypy/module/_io/test/test_fileio.py
+++ b/pypy/module/_io/test/test_fileio.py
@@ -246,6 +246,33 @@
assert f.mode == 'xb'
raises(FileExistsError, _io.FileIO, filename, 'x')
+ def test_non_inheritable(self):
+ import _io, posix
+ f = _io.FileIO(self.tmpfile, 'r')
+ assert posix.get_inheritable(f.fileno()) == False
+ f.close()
+
+ def test_FileIO_fd_does_not_change_inheritable(self):
+ import _io, posix
+ fd1, fd2 = posix.pipe()
+ posix.set_inheritable(fd1, True)
+ posix.set_inheritable(fd2, False)
+ f1 = _io.FileIO(fd1, 'r')
+ f2 = _io.FileIO(fd2, 'w')
+ assert posix.get_inheritable(fd1) == True
+ assert posix.get_inheritable(fd2) == False
+ f1.close()
+ f2.close()
+
+ def test_close_upon_reinit(self):
+ import _io, posix
+ f = _io.FileIO(self.tmpfile, 'r')
+ fd1 = f.fileno()
+ f.__init__(self.tmpfile, 'w')
+ fd2 = f.fileno()
+ if fd1 != fd2:
+ raises(OSError, posix.close, fd1)
+
def test_flush_at_exit():
from pypy import conftest
diff --git a/pypy/module/_posixsubprocess/_posixsubprocess.c
b/pypy/module/_posixsubprocess/_posixsubprocess.c
--- a/pypy/module/_posixsubprocess/_posixsubprocess.c
+++ b/pypy/module/_posixsubprocess/_posixsubprocess.c
@@ -106,6 +106,30 @@
}
+RPY_EXTERN
+int rpy_set_inheritable(int fd, int inheritable); /* rposix.py */
+
+static int
+make_inheritable(long *py_fds_to_keep, ssize_t num_fds_to_keep,
+ int errpipe_write)
+{
+ long i;
+
+ for (i = 0; i < num_fds_to_keep; ++i) {
+ long fd = py_fds_to_keep[i];
+ if (fd == errpipe_write) {
+ /* errpipe_write is part of py_fds_to_keep. It must be closed at
+ exec(), but kept open in the child process until exec() is
+ called. */
+ continue;
+ }
+ if (rpy_set_inheritable((int)fd, 1) < 0)
+ return -1;
+ }
+ return 0;
+}
+
+
/* Close all file descriptors in the range start_fd inclusive to
* end_fd exclusive except for those in py_fds_to_keep. If the
* range defined by [start_fd, end_fd) is large this will take a
@@ -329,6 +353,9 @@
/* Buffer large enough to hold a hex integer. We can't malloc. */
char hex_errno[sizeof(saved_errno)*2+1];
+ if (make_inheritable(py_fds_to_keep, num_fds_to_keep, errpipe_write) < 0)
+ goto error;
+
/* Close parent's pipe ends. */
if (p2cwrite != -1) {
POSIX_CALL(close(p2cwrite));
@@ -352,26 +379,25 @@
dup2() removes the CLOEXEC flag but we must do it ourselves if dup2()
would be a no-op (issue #10806). */
if (p2cread == 0) {
- int old = fcntl(p2cread, F_GETFD);
- if (old != -1)
- fcntl(p2cread, F_SETFD, old & ~FD_CLOEXEC);
- } else if (p2cread != -1) {
+ if (rpy_set_inheritable(p2cread, 1) < 0)
+ goto error;
+ }
+ else if (p2cread != -1)
POSIX_CALL(dup2(p2cread, 0)); /* stdin */
+
+ if (c2pwrite == 1) {
+ if (rpy_set_inheritable(c2pwrite, 1) < 0)
+ goto error;
}
- if (c2pwrite == 1) {
- int old = fcntl(c2pwrite, F_GETFD);
- if (old != -1)
- fcntl(c2pwrite, F_SETFD, old & ~FD_CLOEXEC);
- } else if (c2pwrite != -1) {
+ else if (c2pwrite != -1)
POSIX_CALL(dup2(c2pwrite, 1)); /* stdout */
+
+ if (errwrite == 2) {
+ if (rpy_set_inheritable(errwrite, 1) < 0)
+ goto error;
}
- if (errwrite == 2) {
- int old = fcntl(errwrite, F_GETFD);
- if (old != -1)
- fcntl(errwrite, F_SETFD, old & ~FD_CLOEXEC);
- } else if (errwrite != -1) {
+ else if (errwrite != -1)
POSIX_CALL(dup2(errwrite, 2)); /* stderr */
- }
/* Close pipe fds. Make sure we don't close the same fd more than */
/* once, or standard fds. */
diff --git a/pypy/module/_posixsubprocess/_posixsubprocess.h
b/pypy/module/_posixsubprocess/_posixsubprocess.h
--- a/pypy/module/_posixsubprocess/_posixsubprocess.h
+++ b/pypy/module/_posixsubprocess/_posixsubprocess.h
@@ -1,3 +1,4 @@
+#include <unistd.h> /* for ssize_t */
#include "src/precommondefs.h"
RPY_EXTERN void
diff --git a/pypy/module/_posixsubprocess/interp_subprocess.py
b/pypy/module/_posixsubprocess/interp_subprocess.py
--- a/pypy/module/_posixsubprocess/interp_subprocess.py
+++ b/pypy/module/_posixsubprocess/interp_subprocess.py
@@ -5,6 +5,7 @@
from rpython.rtyper.tool import rffi_platform as platform
from rpython.translator import cdir
from rpython.translator.tool.cbuild import ExternalCompilationInfo
+from rpython.rlib import rposix
from pypy.interpreter.error import (
OperationError, exception_from_saved_errno, oefmt, wrap_oserror)
@@ -36,6 +37,7 @@
compile_extra.append("-DHAVE_SETSID")
eci = eci.merge(
+ rposix.eci_inheritable,
ExternalCompilationInfo(
compile_extra=compile_extra))
diff --git a/pypy/module/_posixsubprocess/test/test_subprocess.py
b/pypy/module/_posixsubprocess/test/test_subprocess.py
--- a/pypy/module/_posixsubprocess/test/test_subprocess.py
+++ b/pypy/module/_posixsubprocess/test/test_subprocess.py
@@ -75,3 +75,18 @@
n = 1
raises(OverflowError, _posixsubprocess.fork_exec,
1,Z(),3,[1, 2],5,6,7,8,9,10,11,12,13,14,15,16,17)
+
+ def test_pass_fds_make_inheritable(self):
+ import subprocess, posix
+
+ fd1, fd2 = posix.pipe()
+ assert posix.get_inheritable(fd1) is False
+ assert posix.get_inheritable(fd2) is False
+
+ subprocess.check_call(['/usr/bin/env', 'python', '-c',
+ 'import os;os.write(%d,b"K")' % fd2],
+ close_fds=True, pass_fds=[fd2])
+ res = posix.read(fd1, 1)
+ assert res == b"K"
+ posix.close(fd1)
+ posix.close(fd2)
diff --git a/pypy/module/_socket/interp_func.py
b/pypy/module/_socket/interp_func.py
--- a/pypy/module/_socket/interp_func.py
+++ b/pypy/module/_socket/interp_func.py
@@ -142,21 +142,11 @@
@unwrap_spec(fd=int)
def dup(space, fd):
- newfd = rsocket.dup(fd)
- return space.wrap(newfd)
-
-@unwrap_spec(fd=int, family=int, type=int, proto=int)
-def fromfd(space, fd, family, type, proto=0):
- """fromfd(fd, family, type[, proto]) -> socket object
-
- Create a socket object from the given file descriptor.
- The remaining arguments are the same as for socket().
- """
try:
- sock = rsocket.fromfd(fd, family, type, proto)
+ newfd = rsocket.dup(fd, inheritable=False)
except SocketError as e:
raise converted_error(space, e)
- return space.wrap(W_Socket(space, sock))
+ return space.wrap(newfd)
@unwrap_spec(family=int, type=int, proto=int)
def socketpair(space, family=rsocket.socketpair_default_family,
@@ -170,7 +160,8 @@
AF_UNIX if defined on the platform; otherwise, the default is AF_INET.
"""
try:
- sock1, sock2 = rsocket.socketpair(family, type, proto)
+ sock1, sock2 = rsocket.socketpair(family, type, proto,
+ inheritable=False)
except SocketError as e:
raise converted_error(space, e)
return space.newtuple([
diff --git a/pypy/module/_socket/interp_socket.py
b/pypy/module/_socket/interp_socket.py
--- a/pypy/module/_socket/interp_socket.py
+++ b/pypy/module/_socket/interp_socket.py
@@ -177,7 +177,7 @@
sock = RSocket(family, type, proto,
fd=space.c_filedescriptor_w(w_fileno))
else:
- sock = RSocket(family, type, proto)
+ sock = RSocket(family, type, proto, inheritable=False)
W_Socket.__init__(self, space, sock)
except SocketError as e:
raise converted_error(space, e)
@@ -228,7 +228,7 @@
For IP sockets, the address info is a pair (hostaddr, port).
"""
try:
- fd, addr = self.sock.accept()
+ fd, addr = self.sock.accept(inheritable=False)
return space.newtuple([space.wrap(fd),
addr_as_object(addr, fd, space)])
except SocketError as e:
diff --git a/pypy/module/_socket/test/test_sock_app.py
b/pypy/module/_socket/test/test_sock_app.py
--- a/pypy/module/_socket/test/test_sock_app.py
+++ b/pypy/module/_socket/test/test_sock_app.py
@@ -546,11 +546,19 @@
s.ioctl(_socket.SIO_KEEPALIVE_VALS, (1, 100, 100))
def test_dup(self):
- import _socket as socket
+ import _socket as socket, posix
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('localhost', 0))
fd = socket.dup(s.fileno())
assert s.fileno() != fd
+ assert posix.get_inheritable(s.fileno()) is False
+ assert posix.get_inheritable(fd) is False
+ posix.close(fd)
+ s.close()
+
+ def test_dup_error(self):
+ import _socket
+ raises(_socket.error, _socket.dup, 123456)
def test_buffer(self):
# Test that send/sendall/sendto accept a buffer as arg
@@ -648,6 +656,26 @@
assert len(w) == 1, [str(warning) for warning in w]
assert r in str(w[0])
+ def test_invalid_fd(self):
+ import _socket
+ raises(ValueError, _socket.socket, fileno=-1)
+
+ def test_socket_non_inheritable(self):
+ import _socket, posix
+ s1 = _socket.socket()
+ assert posix.get_inheritable(s1.fileno()) is False
+ s1.close()
+
+ def test_socketpair_non_inheritable(self):
+ import _socket, posix
+ if not hasattr(_socket, 'socketpair'):
+ skip("no socketpair")
+ s1, s2 = _socket.socketpair()
+ assert posix.get_inheritable(s1.fileno()) is False
+ assert posix.get_inheritable(s2.fileno()) is False
+ s1.close()
+ s2.close()
+
class AppTestNetlink:
def setup_class(cls):
@@ -826,6 +854,16 @@
assert cli.family == socket.AF_INET
+ def test_accept_non_inheritable(self):
+ import _socket, posix
+ cli = _socket.socket()
+ cli.connect(self.serv.getsockname())
+ fileno, addr = self.serv._accept()
+ assert posix.get_inheritable(fileno) is False
+ posix.close(fileno)
+ cli.close()
+
+
class AppTestErrno:
spaceconfig = {'usemodules': ['_socket']}
diff --git a/pypy/module/_sre/__init__.py b/pypy/module/_sre/__init__.py
--- a/pypy/module/_sre/__init__.py
+++ b/pypy/module/_sre/__init__.py
@@ -1,4 +1,4 @@
-from pypy.interpreter.mixedmodule import MixedModule
+from pypy.interpreter.mixedmodule import MixedModule
class Module(MixedModule):
@@ -7,7 +7,7 @@
interpleveldefs = {
'CODESIZE': 'space.wrap(interp_sre.CODESIZE)',
- 'MAGIC': 'space.wrap(interp_sre.MAGIC)',
+ 'MAGIC': 'space.newint(20140917)',
'MAXREPEAT': 'space.wrap(interp_sre.MAXREPEAT)',
'MAXGROUPS': 'space.wrap(interp_sre.MAXGROUPS)',
'compile': 'interp_sre.W_SRE_Pattern',
diff --git a/pypy/module/_sre/interp_sre.py b/pypy/module/_sre/interp_sre.py
--- a/pypy/module/_sre/interp_sre.py
+++ b/pypy/module/_sre/interp_sre.py
@@ -13,8 +13,8 @@
#
# Constants and exposed functions
-from rpython.rlib.rsre import rsre_core
-from rpython.rlib.rsre.rsre_char import MAGIC, CODESIZE, MAXREPEAT, MAXGROUPS, getlower, set_unicode_db
+from rpython.rlib.rsre import rsre_core, rsre_char
+from rpython.rlib.rsre.rsre_char import CODESIZE, MAXREPEAT, MAXGROUPS, getlower, set_unicode_db
@unwrap_spec(char_ord=int, flags=int)
@@ -92,6 +92,10 @@
#
# SRE_Pattern class
+FLAG_NAMES = ["re.TEMPLATE", "re.IGNORECASE", "re.LOCALE", "re.MULTILINE",
+ "re.DOTALL", "re.UNICODE", "re.VERBOSE", "re.DEBUG",
+ "re.ASCII"]
+
class W_SRE_Pattern(W_Root):
_immutable_fields_ = ["code", "flags", "num_groups", "w_groupindex"]
@@ -99,7 +103,44 @@
space = self.space
raise oefmt(space.w_TypeError, "cannot copy this pattern object")
- def make_ctx(self, w_string, pos=0, endpos=sys.maxint):
+ def repr_w(self):
+ space = self.space
+ u = space.unicode_w(space.repr(self.w_pattern))
+ flag_items = []
+ flags = self.flags
+ if self.is_known_unicode():
+ if ((flags & (rsre_char.SRE_FLAG_LOCALE |
+ rsre_char.SRE_FLAG_UNICODE |
+ 256)) # rsre_char.SRE_FLAG_ASCII
+ == rsre_char.SRE_FLAG_UNICODE):
+ flags &= ~rsre_char.SRE_FLAG_UNICODE
+ for i, name in enumerate(FLAG_NAMES):
+ if flags & (1 << i):
+ flags -= (1 << i)
+ flag_items.append(name)
+ if flags != 0:
+ flag_items.append('0x%x' % flags)
+ if len(flag_items) == 0:
+ usep = u''
+ uflags = u''
+ else:
+ usep = u', '
+ uflags = u'|'.join([item.decode('latin-1') for item in flag_items])
+ return space.wrap(u're.compile(%s%s%s)' % (u, usep, uflags))
+
+ def is_known_bytes(self):
+ space = self.space
+ if space.is_none(self.w_pattern):
+ return False
+ return not space.isinstance_w(self.w_pattern, space.w_unicode)
+
+ def is_known_unicode(self):
+ space = self.space
+ if space.is_none(self.w_pattern):
+ return False
+ return space.isinstance_w(self.w_pattern, space.w_unicode)
+
+ def make_ctx(self, w_string, pos=0, endpos=sys.maxint, flags=0):
"""Make a StrMatchContext, BufMatchContext or a UnicodeMatchContext for
searching in the given w_string object."""
space = self.space
@@ -107,10 +148,10 @@
pos = 0
if endpos < pos:
endpos = pos
+ flags = self.flags | flags
if space.isinstance_w(w_string, space.w_unicode):
unicodestr = space.unicode_w(w_string)
- if not (space.is_none(self.w_pattern) or
- space.isinstance_w(self.w_pattern, space.w_unicode)):
+ if self.is_known_bytes():
raise oefmt(space.w_TypeError,
"can't use a bytes pattern on a string-like "
"object")
@@ -119,10 +160,9 @@
if endpos > len(unicodestr):
endpos = len(unicodestr)
return rsre_core.UnicodeMatchContext(self.code, unicodestr,
- pos, endpos, self.flags)
+ pos, endpos, flags)
elif space.isinstance_w(w_string, space.w_str):
- if (not space.is_none(self.w_pattern) and
- space.isinstance_w(self.w_pattern, space.w_unicode)):
+ if self.is_known_unicode():
raise oefmt(space.w_TypeError,
"can't use a string pattern on a bytes-like "
"object")
@@ -132,11 +172,10 @@
if endpos > len(str):
endpos = len(str)
return rsre_core.StrMatchContext(self.code, str,
- pos, endpos, self.flags)
+ pos, endpos, flags)
else:
buf = space.readbuf_w(w_string)
- if (not space.is_none(self.w_pattern) and
- space.isinstance_w(self.w_pattern, space.w_unicode)):
+ if self.is_known_unicode():
raise oefmt(space.w_TypeError,
"can't use a string pattern on a bytes-like "
"object")
@@ -147,7 +186,7 @@
if endpos > size:
endpos = size
return rsre_core.BufMatchContext(self.code, buf,
- pos, endpos, self.flags)
+ pos, endpos, flags)
def getmatch(self, ctx, found):
if found:
@@ -161,6 +200,12 @@
return self.getmatch(ctx, matchcontext(self.space, ctx))
@unwrap_spec(pos=int, endpos=int)
+ def fullmatch_w(self, w_string, pos=0, endpos=sys.maxint):
+ ctx = self.make_ctx(w_string, pos, endpos)
+ ctx.fullmatch_only = True
+ return self.getmatch(ctx, matchcontext(self.space, ctx))
+
+ @unwrap_spec(pos=int, endpos=int)
def search_w(self, w_string, pos=0, endpos=sys.maxint):
ctx = self.make_ctx(w_string, pos, endpos)
return self.getmatch(ctx, searchcontext(self.space, ctx))
@@ -415,10 +460,12 @@
__new__ = interp2app(SRE_Pattern__new__),
__copy__ = interp2app(W_SRE_Pattern.cannot_copy_w),
__deepcopy__ = interp2app(W_SRE_Pattern.cannot_copy_w),
+ __repr__ = interp2app(W_SRE_Pattern.repr_w),
__weakref__ = make_weakref_descr(W_SRE_Pattern),
findall = interp2app(W_SRE_Pattern.findall_w),
finditer = interp2app(W_SRE_Pattern.finditer_w),
match = interp2app(W_SRE_Pattern.match_w),
+ fullmatch = interp2app(W_SRE_Pattern.fullmatch_w),
scanner = interp2app(W_SRE_Pattern.finditer_w), # reuse finditer()
search = interp2app(W_SRE_Pattern.search_w),
split = interp2app(W_SRE_Pattern.split_w),
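The repr_w() method added above mirrors CPython 3.5's pattern repr: for unicode patterns it drops the implicit re.UNICODE bit, then decomposes the remaining flags bitmask using FLAG_NAMES. A minimal pure-Python sketch of that decomposition; decode_flags is an illustrative helper, not part of the patch:

    import re

    FLAG_NAMES = ["re.TEMPLATE", "re.IGNORECASE", "re.LOCALE", "re.MULTILINE",
                  "re.DOTALL", "re.UNICODE", "re.VERBOSE", "re.DEBUG",
                  "re.ASCII"]

    def decode_flags(flags):
        # walk the known flag bits in order, collecting the names that are set
        items = []
        for i, name in enumerate(FLAG_NAMES):
            if flags & (1 << i):
                flags -= (1 << i)
                items.append(name)
        if flags:                          # any leftover, unnamed bits
            items.append('0x%x' % flags)
        return '|'.join(items)

    print(decode_flags(re.IGNORECASE | re.DOTALL))   # re.IGNORECASE|re.DOTALL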
diff --git a/pypy/module/_sre/test/support_test_app_sre.py b/pypy/module/_sre/test/support_test_app_sre.py
--- a/pypy/module/_sre/test/support_test_app_sre.py
+++ b/pypy/module/_sre/test/support_test_app_sre.py
@@ -1,6 +1,13 @@
"""Support functions for app-level _sre tests."""
import locale, _sre
-from sre_constants import OPCODES, ATCODES, CHCODES, MAXREPEAT
+from sre_constants import OPCODES as _OPCODES
+from sre_constants import ATCODES as _ATCODES
+from sre_constants import CHCODES as _CHCODES
+from sre_constants import MAXREPEAT
+
+OPCODES = {_opcode.name.lower(): int(_opcode) for _opcode in _OPCODES}
+ATCODES = {_atcode.name.lower(): int(_atcode) for _atcode in _ATCODES}
+CHCODES = {_chcode.name.lower(): int(_chcode) for _chcode in _CHCODES}
def encode_literal(string):
opcodes = []
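For context: in CPython 3.5, sre_constants.OPCODES/ATCODES/CHCODES are lists of named integer constants, while the app-level tests want lowercase-name to int mappings, which is what the comprehensions above rebuild. A small sketch of the same idea, assuming the 3.5 sre_constants layout:

    from sre_constants import OPCODES as _OPCODES

    OPCODES = {_opcode.name.lower(): int(_opcode) for _opcode in _OPCODES}
    print(OPCODES['literal'])   # the integer opcode used by encode_literal()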
diff --git a/pypy/module/_sre/test/test_app_sre.py b/pypy/module/_sre/test/test_app_sre.py
--- a/pypy/module/_sre/test/test_app_sre.py
+++ b/pypy/module/_sre/test/test_app_sre.py
@@ -116,6 +116,22 @@
import _sre
raises(TypeError, _sre.compile, {}, 0, [])
+ def test_fullmatch(self):
+ import re
+ assert re.compile(r"ab*c").fullmatch("abbcdef") is None
+ assert re.compile(r"ab*c").fullmatch("abbc") is not None
+ assert re.fullmatch(r"ab*c", "abbbcdef") is None
+ assert re.fullmatch(r"ab*c", "abbbc") is not None
+
+ def test_repr(self):
+ import re
+ r = re.compile(r'f(o"\d)', 0)
+ assert repr(r) == (
+ r"""re.compile('f(o"\\d)')""")
+ r = re.compile(r'f(o"\d)', re.IGNORECASE|re.DOTALL|re.VERBOSE)
+ assert repr(r) == (
+ r"""re.compile('f(o"\\d)', re.IGNORECASE|re.DOTALL|re.VERBOSE)""")
+
class AppTestSreMatch:
spaceconfig = dict(usemodules=('array', ))
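The new test_fullmatch above exercises the point that fullmatch() only succeeds when the whole string is consumed, unlike match(). A short illustration, using the same pattern as the test:

    import re
    p = re.compile(r"ab*c")
    assert p.match("abbcdef") is not None     # match(): a matching prefix is enough
    assert p.fullmatch("abbcdef") is None     # fullmatch(): the trailing "def" rejects it
    assert p.fullmatch("abbc") is not None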
diff --git a/pypy/module/_winreg/interp_winreg.py b/pypy/module/_winreg/interp_winreg.py
--- a/pypy/module/_winreg/interp_winreg.py
+++ b/pypy/module/_winreg/interp_winreg.py
@@ -356,9 +356,15 @@
elif typ == rwinreg.REG_SZ or typ == rwinreg.REG_EXPAND_SZ:
if not buflen:
- return space.wrap("")
- s = rffi.charp2strn(rffi.cast(rffi.CCHARP, buf), buflen)
- return space.wrap(s)
+ s = ""
+ else:
+ # may or may not have a trailing NULL in the buffer.
+ buf = rffi.cast(rffi.CCHARP, buf)
+ if buf[buflen - 1] == '\x00':
+ buflen -= 1
+ s = rffi.charp2strn(buf, buflen)
+ w_s = space.wrap(s)
+ return space.call_method(w_s, 'decode', space.wrap('mbcs'))
elif typ == rwinreg.REG_MULTI_SZ:
if not buflen:
@@ -458,7 +464,7 @@
return space.newtuple([
convert_from_regdata(space, databuf,
length, retType[0]),
- space.wrap(retType[0]),
+ space.wrap(intmask(retType[0])),
])
@unwrap_spec(subkey=str)
@@ -610,7 +616,7 @@
space.wrap(rffi.charp2str(valuebuf)),
convert_from_regdata(space, databuf,
length, retType[0]),
- space.wrap(retType[0]),
+ space.wrap(intmask(retType[0])),
])
@unwrap_spec(index=int)
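The REG_SZ/REG_EXPAND_SZ branch above now strips at most one trailing NUL byte from the registry buffer and decodes the result as mbcs, so string values come back as unicode. A rough pure-Python equivalent of that conversion; reg_sz_to_unicode is an illustrative helper, not the RPython code, and 'mbcs' is a Windows-only codec:

    def reg_sz_to_unicode(raw):
        # the registry data may or may not include a trailing NUL byte
        if raw and raw[-1:] == b'\x00':
            raw = raw[:-1]
        return raw.decode('mbcs')     # ANSI code page bytes -> unicode (Windows only)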
diff --git a/pypy/module/_winreg/test/test_winreg.py b/pypy/module/_winreg/test/test_winreg.py
--- a/pypy/module/_winreg/test/test_winreg.py
+++ b/pypy/module/_winreg/test/test_winreg.py
@@ -154,6 +154,7 @@
def test_readValues(self):
from winreg import OpenKey, EnumValue, QueryValueEx, EnumKey
+ from winreg import REG_SZ, REG_EXPAND_SZ
key = OpenKey(self.root_key, self.test_key_name)
sub_key = OpenKey(key, "sub_key")
index = 0
@@ -167,7 +168,10 @@
assert index == len(self.test_data)
for name, value, type in self.test_data:
- assert QueryValueEx(sub_key, name) == (value, type)
+ result = QueryValueEx(sub_key, name)
+ assert result == (value, type)
+ if type == REG_SZ or type == REG_EXPAND_SZ:
+ assert isinstance(result[0], unicode) # not string
assert EnumKey(key, 0) == "sub_key"
raises(EnvironmentError, EnumKey, key, 1)
diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py
--- a/pypy/module/cpyext/api.py
+++ b/pypy/module/cpyext/api.py
@@ -119,8 +119,8 @@
constant_names = """
Py_TPFLAGS_READY Py_TPFLAGS_READYING Py_TPFLAGS_HAVE_GETCHARBUFFER
METH_COEXIST METH_STATIC METH_CLASS Py_TPFLAGS_BASETYPE
-METH_NOARGS METH_VARARGS METH_KEYWORDS METH_O
-Py_TPFLAGS_HEAPTYPE Py_TPFLAGS_HAVE_CLASS
+METH_NOARGS METH_VARARGS METH_KEYWORDS METH_O Py_TPFLAGS_HAVE_INPLACEOPS
+Py_TPFLAGS_HEAPTYPE Py_TPFLAGS_HAVE_CLASS Py_TPFLAGS_HAVE_NEWBUFFER
Py_LT Py_LE Py_EQ Py_NE Py_GT Py_GE Py_TPFLAGS_CHECKTYPES
Py_CLEANUP_SUPPORTED
""".split()
@@ -651,6 +651,7 @@
#('smalltable', rffi.CFixedArray(Py_ssize_t, 2)),
('internal', rffi.VOIDP)
))
+Py_bufferP = lltype.Ptr(Py_buffer)
@specialize.memo()
def is_PyObject(TYPE):
@@ -974,12 +975,14 @@
py_type_ready(space, get_capsule_type())
INIT_FUNCTIONS.append(init_types)
from pypy.module.posix.interp_posix import add_fork_hook
- reinit_tls = rffi.llexternal('%sThread_ReInitTLS' % prefix, [], lltype.Void,
- compilation_info=eci)
global py_fatalerror
py_fatalerror = rffi.llexternal('%s_FatalError' % prefix,
[CONST_STRING], lltype.Void,
compilation_info=eci)
+ _reinit_tls = rffi.llexternal('%sThread_ReInitTLS' % prefix, [],
+ lltype.Void, compilation_info=eci)
+ def reinit_tls(space):
+ _reinit_tls()
add_fork_hook('child', reinit_tls)
def init_function(func):
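The reinit_tls change above wraps the llexternal call in a one-argument function, presumably because add_fork_hook() callbacks are invoked with the object space while the C helper takes no arguments. Schematically (an illustration only; the names mirror the patch):

    def make_child_hook(c_reinit_tls):
        def reinit_tls(space):        # fork-hook signature: receives the space
            c_reinit_tls()            # the llexternal helper takes no arguments
        return reinit_tls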
diff --git a/pypy/module/cpyext/buffer.py b/pypy/module/cpyext/buffer.py
--- a/pypy/module/cpyext/buffer.py
+++ b/pypy/module/cpyext/buffer.py
@@ -1,9 +1,13 @@
+from pypy.interpreter.error import oefmt
from rpython.rtyper.lltypesystem import rffi, lltype
from rpython.rlib import buffer
from pypy.module.cpyext.api import (
cpython_api, CANNOT_FAIL, Py_buffer)
from pypy.module.cpyext.pyobject import PyObject, Py_DecRef
+# PyObject_GetBuffer has been removed, it is defined in abstract.c
+# PyObject_CheckBuffer is also already defined
+
@cpython_api([lltype.Ptr(Py_buffer), lltype.Char], rffi.INT_real,
error=CANNOT_FAIL)
def PyBuffer_IsContiguous(space, view, fortran):
"""Return 1 if the memory defined by the view is C-style (fortran is
diff --git a/pypy/module/cpyext/include/patchlevel.h b/pypy/module/cpyext/include/patchlevel.h
--- a/pypy/module/cpyext/include/patchlevel.h
+++ b/pypy/module/cpyext/include/patchlevel.h
@@ -29,8 +29,8 @@
#define PY_VERSION "3.3.5"
/* PyPy version as a string */
-#define PYPY_VERSION "5.3.2-alpha0"
-#define PYPY_VERSION_NUM 0x05030200
+#define PYPY_VERSION "5.4.1-alpha0"
+#define PYPY_VERSION_NUM 0x05040100
/* Defined to mean a PyPy where cpyext holds more regular references
to PyObjects, e.g. staying alive as long as the internal PyPy object
diff --git a/pypy/module/cpyext/memoryobject.py b/pypy/module/cpyext/memoryobject.py
--- a/pypy/module/cpyext/memoryobject.py
+++ b/pypy/module/cpyext/memoryobject.py
@@ -16,7 +16,7 @@
@cpython_api([PyObject], PyObject)
def PyMemoryView_GET_BASE(space, w_obj):
# return the obj field of the Py_buffer created by PyMemoryView_GET_BUFFER
- raise NotImplementedError
+ raise NotImplementedError('PyMemoryView_GET_BUFFER')
@cpython_api([PyObject], lltype.Ptr(Py_buffer), error=CANNOT_FAIL)
def PyMemoryView_GET_BUFFER(space, w_obj):
diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py
--- a/pypy/module/cpyext/slotdefs.py
+++ b/pypy/module/cpyext/slotdefs.py
@@ -7,7 +7,7 @@
cpython_api, generic_cpy_call, PyObject, Py_ssize_t, Py_TPFLAGS_CHECKTYPES,
Py_buffer, mangle_name, pypy_decl)
from pypy.module.cpyext.typeobjectdefs import (
- unaryfunc, wrapperfunc, ternaryfunc, PyTypeObjectPtr, binaryfunc,
+ unaryfunc, wrapperfunc, ternaryfunc, PyTypeObjectPtr, binaryfunc, ternaryfunc,
getattrfunc, getattrofunc, setattrofunc, lenfunc, ssizeargfunc, inquiry,
ssizessizeargfunc, ssizeobjargproc, iternextfunc, initproc, richcmpfunc,
cmpfunc, hashfunc, descrgetfunc, descrsetfunc, objobjproc, objobjargproc,
@@ -20,8 +20,12 @@
from rpython.rlib.buffer import Buffer
from rpython.rlib.unroll import unrolling_iterable
from rpython.rlib.objectmodel import specialize
+from rpython.rlib.rarithmetic import widen
from rpython.tool.sourcetools import func_renamer
from rpython.rtyper.annlowlevel import llhelper
+from pypy.module.sys.version import CPYTHON_VERSION
+
+PY3 = CPYTHON_VERSION[0] == 3
# XXX: Also defined in object.h
Py_LT = 0
@@ -298,11 +302,23 @@
# Similar to Py_buffer
_immutable_ = True
- def __init__(self, ptr, size, w_obj):
+ def __init__(self, ptr, size, w_obj, format='B', shape=None,
+ strides=None, ndim=1, itemsize=1, readonly=True):
self.ptr = ptr
self.size = size
self.w_obj = w_obj # kept alive
- self.readonly = True
+ self.format = format
+ if not shape:
+ self.shape = [size]
+ else:
+ self.shape = shape
+ if not strides:
+ self.strides = [1]
+ else:
+ self.strides = strides
+ self.ndim = ndim
+ self.itemsize = itemsize
+ self.readonly = readonly
def getlength(self):
return self.size
@@ -313,14 +329,38 @@
def get_raw_address(self):
return rffi.cast(rffi.CCHARP, self.ptr)
+ def getformat(self):
+ return self.format
+
+ def getshape(self):
+ return self.shape
+
+ def getitemsize(self):
+ return self.itemsize
+
def wrap_getbuffer(space, w_self, w_args, func):
func_target = rffi.cast(getbufferproc, func)
- with lltype.scoped_alloc(Py_buffer) as view:
- flags = rffi.cast(rffi.INT_real, 0)
- ret = generic_cpy_call(space, func_target, w_self, view, flags)
- if rffi.cast(lltype.Signed, ret) == -1:
+ with lltype.scoped_alloc(Py_buffer) as pybuf:
+ _flags = 0
+ if space.len_w(w_args) > 0:
+ _flags = space.int_w(space.listview(w_args)[0])
+ flags = rffi.cast(rffi.INT_real,_flags)
+ size = generic_cpy_call(space, func_target, w_self, pybuf, flags)
+ if widen(size) < 0:
space.fromcache(State).check_and_raise_exception(always=True)
- return space.newbuffer(CPyBuffer(view.c_buf, view.c_len, w_self))
+ ptr = pybuf.c_buf
+ size = pybuf.c_len
+ ndim = widen(pybuf.c_ndim)
+ shape = [pybuf.c_shape[i] for i in range(ndim)]
+ strides = [pybuf.c_strides[i] for i in range(ndim)]
+ if pybuf.c_format:
+ format = rffi.charp2str(pybuf.c_format)
+ else:
+ format = 'B'
+ return space.newbuffer(CPyBuffer(ptr, size, w_self, format=format,
+ ndim=ndim, shape=shape, strides=strides,
+ itemsize=pybuf.c_itemsize,
+ readonly=widen(pybuf.c_readonly)))
def get_richcmp_func(OP_CONST):
def inner(space, w_self, w_args, func):
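wrap_getbuffer above now copies the full Py_buffer metadata (format, ndim, shape, strides, itemsize, readonly) into the CPyBuffer, instead of only the pointer and length as before. At app level this is the same metadata a memoryview reports; for example, on CPython:

    import array
    m = memoryview(array.array('i', [1, 2, 3]))
    print(m.format, m.itemsize, m.ndim, m.shape, m.strides, m.readonly)
    # typically: i 4 1 (3,) (4,) False    (itemsize is platform-dependent)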
@@ -542,6 +582,21 @@
w_stararg=w_args, w_starstararg=w_kwds)
return space.call_args(space.get(new_fn, w_self), args)
api_func = slot_tp_new.api_func
+ elif name == 'tp_as_buffer.c_bf_getbuffer':
+ buff_fn = w_type.getdictvalue(space, '__buffer__')
+ if buff_fn is None:
+ return
+ @cpython_api([PyObject, Py_bufferP, rffi.INT_real],
+ rffi.INT_real, header=None, error=-1)
+ @func_renamer("cpyext_%s_%s" % (name.replace('.', '_'), typedef.name))
+ def buff_w(space, w_self, pybuf, flags):
+ # XXX this is wrong, needs a test
+ raise oefmt(space.w_NotImplemented,
+ "calling bf_getbuffer on a builtin type not supported yet")
+ #args = Arguments(space, [w_self],
+ # w_stararg=w_args, w_starstararg=w_kwds)
+ #return space.call_args(space.get(buff_fn, w_self), args)
+ api_func = buff_w.api_func
else: