Author: Matti Picus <matti.pi...@gmail.com>
Branch: py3.5
Changeset: r93790:4e598c42abcc
Date: 2018-02-09 08:59 -0500
http://bitbucket.org/pypy/pypy/changeset/4e598c42abcc/

Log:    merge default into py3.5

diff too long, truncating to 2000 out of 4107 lines

diff --git a/lib_pypy/_sqlite3.py b/lib_pypy/_sqlite3.py
--- a/lib_pypy/_sqlite3.py
+++ b/lib_pypy/_sqlite3.py
@@ -155,9 +155,10 @@
     factory = Connection if not factory else factory
     # an sqlite3 db seems to be around 100 KiB at least (doesn't matter if
     # backed by :memory: or a file)
+    res = factory(database, timeout, detect_types, isolation_level,
+                    check_same_thread, factory, cached_statements, uri)
     add_memory_pressure(100 * 1024)
-    return factory(database, timeout, detect_types, isolation_level,
-                    check_same_thread, factory, cached_statements, uri)
+    return res
 
 
 def _unicode_text_factory(x):
diff --git a/lib_pypy/cffi.egg-info/PKG-INFO b/lib_pypy/cffi.egg-info/PKG-INFO
--- a/lib_pypy/cffi.egg-info/PKG-INFO
+++ b/lib_pypy/cffi.egg-info/PKG-INFO
@@ -1,11 +1,12 @@
 Metadata-Version: 1.1
 Name: cffi
-Version: 1.11.3
+Version: 1.11.4
 Summary: Foreign Function Interface for Python calling C code.
 Home-page: http://cffi.readthedocs.org
 Author: Armin Rigo, Maciej Fijalkowski
 Author-email: python-c...@googlegroups.com
 License: MIT
+Description-Content-Type: UNKNOWN
 Description: 
         CFFI
         ====
@@ -27,5 +28,7 @@
 Classifier: Programming Language :: Python :: 3.2
 Classifier: Programming Language :: Python :: 3.3
 Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
diff --git a/lib_pypy/cffi/__init__.py b/lib_pypy/cffi/__init__.py
--- a/lib_pypy/cffi/__init__.py
+++ b/lib_pypy/cffi/__init__.py
@@ -4,8 +4,8 @@
 from .api import FFI
 from .error import CDefError, FFIError, VerificationError, VerificationMissing
 
-__version__ = "1.11.3"
-__version_info__ = (1, 11, 3)
+__version__ = "1.11.4"
+__version_info__ = (1, 11, 4)
 
 # The verifier module file names are based on the CRC32 of a string that
 # contains the following version number.  It may be older than __version__
diff --git a/lib_pypy/cffi/_cffi_include.h b/lib_pypy/cffi/_cffi_include.h
--- a/lib_pypy/cffi/_cffi_include.h
+++ b/lib_pypy/cffi/_cffi_include.h
@@ -8,37 +8,20 @@
    the same works for the other two macros.  Py_DEBUG implies them,
    but not the other way around.
 
-   Issue #350: more mess: on Windows, with _MSC_VER, we have to define
-   Py_LIMITED_API even before including pyconfig.h.  In that case, we
-   guess what pyconfig.h will do to the macros above, and check our
-   guess after the #include.
+   Issue #350 is still open: on Windows, the code here causes it to link
+   with PYTHON36.DLL (for example) instead of PYTHON3.DLL.  A fix was
+   attempted in 164e526a5515 and 14ce6985e1c3, but reverted: virtualenv
+   does not make PYTHON3.DLL available, and so the "correctly" compiled
+   version would not run inside a virtualenv.  We will re-apply the fix
+   after virtualenv has been fixed for some time.  For explanation, see
+   issue #355.  For a workaround if you want PYTHON3.DLL and don't worry
+   about virtualenv, see issue #350.  See also 'py_limited_api' in
+   setuptools_ext.py.
 */
 #if !defined(_CFFI_USE_EMBEDDING) && !defined(Py_LIMITED_API)
-#  ifdef _MSC_VER
-#    if !defined(_DEBUG) && !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && 
!defined(Py_REF_DEBUG)
-#      define Py_LIMITED_API
-#    endif
-#    include <pyconfig.h>
-     /* sanity-check: Py_LIMITED_API will cause crashes if any of these
-        are also defined.  Normally, the Python file PC/pyconfig.h does not
-        cause any of these to be defined, with the exception that _DEBUG
-        causes Py_DEBUG.  Double-check that. */
-#    ifdef Py_LIMITED_API
-#      if defined(Py_DEBUG)
-#        error "pyconfig.h unexpectedly defines Py_DEBUG but _DEBUG is not set"
-#      endif
-#      if defined(Py_TRACE_REFS)
-#        error "pyconfig.h unexpectedly defines Py_TRACE_REFS"
-#      endif
-#      if defined(Py_REF_DEBUG)
-#        error "pyconfig.h unexpectedly defines Py_REF_DEBUG"
-#      endif
-#    endif
-#  else
-#    include <pyconfig.h>
-#    if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG)
-#      define Py_LIMITED_API
-#    endif
+#  include <pyconfig.h>
+#  if !defined(Py_DEBUG) && !defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG)
+#    define Py_LIMITED_API
 #  endif
 #endif
 
diff --git a/lib_pypy/cffi/_embedding.h b/lib_pypy/cffi/_embedding.h
--- a/lib_pypy/cffi/_embedding.h
+++ b/lib_pypy/cffi/_embedding.h
@@ -247,7 +247,7 @@
 
         if (f != NULL && f != Py_None) {
             PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
-                               "\ncompiled with cffi version: 1.11.3"
+                               "\ncompiled with cffi version: 1.11.4"
                                "\n_cffi_backend module: ", f);
             modules = PyImport_GetModuleDict();
             mod = PyDict_GetItemString(modules, "_cffi_backend");
diff --git a/lib_pypy/greenlet.egg-info b/lib_pypy/greenlet.egg-info
--- a/lib_pypy/greenlet.egg-info
+++ b/lib_pypy/greenlet.egg-info
@@ -1,6 +1,6 @@
 Metadata-Version: 1.0
 Name: greenlet
-Version: 0.4.12
+Version: 0.4.13
 Summary: Lightweight in-process concurrent programming
 Home-page: https://github.com/python-greenlet/greenlet
 Author: Ralf Schmitt (for CPython), PyPy team
diff --git a/lib_pypy/greenlet.py b/lib_pypy/greenlet.py
--- a/lib_pypy/greenlet.py
+++ b/lib_pypy/greenlet.py
@@ -2,7 +2,7 @@
 import __pypy__
 import _continuation
 
-__version__ = "0.4.12"
+__version__ = "0.4.13"
 
 # ____________________________________________________________
 # Exceptions
diff --git a/pypy/doc/project-ideas.rst b/pypy/doc/project-ideas.rst
--- a/pypy/doc/project-ideas.rst
+++ b/pypy/doc/project-ideas.rst
@@ -1,26 +1,41 @@
 Potential Project List
 ======================
 
-Google Summer of Code 2017
---------------------------
+Getting involved
+----------------
 
-PyPy is generally open to new ideas for Google Summer of Code. We are happy to 
accept good ideas around the PyPy ecosystem. If you need more information about 
the ideas we propose for this year please join us on irc, channel #pypy 
(freenode). If you are unsure, but still think that you can make a valuable 
contribution to PyPy, dont hesitate to contact us on #pypy or on our mailing 
list.
-
+We are happy to discuss ideas around the PyPy ecosystem.
+If you are interested in palying with RPython or PyPy, or have a new idea not
+mentioned here please join us on irc, channel #pypy (freenode). If you are 
unsure,
+but still think that you can make a valuable contribution to PyPy, dont
+hesitate to contact us on #pypy or on our mailing list. Here are some ideas
+to get you thinking:
 
 * **Optimize PyPy Memory Usage**:  Sometimes PyPy consumes more memory than 
CPython.
-  Two examples: 1) PyPy seems to allocate and keep alive more strings when 
importing a big Python modules.
-  2) The base interpreter size (cold VM started from a console) of PyPy is 
bigger than the one of CPython.
-  The general procedure of this project is: Run both CPython and PyPy of the 
same Python version and
-  compare the memory usage (using Massif or other tools).
+  Two examples: 1) PyPy seems to allocate and keep alive more strings when
+  importing a big Python modules.  2) The base interpreter size (cold VM 
started
+  from a console) of PyPy is bigger than the one of CPython. The general
+  procedure of this project is: Run both CPython and PyPy of the same Python
+  version and compare the memory usage (using Massif or other tools).
   If PyPy consumes a lot more memory then find and resolve the issue.
 
-* **VMProf + memory profiler**: vmprof by now has a memory profiler that can 
be used already. We want extend it with more features and resolve some current 
limitations.
+* **VMProf + memory profiler**: vmprof is a statistical memory profiler. We
+  want extend it with new features and resolve some current limitations.
 
-* **VMProf visualisations**: vmprof just shows a flame graph of the 
statistical profile and some more information about specific call sites. It 
would be very interesting to experiment with different information (such as 
memory, or even information generated by our jit compiler).
+* **VMProf visualisations**: vmprof shows a flame graph of the statistical
+  profile and some more information about specific call sites. It would be
+  very interesting to experiment with different information (such as memory,
+  or even information generated by our jit compiler).
 
-* **Explicit typing in RPython**: PyPy wants to have better ways to specify 
the signature and class attribute types in RPython. See more information about 
this topic below on this page.
+* **Explicit typing in RPython**: PyPy wants to have better ways to specify
+  the signature and class attribute types in RPython. See more information
+  about this topic below on this page.
 
-* **Virtual Reality (VR) visualisations for vmprof**: This is a very open 
topic with lots of freedom to explore data visualisation for profiles. No VR 
hardware would be needed for this project. Either universities provide such 
hardware or in any other case we potentially can lend the VR hardware setup.
+* **Virtual Reality (VR) visualisations for vmprof**: This is a very open
+  topic with lots of freedom to explore data visualisation for profiles. No
+  VR hardware would be needed for this project. Either universities provide
+  such hardware or in any other case we potentially can lend the VR hardware
+  setup.
 
 Simple tasks for newcomers
 --------------------------
@@ -34,6 +49,11 @@
 * Implement AF_XXX packet types of sockets:
   https://bitbucket.org/pypy/pypy/issue/1942/support-for-af_xxx-sockets
 
+* Help with documentation. One task would be to document rpython configuration
+  options currently listed only on :doc:`this site <configuration>` also on the
+  RPython_ documentation site.
+
+.. _RPython: http://rpython.readthedocs.io
 
 Mid-to-large tasks
 ------------------
@@ -201,7 +221,9 @@
 Introduce new benchmarks
 ------------------------
 
-We're usually happy to introduce new benchmarks. Please consult us
+Our benchmark runner_ is showing its age. We should merge with the `CPython 
site`_
+
+Additionally, we're usually happy to introduce new benchmarks. Please consult 
us
 before, but in general something that's real-world python code
 and is not already represented is welcome. We need at least a standalone
 script that can run without parameters. Example ideas (benchmarks need
@@ -209,6 +231,8 @@
 
 * `hg`
 
+.. _runner: http://speed.pypy.org
+.. _`CPython site`: https://speed.python.org/
 
 ======================================
 Make more python modules pypy-friendly
@@ -238,15 +262,6 @@
 using more pypy-friendly technologies, e.g. cffi. Here is a partial list of
 good work that needs to be finished:
 
-**matplotlib** https://github.com/matplotlib/matplotlib
-
-    Status: using the matplotlib branch of PyPy and the tkagg-cffi branch of
-    matplotlib from https://github.com/mattip/matplotlib/tree/tkagg-cffi, the
-    tkagg backend can function.
-
-    TODO: the matplotlib branch passes numpy arrays by value (copying all the
-    data), this proof-of-concept needs help to become completely compliant
-
 **wxPython** https://bitbucket.org/amauryfa/wxpython-cffi
 
     Status: A project by a PyPy developer to adapt the Phoenix sip build 
system to cffi
diff --git a/pypy/doc/whatsnew-head.rst b/pypy/doc/whatsnew-head.rst
--- a/pypy/doc/whatsnew-head.rst
+++ b/pypy/doc/whatsnew-head.rst
@@ -14,3 +14,29 @@
 .. branch: cpyext-datetime2
 
 Support ``tzinfo`` field on C-API datetime objects, fixes latest pandas HEAD
+
+
+.. branch: mapdict-size-limit
+
+Fix a corner case of mapdict: When an instance is used like a dict (using
+``setattr`` and ``getattr``, or ``.__dict__``) and a lot of attributes are
+added, then the performance using mapdict is linear in the number of
+attributes. This is now fixed (by switching to a regular dict after 80
+attributes).
+
+
+.. branch: cpyext-faster-arg-passing
+
+When using cpyext, improve the speed of passing certain objects from PyPy to C
+code, most notably None, True, False, types, all instances of C-defined types.
+Before, a dict lookup was needed every time such an object crossed over, now it
+is just a field read.
+
+
+.. branch: 2634_datetime_timedelta_performance
+
+Improve datetime + timedelta performance.
+
+.. branch: memory-accounting
+
+Improve way to describe memory
diff --git a/pypy/interpreter/astcompiler/astbuilder.py 
b/pypy/interpreter/astcompiler/astbuilder.py
--- a/pypy/interpreter/astcompiler/astbuilder.py
+++ b/pypy/interpreter/astcompiler/astbuilder.py
@@ -1,23 +1,15 @@
 from pypy.interpreter.astcompiler import ast, consts, misc
 from pypy.interpreter.astcompiler import asthelpers # Side effects
-from pypy.interpreter.astcompiler import fstring
 from pypy.interpreter import error
 from pypy.interpreter.pyparser.pygram import syms, tokens
 from pypy.interpreter.pyparser.error import SyntaxError
-from rpython.rlib.objectmodel import always_inline, we_are_translated
+from pypy.interpreter.pyparser import parsestring
+from rpython.rlib.objectmodel import specialize
 
 
-def ast_from_node(space, node, compile_info, recursive_parser=None):
+def ast_from_node(space, node, compile_info):
     """Turn a parse tree, node, to AST."""
-    ast = ASTBuilder(space, node, compile_info, recursive_parser).build_ast()
-    #
-    # When we are not translated, we send this ast to validate_ast.
-    # The goal is to check that validate_ast doesn't crash on valid
-    # asts, at least.
-    if not we_are_translated():
-        from pypy.interpreter.astcompiler import validate
-        validate.validate_ast(space, ast)
-    return ast
+    return ASTBuilder(space, node, compile_info).build_ast()
 
 
 augassign_operator_map = {
@@ -26,7 +18,6 @@
     '/='  : ast.Div,
     '//=' : ast.FloorDiv,
     '%='  : ast.Mod,
-    '@='  : ast.MatMult,
     '<<='  : ast.LShift,
     '>>='  : ast.RShift,
     '&='  : ast.BitAnd,
@@ -47,18 +38,16 @@
     tokens.STAR : ast.Mult,
     tokens.SLASH : ast.Div,
     tokens.DOUBLESLASH : ast.FloorDiv,
-    tokens.PERCENT : ast.Mod,
-    tokens.AT : ast.MatMult
+    tokens.PERCENT : ast.Mod
 })
 
 
 class ASTBuilder(object):
 
-    def __init__(self, space, n, compile_info, recursive_parser=None):
+    def __init__(self, space, n, compile_info):
         self.space = space
         self.compile_info = compile_info
         self.root_node = n
-        self.recursive_parser = recursive_parser
 
     def build_ast(self):
         """Convert an top level parse tree node into an AST mod."""
@@ -129,9 +118,6 @@
         except misc.ForbiddenNameAssignment as e:
             self.error("cannot assign to %s" % (e.name,), node)
 
-    def new_identifier(self, name):
-        return misc.new_identifier(self.space, name)
-
     def set_context(self, expr, ctx):
         """Set the context of an expression to Store or Del if possible."""
         try:
@@ -141,6 +127,23 @@
         except misc.ForbiddenNameAssignment as e:
             self.error_ast("cannot assign to %s" % (e.name,), e.node)
 
+    def handle_print_stmt(self, print_node):
+        dest = None
+        expressions = None
+        newline = True
+        start = 1
+        child_count = print_node.num_children()
+        if child_count > 2 and print_node.get_child(1).type == 
tokens.RIGHTSHIFT:
+            dest = self.handle_expr(print_node.get_child(2))
+            start = 4
+        if (child_count + 1 - start) // 2:
+            expressions = [self.handle_expr(print_node.get_child(i))
+                           for i in range(start, child_count, 2)]
+        if print_node.get_child(-1).type == tokens.COMMA:
+            newline = False
+        return ast.Print(dest, expressions, newline, print_node.get_lineno(),
+                         print_node.get_column())
+
     def handle_del_stmt(self, del_node):
         targets = self.handle_exprlist(del_node.get_child(1), ast.Del)
         return ast.Delete(targets, del_node.get_lineno(), 
del_node.get_column())
@@ -163,13 +166,17 @@
             return ast.Return(values, flow_node.get_lineno(), 
flow_node.get_column())
         elif first_child_type == syms.raise_stmt:
             exc = None
-            cause = None
+            value = None
+            traceback = None
             child_count = first_child.num_children()
             if child_count >= 2:
                 exc = self.handle_expr(first_child.get_child(1))
             if child_count >= 4:
-                cause = self.handle_expr(first_child.get_child(3))
-            return ast.Raise(exc, cause, flow_node.get_lineno(), 
flow_node.get_column())
+                value = self.handle_expr(first_child.get_child(3))
+            if child_count == 6:
+                traceback = self.handle_expr(first_child.get_child(5))
+            return ast.Raise(exc, value, traceback, flow_node.get_lineno(),
+                             flow_node.get_column())
         else:
             raise AssertionError("unknown flow statement")
 
@@ -177,10 +184,9 @@
         while True:
             import_name_type = import_name.type
             if import_name_type == syms.import_as_name:
-                name = 
self.new_identifier(import_name.get_child(0).get_value())
+                name = import_name.get_child(0).get_value()
                 if import_name.num_children() == 3:
-                    as_name = self.new_identifier(
-                        import_name.get_child(2).get_value())
+                    as_name = import_name.get_child(2).get_value()
                     self.check_forbidden_name(as_name, 
import_name.get_child(2))
                 else:
                     as_name = None
@@ -193,12 +199,12 @@
                 alias = self.alias_for_import_name(import_name.get_child(0),
                                                    store=False)
                 asname_node = import_name.get_child(2)
-                alias.asname = self.new_identifier(asname_node.get_value())
+                alias.asname = asname_node.get_value()
                 self.check_forbidden_name(alias.asname, asname_node)
                 return alias
             elif import_name_type == syms.dotted_name:
                 if import_name.num_children() == 1:
-                    name = 
self.new_identifier(import_name.get_child(0).get_value())
+                    name = import_name.get_child(0).get_value()
                     if store:
                         self.check_forbidden_name(name, 
import_name.get_child(0))
                     return ast.alias(name, None)
@@ -226,15 +232,11 @@
             dot_count = 0
             while i < child_count:
                 child = import_node.get_child(i)
-                child_type = child.type
-                if child_type == syms.dotted_name:
+                if child.type == syms.dotted_name:
                     module = self.alias_for_import_name(child, False)
                     i += 1
                     break
-                elif child_type == tokens.ELLIPSIS:
-                    # Special case for tokenization.
-                    dot_count += 2
-                elif child_type != tokens.DOT:
+                elif child.type != tokens.DOT:
                     break
                 i += 1
                 dot_count += 1
@@ -266,14 +268,28 @@
             raise AssertionError("unknown import node")
 
     def handle_global_stmt(self, global_node):
-        names = [self.new_identifier(global_node.get_child(i).get_value())
+        names = [global_node.get_child(i).get_value()
                  for i in range(1, global_node.num_children(), 2)]
         return ast.Global(names, global_node.get_lineno(), 
global_node.get_column())
 
-    def handle_nonlocal_stmt(self, nonlocal_node):
-        names = [self.new_identifier(nonlocal_node.get_child(i).get_value())
-                 for i in range(1, nonlocal_node.num_children(), 2)]
-        return ast.Nonlocal(names, nonlocal_node.get_lineno(), 
nonlocal_node.get_column())
+    def handle_exec_stmt(self, exec_node):
+        child_count = exec_node.num_children()
+        globs = None
+        locs = None
+        to_execute = self.handle_expr(exec_node.get_child(1))
+        if child_count < 4:
+            if isinstance(to_execute, ast.Tuple) and \
+                    (len(to_execute.elts) == 2 or len(to_execute.elts) == 3):
+                globs = to_execute.elts[1]
+                if len(to_execute.elts) == 3:
+                    locs = to_execute.elts[2]
+                to_execute = to_execute.elts[0]
+        elif child_count >= 4:
+            globs = self.handle_expr(exec_node.get_child(3))
+            if child_count == 6:
+                locs = self.handle_expr(exec_node.get_child(5))
+        return ast.Exec(to_execute, globs, locs, exec_node.get_lineno(),
+                        exec_node.get_column())
 
     def handle_assert_stmt(self, assert_node):
         expr = self.handle_expr(assert_node.get_child(1))
@@ -363,7 +379,7 @@
         return ast.While(loop_test, body, otherwise, while_node.get_lineno(),
                          while_node.get_column())
 
-    def handle_for_stmt(self, for_node, is_async):
+    def handle_for_stmt(self, for_node):
         target_node = for_node.get_child(1)
         target_as_exprlist = self.handle_exprlist(target_node, ast.Store)
         if target_node.num_children() == 1:
@@ -377,25 +393,21 @@
             otherwise = self.handle_suite(for_node.get_child(8))
         else:
             otherwise = None
-        if is_async:
-            return ast.AsyncFor(target, expr, body, otherwise, 
for_node.get_lineno(),
-                                for_node.get_column())
-        else:
-            return ast.For(target, expr, body, otherwise, 
for_node.get_lineno(),
-                           for_node.get_column())
+        return ast.For(target, expr, body, otherwise, for_node.get_lineno(),
+                       for_node.get_column())
 
     def handle_except_clause(self, exc, body):
         test = None
-        name = None
+        target = None
         suite = self.handle_suite(body)
         child_count = exc.num_children()
         if child_count >= 2:
             test = self.handle_expr(exc.get_child(1))
         if child_count == 4:
-            name_node = exc.get_child(3)
-            name = self.new_identifier(name_node.get_value())
-            self.check_forbidden_name(name, name_node)
-        return ast.ExceptHandler(test, name, suite, exc.get_lineno(), 
exc.get_column())
+            target_child = exc.get_child(3)
+            target = self.handle_expr(target_child)
+            self.set_context(target, ast.Store)
+        return ast.ExceptHandler(test, target, suite, exc.get_lineno(), 
exc.get_column())
 
     def handle_try_stmt(self, try_node):
         body = self.handle_suite(try_node.get_child(2))
@@ -415,100 +427,70 @@
             else:
                 otherwise = self.handle_suite(try_node.get_child(-1))
                 except_count -= 1
-        handlers = []
         if except_count:
+            handlers = []
             for i in range(except_count):
                 base_offset = i * 3
                 exc = try_node.get_child(3 + base_offset)
                 except_body = try_node.get_child(5 + base_offset)
                 handlers.append(self.handle_except_clause(exc, except_body))
-        return ast.Try(body, handlers, otherwise, finally_suite,
-                       try_node.get_lineno(), try_node.get_column())
+            except_ast = ast.TryExcept(body, handlers, otherwise,
+                                       try_node.get_lineno(), 
try_node.get_column())
+            if finally_suite is None:
+                return except_ast
+            body = [except_ast]
+        return ast.TryFinally(body, finally_suite, try_node.get_lineno(),
+                              try_node.get_column())
 
-    def handle_with_item(self, item_node):
-        test = self.handle_expr(item_node.get_child(0))
-        if item_node.num_children() == 3:
-            target = self.handle_expr(item_node.get_child(2))
-            self.set_context(target, ast.Store)
-        else:
-            target = None
-        return ast.withitem(test, target)
-
-    def handle_with_stmt(self, with_node, is_async):
+    def handle_with_stmt(self, with_node):
         body = self.handle_suite(with_node.get_child(-1))
-        items = [self.handle_with_item(with_node.get_child(i))
-                 for i in range(1, with_node.num_children()-2, 2)]
-        if is_async:
-            return ast.AsyncWith(items, body, with_node.get_lineno(),
-                                 with_node.get_column())
-        else:
-            return ast.With(items, body, with_node.get_lineno(),
-                            with_node.get_column())
+        i = with_node.num_children() - 1
+        while True:
+            i -= 2
+            item = with_node.get_child(i)
+            test = self.handle_expr(item.get_child(0))
+            if item.num_children() == 3:
+                target = self.handle_expr(item.get_child(2))
+                self.set_context(target, ast.Store)
+            else:
+                target = None
+            wi = ast.With(test, target, body, with_node.get_lineno(),
+                          with_node.get_column())
+            if i == 1:
+                break
+            body = [wi]
+        return wi
 
     def handle_classdef(self, classdef_node, decorators=None):
         name_node = classdef_node.get_child(1)
-        name = self.new_identifier(name_node.get_value())
+        name = name_node.get_value()
         self.check_forbidden_name(name, name_node)
         if classdef_node.num_children() == 4:
-            # class NAME ':' suite
             body = self.handle_suite(classdef_node.get_child(3))
-            return ast.ClassDef(name, None, None, body, decorators,
+            return ast.ClassDef(name, None, body, decorators,
                                 classdef_node.get_lineno(), 
classdef_node.get_column())
         if classdef_node.get_child(3).type == tokens.RPAR:
-            # class NAME '(' ')' ':' suite
             body = self.handle_suite(classdef_node.get_child(5))
-            return ast.ClassDef(name, None, None, body, decorators,
+            return ast.ClassDef(name, None, body, decorators,
                                 classdef_node.get_lineno(), 
classdef_node.get_column())
-
-        # class NAME '(' arglist ')' ':' suite
-        # build up a fake Call node so we can extract its pieces
-        call_name = ast.Name(name, ast.Load, classdef_node.get_lineno(),
-                             classdef_node.get_column())
-        call = self.handle_call(classdef_node.get_child(3), call_name)
+        bases = self.handle_class_bases(classdef_node.get_child(3))
         body = self.handle_suite(classdef_node.get_child(6))
-        return ast.ClassDef(
-            name, call.args, call.keywords,
-            body, decorators, classdef_node.get_lineno(), 
classdef_node.get_column())
+        return ast.ClassDef(name, bases, body, decorators, 
classdef_node.get_lineno(),
+                            classdef_node.get_column())
 
     def handle_class_bases(self, bases_node):
         if bases_node.num_children() == 1:
             return [self.handle_expr(bases_node.get_child(0))]
         return self.get_expression_list(bases_node)
 
-    def handle_funcdef_impl(self, funcdef_node, is_async, decorators=None):
+    def handle_funcdef(self, funcdef_node, decorators=None):
         name_node = funcdef_node.get_child(1)
-        name = self.new_identifier(name_node.get_value())
+        name = name_node.get_value()
         self.check_forbidden_name(name, name_node)
         args = self.handle_arguments(funcdef_node.get_child(2))
-        suite = 4
-        returns = None
-        if funcdef_node.get_child(3).type == tokens.RARROW:
-            returns = self.handle_expr(funcdef_node.get_child(4))
-            suite += 2
-        body = self.handle_suite(funcdef_node.get_child(suite))
-        if is_async:
-            return ast.AsyncFunctionDef(name, args, body, decorators, returns,
-                                        funcdef_node.get_lineno(), 
funcdef_node.get_column())
-        else:
-            return ast.FunctionDef(name, args, body, decorators, returns,
-                                   funcdef_node.get_lineno(), 
funcdef_node.get_column())
-
-    def handle_async_funcdef(self, node, decorators=None):
-        return self.handle_funcdef_impl(node.get_child(1), 1, decorators)
-    
-    def handle_funcdef(self, node, decorators=None):
-        return self.handle_funcdef_impl(node, 0, decorators)
-    
-    def handle_async_stmt(self, node):
-        ch = node.get_child(1)
-        if ch.type == syms.funcdef:
-            return self.handle_funcdef_impl(ch, 1)
-        elif ch.type == syms.with_stmt:
-            return self.handle_with_stmt(ch, 1)
-        elif ch.type == syms.for_stmt:
-            return self.handle_for_stmt(ch, 1)
-        else:
-            raise AssertionError("invalid async statement")
+        body = self.handle_suite(funcdef_node.get_child(4))
+        return ast.FunctionDef(name, args, body, decorators,
+                               funcdef_node.get_lineno(), 
funcdef_node.get_column())
 
     def handle_decorated(self, decorated_node):
         decorators = self.handle_decorators(decorated_node.get_child(0))
@@ -517,8 +499,6 @@
             node = self.handle_funcdef(definition, decorators)
         elif definition.type == syms.classdef:
             node = self.handle_classdef(definition, decorators)
-        elif definition.type == syms.async_funcdef:
-            node = self.handle_async_funcdef(definition, decorators)
         else:
             raise AssertionError("unkown decorated")
         node.lineno = decorated_node.get_lineno()
@@ -534,152 +514,110 @@
         if decorator_node.num_children() == 3:
             dec = dec_name
         elif decorator_node.num_children() == 5:
-            dec = ast.Call(dec_name, None, None,
+            dec = ast.Call(dec_name, None, None, None, None,
                            decorator_node.get_lineno(), 
decorator_node.get_column())
         else:
             dec = self.handle_call(decorator_node.get_child(3), dec_name)
         return dec
 
     def handle_dotted_name(self, dotted_name_node):
-        base_value = 
self.new_identifier(dotted_name_node.get_child(0).get_value())
+        base_value = dotted_name_node.get_child(0).get_value()
         name = ast.Name(base_value, ast.Load, dotted_name_node.get_lineno(),
                         dotted_name_node.get_column())
         for i in range(2, dotted_name_node.num_children(), 2):
             attr = dotted_name_node.get_child(i).get_value()
-            attr = self.new_identifier(attr)
             name = ast.Attribute(name, attr, ast.Load, 
dotted_name_node.get_lineno(),
                                  dotted_name_node.get_column())
         return name
 
     def handle_arguments(self, arguments_node):
-        # This function handles both typedargslist (function definition)
-        # and varargslist (lambda definition).
         if arguments_node.type == syms.parameters:
             if arguments_node.num_children() == 2:
-                return ast.arguments(None, None, None, None, None, None)
+                return ast.arguments(None, None, None, None)
             arguments_node = arguments_node.get_child(1)
         i = 0
         child_count = arguments_node.num_children()
-        n_pos = 0
-        n_pos_def = 0
-        n_kwdonly = 0
-        # scan args
-        while i < child_count:
-            arg_type = arguments_node.get_child(i).type
-            if arg_type == tokens.STAR:
-                i += 1
-                if i < child_count:
-                    next_arg_type = arguments_node.get_child(i).type
-                    if (next_arg_type == syms.tfpdef or
-                        next_arg_type == syms.vfpdef):
-                        i += 1
-                break
-            if arg_type == tokens.DOUBLESTAR:
-                break
-            if arg_type == syms.vfpdef or arg_type == syms.tfpdef:
-                n_pos += 1
-            if arg_type == tokens.EQUAL:
-                n_pos_def += 1
-            i += 1
-        while i < child_count:
-            arg_type = arguments_node.get_child(i).type
-            if arg_type == tokens.DOUBLESTAR:
-                break
-            if arg_type == syms.vfpdef or arg_type == syms.tfpdef:
-                n_kwdonly += 1
-            i += 1
-        pos = []
-        posdefaults = []
-        kwonly = [] if n_kwdonly else None
-        kwdefaults = []
-        kwarg = None
-        vararg = None
-        if n_pos + n_kwdonly > 255:
-            self.error("more than 255 arguments", arguments_node)
-        # process args
-        i = 0
+        defaults = []
+        args = []
+        variable_arg = None
+        keywords_arg = None
         have_default = False
         while i < child_count:
-            arg = arguments_node.get_child(i)
-            arg_type = arg.type
-            if arg_type == syms.tfpdef or arg_type == syms.vfpdef:
-                if i + 1 < child_count and \
-                        arguments_node.get_child(i + 1).type == tokens.EQUAL:
-                    default_node = arguments_node.get_child(i + 2)
-                    posdefaults.append(self.handle_expr(default_node))
+            argument = arguments_node.get_child(i)
+            arg_type = argument.type
+            if arg_type == syms.fpdef:
+                parenthesized = False
+                complex_args = False
+                while True:
+                    if i + 1 < child_count and \
+                            arguments_node.get_child(i + 1).type == 
tokens.EQUAL:
+                        default_node = arguments_node.get_child(i + 2)
+                        defaults.append(self.handle_expr(default_node))
+                        i += 2
+                        have_default = True
+                    elif have_default:
+                        if parenthesized and not complex_args:
+                            msg = "parenthesized arg with default"
+                        else:
+                            msg = ("non-default argument follows default "
+                                   "argument")
+                        self.error(msg, arguments_node)
+                    if argument.num_children() == 3:
+                        sub_arg = argument.get_child(1)
+                        if sub_arg.num_children() != 1:
+                            complex_args = True
+                            args.append(self.handle_arg_unpacking(sub_arg))
+                        else:
+                            parenthesized = True
+                            argument = sub_arg.get_child(0)
+                            continue
+                    if argument.get_child(0).type == tokens.NAME:
+                        name_node = argument.get_child(0)
+                        arg_name = name_node.get_value()
+                        self.check_forbidden_name(arg_name, name_node)
+                        name = ast.Name(arg_name, ast.Param, 
name_node.get_lineno(),
+                                        name_node.get_column())
+                        args.append(name)
                     i += 2
-                    have_default = True
-                elif have_default:
-                    msg = "non-default argument follows default argument"
-                    self.error(msg, arguments_node)
-                pos.append(self.handle_arg(arg))
-                i += 2
+                    break
             elif arg_type == tokens.STAR:
-                if i + 1 >= child_count:
-                    self.error("named arguments must follow bare *",
-                               arguments_node)
                 name_node = arguments_node.get_child(i + 1)
-                keywordonly_args = []
-                if name_node.type == tokens.COMMA:
-                    i += 2
-                    i = self.handle_keywordonly_args(arguments_node, i, kwonly,
-                                                     kwdefaults)
-                else:
-                    vararg = self.handle_arg(name_node)
-                    i += 3
-                    if i < child_count:
-                        next_arg_type = arguments_node.get_child(i).type
-                        if (next_arg_type == syms.tfpdef or
-                            next_arg_type == syms.vfpdef):
-                            i = self.handle_keywordonly_args(arguments_node, i,
-                                                             kwonly, 
kwdefaults)
+                variable_arg = name_node.get_value()
+                self.check_forbidden_name(variable_arg, name_node)
+                i += 3
             elif arg_type == tokens.DOUBLESTAR:
                 name_node = arguments_node.get_child(i + 1)
-                kwarg = self.handle_arg(name_node)
+                keywords_arg = name_node.get_value()
+                self.check_forbidden_name(keywords_arg, name_node)
                 i += 3
             else:
                 raise AssertionError("unknown node in argument list")
-        return ast.arguments(pos, vararg, kwonly, kwdefaults, kwarg,
-                             posdefaults)
+        if not defaults:
+            defaults = None
+        if not args:
+            args = None
+        return ast.arguments(args, variable_arg, keywords_arg, defaults)
 
-    def handle_keywordonly_args(self, arguments_node, i, kwonly, kwdefaults):
-        if kwonly is None:
-            self.error("named arguments must follows bare *",
-                       arguments_node.get_child(i))
-        child_count = arguments_node.num_children()
-        while i < child_count:
-            arg = arguments_node.get_child(i)
-            arg_type = arg.type
-            if arg_type == syms.vfpdef or arg_type == syms.tfpdef:
-                if (i + 1 < child_count and
-                    arguments_node.get_child(i + 1).type == tokens.EQUAL):
-                    expr = self.handle_expr(arguments_node.get_child(i + 2))
-                    kwdefaults.append(expr)
-                    i += 2
+    def handle_arg_unpacking(self, fplist_node):
+        args = []
+        for i in range((fplist_node.num_children() + 1) / 2):
+            fpdef_node = fplist_node.get_child(i * 2)
+            while True:
+                child = fpdef_node.get_child(0)
+                if child.type == tokens.NAME:
+                    arg = ast.Name(child.get_value(), ast.Store, 
child.get_lineno(),
+                                   child.get_column())
+                    args.append(arg)
                 else:
-                    kwdefaults.append(None)
-                ann = None
-                if arg.num_children() == 3:
-                    ann = self.handle_expr(arg.get_child(2))
-                name_node = arg.get_child(0)
-                argname = name_node.get_value()
-                argname = self.new_identifier(argname)
-                self.check_forbidden_name(argname, name_node)
-                kwonly.append(ast.arg(argname, ann, arg.get_lineno(),
-                                                    arg.get_column()))
-                i += 2
-            elif arg_type == tokens.DOUBLESTAR:
-                return i
-        return i
-
-    def handle_arg(self, arg_node):
-        name_node = arg_node.get_child(0)
-        name = self.new_identifier(name_node.get_value())
-        self.check_forbidden_name(name, arg_node)
-        ann = None
-        if arg_node.num_children() == 3:
-            ann = self.handle_expr(arg_node.get_child(2))
-        return ast.arg(name, ann, arg_node.get_lineno(), arg_node.get_column())
+                    child = fpdef_node.get_child(1)
+                    if child.num_children() == 1:
+                        fpdef_node = child.get_child(0)
+                        continue
+                    args.append(self.handle_arg_unpacking(child))
+                break
+        tup = ast.Tuple(args, ast.Store, fplist_node.get_lineno(), 
fplist_node.get_column())
+        self.set_context(tup, ast.Store)
+        return tup
 
     def handle_stmt(self, stmt):
         stmt_type = stmt.type
@@ -694,6 +632,8 @@
             stmt_type = stmt.type
             if stmt_type == syms.expr_stmt:
                 return self.handle_expr_stmt(stmt)
+            elif stmt_type == syms.print_stmt:
+                return self.handle_print_stmt(stmt)
             elif stmt_type == syms.del_stmt:
                 return self.handle_del_stmt(stmt)
             elif stmt_type == syms.pass_stmt:
@@ -704,10 +644,10 @@
                 return self.handle_import_stmt(stmt)
             elif stmt_type == syms.global_stmt:
                 return self.handle_global_stmt(stmt)
-            elif stmt_type == syms.nonlocal_stmt:
-                return self.handle_nonlocal_stmt(stmt)
             elif stmt_type == syms.assert_stmt:
                 return self.handle_assert_stmt(stmt)
+            elif stmt_type == syms.exec_stmt:
+                return self.handle_exec_stmt(stmt)
             else:
                 raise AssertionError("unhandled small statement")
         elif stmt_type == syms.compound_stmt:
@@ -718,19 +658,17 @@
             elif stmt_type == syms.while_stmt:
                 return self.handle_while_stmt(stmt)
             elif stmt_type == syms.for_stmt:
-                return self.handle_for_stmt(stmt, 0)
+                return self.handle_for_stmt(stmt)
             elif stmt_type == syms.try_stmt:
                 return self.handle_try_stmt(stmt)
             elif stmt_type == syms.with_stmt:
-                return self.handle_with_stmt(stmt, 0)
+                return self.handle_with_stmt(stmt)
             elif stmt_type == syms.funcdef:
                 return self.handle_funcdef(stmt)
             elif stmt_type == syms.classdef:
                 return self.handle_classdef(stmt)
             elif stmt_type == syms.decorated:
                 return self.handle_decorated(stmt)
-            elif stmt_type == syms.async_stmt:
-                return self.handle_async_stmt(stmt)
             else:
                 raise AssertionError("unhandled compound statement")
         else:
@@ -760,13 +698,12 @@
             for i in range(0, stmt.num_children() - 2, 2):
                 target_node = stmt.get_child(i)
                 if target_node.type == syms.yield_expr:
-                    self.error("assignment to yield expression not possible",
-                               target_node)
+                    self.error("can't assign to yield expression", target_node)
                 target_expr = self.handle_testlist(target_node)
                 self.set_context(target_expr, ast.Store)
                 targets.append(target_expr)
             value_child = stmt.get_child(-1)
-            if value_child.type == syms.testlist_star_expr:
+            if value_child.type == syms.testlist:
                 value_expr = self.handle_testlist(value_child)
             else:
                 value_expr = self.handle_expr(value_child)
@@ -787,9 +724,9 @@
         # Loop until we return something.
         while True:
             expr_node_type = expr_node.type
-            if expr_node_type == syms.test or expr_node_type == 
syms.test_nocond:
+            if expr_node_type == syms.test or expr_node_type == syms.old_test:
                 first_child = expr_node.get_child(0)
-                if first_child.type in (syms.lambdef, syms.lambdef_nocond):
+                if first_child.type in (syms.lambdef, syms.old_lambdef):
                     return self.handle_lambdef(first_child)
                 elif expr_node.num_children() > 1:
                     return self.handle_ifexp(expr_node)
@@ -826,8 +763,6 @@
                     operands.append(self.handle_expr(expr_node.get_child(i + 
1)))
                 return ast.Compare(expr, operators, operands, 
expr_node.get_lineno(),
                                    expr_node.get_column())
-            elif expr_node_type == syms.star_expr:
-                return self.handle_star_expr(expr_node)
             elif expr_node_type == syms.expr or \
                     expr_node_type == syms.xor_expr or \
                     expr_node_type == syms.and_expr or \
@@ -839,19 +774,11 @@
                     continue
                 return self.handle_binop(expr_node)
             elif expr_node_type == syms.yield_expr:
-                is_from = False
-                if expr_node.num_children() > 1:
-                    arg_node = expr_node.get_child(1)  # yield arg
-                    if arg_node.num_children() == 2:
-                        is_from = True
-                        expr = self.handle_expr(arg_node.get_child(1))
-                    else:
-                        expr = self.handle_testlist(arg_node.get_child(0))
+                if expr_node.num_children() == 2:
+                    exp = self.handle_testlist(expr_node.get_child(1))
                 else:
-                    expr = None
-                if is_from:
-                    return ast.YieldFrom(expr, expr_node.get_lineno(), 
expr_node.get_column())
-                return ast.Yield(expr, expr_node.get_lineno(), 
expr_node.get_column())
+                    exp = None
+                return ast.Yield(exp, expr_node.get_lineno(), 
expr_node.get_column())
             elif expr_node_type == syms.factor:
                 if expr_node.num_children() == 1:
                     expr_node = expr_node.get_child(0)
@@ -862,15 +789,10 @@
             else:
                 raise AssertionError("unknown expr")
 
-    def handle_star_expr(self, star_expr_node):
-        expr = self.handle_expr(star_expr_node.get_child(1))
-        return ast.Starred(expr, ast.Load, star_expr_node.get_lineno(),
-                           star_expr_node.get_column())
-
     def handle_lambdef(self, lambdef_node):
         expr = self.handle_expr(lambdef_node.get_child(-1))
         if lambdef_node.num_children() == 3:
-            args = ast.arguments(None, None, None, None, None, None)
+            args = ast.arguments(None, None, None, None)
         else:
             args = self.handle_arguments(lambdef_node.get_child(1))
         return ast.Lambda(args, expr, lambdef_node.get_lineno(), 
lambdef_node.get_column())
@@ -897,11 +819,6 @@
             elif comp_type == tokens.GREATEREQUAL:
                 return ast.GtE
             elif comp_type == tokens.NOTEQUAL:
-                flufl = self.compile_info.flags & 
consts.CO_FUTURE_BARRY_AS_BDFL
-                if flufl and comp_node.get_value() == '!=':
-                    self.error('invalid comparison', comp_node)
-                elif not flufl and comp_node.get_value() == '<>':
-                    self.error('invalid comparison', comp_node)
                 return ast.NotEq
             elif comp_type == tokens.NAME:
                 if comp_node.get_value() == "is":
@@ -937,6 +854,20 @@
 
     def handle_factor(self, factor_node):
         from pypy.interpreter.pyparser.parser import Terminal
+        # Fold '-' on constant numbers.
+        if factor_node.get_child(0).type == tokens.MINUS and \
+                factor_node.num_children() == 2:
+            factor = factor_node.get_child(1)
+            if factor.type == syms.factor and factor.num_children() == 1:
+                power = factor.get_child(0)
+                if power.type == syms.power and power.num_children() == 1:
+                    atom = power.get_child(0)
+                    if atom.type == syms.atom and \
+                            atom.get_child(0).type == tokens.NUMBER:
+                        num = atom.get_child(0)
+                        assert isinstance(num, Terminal)
+                        num.value = "-" + num.get_value()
+                        return self.handle_atom(atom)
         expr = self.handle_expr(factor_node.get_child(1))
         op_type = factor_node.get_child(0).type
         if op_type == tokens.PLUS:
@@ -949,35 +880,18 @@
             raise AssertionError("invalid factor node")
         return ast.UnaryOp(op, expr, factor_node.get_lineno(), 
factor_node.get_column())
 
-    def handle_atom_expr(self, atom_node):
-        start = 0
-        num_ch = atom_node.num_children()
-        if atom_node.get_child(0).type == tokens.AWAIT:
-            start = 1
-        atom_expr = self.handle_atom(atom_node.get_child(start))
-        if num_ch == 1:
+    def handle_power(self, power_node):
+        atom_expr = self.handle_atom(power_node.get_child(0))
+        if power_node.num_children() == 1:
             return atom_expr
-        if start and num_ch == 2:
-            return ast.Await(atom_expr, atom_node.get_lineno(),
-                             atom_node.get_column())
-        for i in range(start+1, num_ch):
-            trailer = atom_node.get_child(i)
+        for i in range(1, power_node.num_children()):
+            trailer = power_node.get_child(i)
             if trailer.type != syms.trailer:
                 break
             tmp_atom_expr = self.handle_trailer(trailer, atom_expr)
             tmp_atom_expr.lineno = atom_expr.lineno
             tmp_atom_expr.col_offset = atom_expr.col_offset
             atom_expr = tmp_atom_expr
-        if start:
-            return ast.Await(atom_expr, atom_node.get_lineno(),
-                             atom_node.get_column())
-        else:
-            return atom_expr
-    
-    def handle_power(self, power_node):
-        atom_expr = self.handle_atom_expr(power_node.get_child(0))
-        if power_node.num_children() == 1:
-            return atom_expr
         if power_node.get_child(-1).type == syms.factor:
             right = self.handle_expr(power_node.get_child(-1))
             atom_expr = ast.BinOp(atom_expr, ast.Pow, right, 
power_node.get_lineno(),
@@ -986,6 +900,8 @@
 
     def handle_slice(self, slice_node):
         first_child = slice_node.get_child(0)
+        if first_child.type == tokens.DOT:
+            return ast.Ellipsis()
         if slice_node.num_children() == 1 and first_child.type == syms.test:
             index = self.handle_expr(first_child)
             return ast.Index(index)
@@ -1005,7 +921,10 @@
                 upper = self.handle_expr(third_child)
         last_child = slice_node.get_child(-1)
         if last_child.type == syms.sliceop:
-            if last_child.num_children() != 1:
+            if last_child.num_children() == 1:
+                step = ast.Name("None", ast.Load, last_child.get_lineno(),
+                                last_child.get_column())
+            else:
                 step_child = last_child.get_child(1)
                 if step_child.type == syms.test:
                     step = self.handle_expr(step_child)
@@ -1015,12 +934,12 @@
         first_child = trailer_node.get_child(0)
         if first_child.type == tokens.LPAR:
             if trailer_node.num_children() == 2:
-                return ast.Call(left_expr, None, None,
+                return ast.Call(left_expr, None, None, None, None,
                                 trailer_node.get_lineno(), 
trailer_node.get_column())
             else:
                 return self.handle_call(trailer_node.get_child(1), left_expr)
         elif first_child.type == tokens.DOT:
-            attr = self.new_identifier(trailer_node.get_child(1).get_value())
+            attr = trailer_node.get_child(1).get_value()
             return ast.Attribute(left_expr, attr, ast.Load,
                                  trailer_node.get_lineno(), 
trailer_node.get_column())
         else:
@@ -1049,9 +968,9 @@
                                  middle.get_lineno(), middle.get_column())
 
     def handle_call(self, args_node, callable_expr):
-        arg_count = 0 # position args + iterable args unpackings
-        keyword_count = 0 # keyword args + keyword args unpackings
-        generator_count = 0 
+        arg_count = 0
+        keyword_count = 0
+        generator_count = 0
         for i in range(args_node.num_children()):
             argument = args_node.get_child(i)
             if argument.type == syms.argument:
@@ -1059,11 +978,7 @@
                     arg_count += 1
                 elif argument.get_child(1).type == syms.comp_for:
                     generator_count += 1
-                elif argument.get_child(0).type == tokens.STAR:
-                    arg_count += 1
                 else:
-                    # argument.get_child(0).type == tokens.DOUBLESTAR
-                    # or keyword arg
                     keyword_count += 1
         if generator_count > 1 or \
                 (generator_count and (keyword_count or arg_count)):
@@ -1074,66 +989,53 @@
         args = []
         keywords = []
         used_keywords = {}
-        doublestars_count = 0 # just keyword argument unpackings
+        variable_arg = None
+        keywords_arg = None
         child_count = args_node.num_children()
         i = 0
         while i < child_count:
             argument = args_node.get_child(i)
             if argument.type == syms.argument:
-                expr_node = argument.get_child(0)
                 if argument.num_children() == 1:
-                    # a positional argument
+                    expr_node = argument.get_child(0)
                     if keywords:
-                        if doublestars_count:
-                            self.error("positional argument follows "
-                                       "keyword argument unpacking",
-                                       expr_node)
-                        else:
-                            self.error("positional argument follows "
-                                       "keyword argument",
-                                       expr_node)
+                        self.error("non-keyword arg after keyword arg",
+                                   expr_node)
+                    if variable_arg:
+                        self.error("only named arguments may follow "
+                                   "*expression", expr_node)
                     args.append(self.handle_expr(expr_node))
-                elif expr_node.type == tokens.STAR:
-                    # an iterable argument unpacking
-                    if doublestars_count:
-                        self.error("iterable argument unpacking follows "
-                                   "keyword argument unpacking",
-                                   expr_node)
-                    expr = self.handle_expr(argument.get_child(1))
-                    args.append(ast.Starred(expr, ast.Load,
-                                            expr_node.get_lineno(),
-                                            expr_node.get_column()))
-                elif expr_node.type == tokens.DOUBLESTAR:
-                    # a keyword argument unpacking
-                    i += 1
-                    expr = self.handle_expr(argument.get_child(1))
-                    keywords.append(ast.keyword(None, expr))
-                    doublestars_count += 1
                 elif argument.get_child(1).type == syms.comp_for:
-                    # the lone generator expression
                     args.append(self.handle_genexp(argument))
                 else:
-                    # a keyword argument
-                    keyword_expr = self.handle_expr(expr_node)
+                    keyword_node = argument.get_child(0)
+                    keyword_expr = self.handle_expr(keyword_node)
                     if isinstance(keyword_expr, ast.Lambda):
                         self.error("lambda cannot contain assignment",
-                                   expr_node)
+                                   keyword_node)
                     elif not isinstance(keyword_expr, ast.Name):
                         self.error("keyword can't be an expression",
-                                   expr_node)
+                                   keyword_node)
                     keyword = keyword_expr.id
                     if keyword in used_keywords:
-                        self.error("keyword argument repeated", expr_node)
+                        self.error("keyword argument repeated", keyword_node)
                     used_keywords[keyword] = None
-                    self.check_forbidden_name(keyword, expr_node)
+                    self.check_forbidden_name(keyword, keyword_node)
                     keyword_value = self.handle_expr(argument.get_child(2))
                     keywords.append(ast.keyword(keyword, keyword_value))
+            elif argument.type == tokens.STAR:
+                variable_arg = self.handle_expr(args_node.get_child(i + 1))
+                i += 1
+            elif argument.type == tokens.DOUBLESTAR:
+                keywords_arg = self.handle_expr(args_node.get_child(i + 1))
+                i += 1
             i += 1
         if not args:
             args = None
         if not keywords:
             keywords = None
-        return ast.Call(callable_expr, args, keywords, callable_expr.lineno,
+        return ast.Call(callable_expr, args, keywords, variable_arg,
+                        keywords_arg, callable_expr.lineno,
                         callable_expr.col_offset)
 
     def parse_number(self, raw):
@@ -1170,7 +1072,10 @@
             raw = "-" + raw
         w_num_str = self.space.newtext(raw)
         w_base = self.space.newint(base)
-        if raw[-1] in "jJ":
+        if raw[-1] in "lL":
+            tp = self.space.w_long
+            return self.space.call_function(tp, w_num_str, w_base)
+        elif raw[-1] in "jJ":
             tp = self.space.w_complex
             return self.space.call_function(tp, w_num_str)
         try:
@@ -1180,44 +1085,43 @@
                 raise
             return self.space.call_function(self.space.w_float, w_num_str)
 
-    @always_inline
-    def handle_dictelement(self, node, i):
-        if node.get_child(i).type == tokens.DOUBLESTAR:
-            key = None
-            value = self.handle_expr(node.get_child(i+1))
-            i += 2
-        else:
-            key = self.handle_expr(node.get_child(i))
-            value = self.handle_expr(node.get_child(i+2))
-            i += 3
-        return (i,key,value)
-
     def handle_atom(self, atom_node):
         first_child = atom_node.get_child(0)
         first_child_type = first_child.type
         if first_child_type == tokens.NAME:
-            name = first_child.get_value()
-            if name == "None":
-                w_singleton = self.space.w_None
-            elif name == "True":
-                w_singleton = self.space.w_True
-            elif name == "False":
-                w_singleton = self.space.w_False
+            return ast.Name(first_child.get_value(), ast.Load,
+                            first_child.get_lineno(), first_child.get_column())
+        elif first_child_type == tokens.STRING:
+            space = self.space
+            encoding = self.compile_info.encoding
+            flags = self.compile_info.flags
+            unicode_literals = flags & consts.CO_FUTURE_UNICODE_LITERALS
+            sub_strings_w = []
+            for index in range(atom_node.num_children()):
+                child = atom_node.get_child(index)
+                try:
+                    sub_strings_w.append(parsestring.parsestr(space, encoding, 
child.get_value(),
+                                                              
unicode_literals))
+                except error.OperationError as e:
+                    if not e.match(space, space.w_UnicodeError):
+                        raise
+                    # UnicodeError in literal: turn into SyntaxError
+                    e.normalize_exception(space)
+                    errmsg = space.text_w(space.str(e.get_w_value(space)))
+                    if child is None:
+                        child = atom_node
+                    raise self.error('(unicode error) %s' % errmsg, child)
+            # This implements implicit string concatenation.
+            if len(sub_strings_w) > 1:
+                w_sub_strings = space.newlist(sub_strings_w)
+                w_join = space.getattr(space.newtext(""), 
space.newtext("join"))
+                final_string = space.call_function(w_join, w_sub_strings)
             else:
-                name = self.new_identifier(name)
-                return ast.Name(name, ast.Load, first_child.get_lineno(),
-                                first_child.get_column())
-            return ast.NameConstant(w_singleton, first_child.get_lineno(),
-                                first_child.get_column())
-        #
-        elif first_child_type == tokens.STRING:
-            return fstring.string_parse_literal(self, atom_node)
-        #
+                final_string = sub_strings_w[0]
+            return ast.Str(final_string, atom_node.get_lineno(), 
atom_node.get_column())
         elif first_child_type == tokens.NUMBER:
             num_value = self.parse_number(first_child.get_value())
             return ast.Num(num_value, atom_node.get_lineno(), 
atom_node.get_column())
-        elif first_child_type == tokens.ELLIPSIS:
-            return ast.Ellipsis(atom_node.get_lineno(), atom_node.get_column())
         elif first_child_type == tokens.LPAR:
             second_child = atom_node.get_child(1)
             if second_child.type == tokens.RPAR:
@@ -1239,31 +1143,28 @@
             return self.handle_listcomp(second_child)
         elif first_child_type == tokens.LBRACE:
             maker = atom_node.get_child(1)
+            if maker.type == tokens.RBRACE:
+                return ast.Dict(None, None, atom_node.get_lineno(), atom_node.get_column())
             n_maker_children = maker.num_children()
-            if maker.type == tokens.RBRACE:
-                # an empty dict
-                return ast.Dict(None, None, atom_node.get_lineno(), atom_node.get_column())
-            else:
-                is_dict = maker.get_child(0).type == tokens.DOUBLESTAR
-                if (n_maker_children == 1 or
-                    (n_maker_children > 1 and
-                     maker.get_child(1).type == tokens.COMMA)):
-                    # a set display
-                    return self.handle_setdisplay(maker, atom_node)
-                elif n_maker_children > 1 and maker.get_child(1).type == syms.comp_for:
-                    # a set comprehension
-                    return self.handle_setcomp(maker, atom_node)
-                elif (n_maker_children > (3-is_dict) and
-                      maker.get_child(3-is_dict).type == syms.comp_for):
-                    # a dictionary comprehension
-                    if is_dict:
-                        raise self.error("dict unpacking cannot be used in "
-                                         "dict comprehension", atom_node)
-                    
-                    return self.handle_dictcomp(maker, atom_node)
-                else:
-                    # a dictionary display
-                    return self.handle_dictdisplay(maker, atom_node)
+            if n_maker_children == 1 or maker.get_child(1).type == tokens.COMMA:
+                elts = []
+                for i in range(0, n_maker_children, 2):
+                    elts.append(self.handle_expr(maker.get_child(i)))
+                return ast.Set(elts, atom_node.get_lineno(), atom_node.get_column())
+            if maker.get_child(1).type == syms.comp_for:
+                return self.handle_setcomp(maker)
+            if (n_maker_children > 3 and
+                maker.get_child(3).type == syms.comp_for):
+                return self.handle_dictcomp(maker)
+            keys = []
+            values = []
+            for i in range(0, n_maker_children, 4):
+                keys.append(self.handle_expr(maker.get_child(i)))
+                values.append(self.handle_expr(maker.get_child(i + 2)))
+            return ast.Dict(keys, values, atom_node.get_lineno(), atom_node.get_column())
+        elif first_child_type == tokens.BACKQUOTE:
+            expr = self.handle_testlist(atom_node.get_child(1))
+            return ast.Repr(expr, atom_node.get_lineno(), atom_node.get_column())
         else:
             raise AssertionError("unknown atom")
 
@@ -1273,7 +1174,7 @@
             return self.handle_genexp(gexp_node)
         return self.handle_testlist(gexp_node)
 
-    def count_comp_fors(self, comp_node):
+    def count_comp_fors(self, comp_node, for_type, if_type):
         count = 0
         current_for = comp_node
         while True:
@@ -1284,10 +1185,10 @@
                 return count
             while True:
                 first_child = current_iter.get_child(0)
-                if first_child.type == syms.comp_for:
+                if first_child.type == for_type:
                     current_for = current_iter.get_child(0)
                     break
-                elif first_child.type == syms.comp_if:
+                elif first_child.type == if_type:
                     if first_child.num_children() == 3:
                         current_iter = first_child.get_child(2)
                     else:
@@ -1295,40 +1196,48 @@
                 else:
                     raise AssertionError("should not reach here")
 
-    def count_comp_ifs(self, iter_node):
+    def count_comp_ifs(self, iter_node, for_type):
         count = 0
         while True:
             first_child = iter_node.get_child(0)
-            if first_child.type == syms.comp_for:
+            if first_child.type == for_type:
                 return count
             count += 1
             if first_child.num_children() == 2:
                 return count
             iter_node = first_child.get_child(2)
 
-    def comprehension_helper(self, comp_node):
-        fors_count = self.count_comp_fors(comp_node)
+    @specialize.arg(2)
+    def comprehension_helper(self, comp_node,
+                             handle_source_expr_meth="handle_expr",
+                             for_type=syms.comp_for, if_type=syms.comp_if,
+                             iter_type=syms.comp_iter,
+                             comp_fix_unamed_tuple_location=False):
+        handle_source_expression = getattr(self, handle_source_expr_meth)
+        fors_count = self.count_comp_fors(comp_node, for_type, if_type)
         comps = []
         for i in range(fors_count):
             for_node = comp_node.get_child(1)
             for_targets = self.handle_exprlist(for_node, ast.Store)
-            expr = self.handle_expr(comp_node.get_child(3))
+            expr = handle_source_expression(comp_node.get_child(3))
             assert isinstance(expr, ast.expr)
             if for_node.num_children() == 1:
                 comp = ast.comprehension(for_targets[0], expr, None)
             else:
+                col = comp_node.get_column()
+                line = comp_node.get_lineno()
                 # Modified in python2.7, see http://bugs.python.org/issue6704
-                # Fixing unamed tuple location
-                expr_node = for_targets[0]
-                assert isinstance(expr_node, ast.expr)
-                col = expr_node.col_offset
-                line = expr_node.lineno
+                if comp_fix_unamed_tuple_location:
+                    expr_node = for_targets[0]
+                    assert isinstance(expr_node, ast.expr)
+                    col = expr_node.col_offset
+                    line = expr_node.lineno
                 target = ast.Tuple(for_targets, ast.Store, line, col)
                 comp = ast.comprehension(target, expr, None)
             if comp_node.num_children() == 5:
                 comp_node = comp_iter = comp_node.get_child(4)
-                assert comp_iter.type == syms.comp_iter
-                ifs_count = self.count_comp_ifs(comp_iter)
+                assert comp_iter.type == iter_type
+                ifs_count = self.count_comp_ifs(comp_iter, for_type)
                 if ifs_count:
                     ifs = []
                     for j in range(ifs_count):
@@ -1337,66 +1246,42 @@
                         if comp_if.num_children() == 3:
                             comp_node = comp_iter = comp_if.get_child(2)
                     comp.ifs = ifs
-                if comp_node.type == syms.comp_iter:
+                if comp_node.type == iter_type:
                     comp_node = comp_node.get_child(0)
             assert isinstance(comp, ast.comprehension)
             comps.append(comp)
         return comps
 
     def handle_genexp(self, genexp_node):
-        ch = genexp_node.get_child(0)
-        elt = self.handle_expr(ch)
-        if isinstance(elt, ast.Starred):
-            self.error("iterable unpacking cannot be used in comprehension", ch)
-        comps = self.comprehension_helper(genexp_node.get_child(1))
+        elt = self.handle_expr(genexp_node.get_child(0))
+        comps = self.comprehension_helper(genexp_node.get_child(1),
+                                          comp_fix_unamed_tuple_location=True)
         return ast.GeneratorExp(elt, comps, genexp_node.get_lineno(),
                                 genexp_node.get_column())
 
     def handle_listcomp(self, listcomp_node):
-        ch = listcomp_node.get_child(0)
-        elt = self.handle_expr(ch)
-        if isinstance(elt, ast.Starred):
-            self.error("iterable unpacking cannot be used in comprehension", ch)
-        comps = self.comprehension_helper(listcomp_node.get_child(1))
+        elt = self.handle_expr(listcomp_node.get_child(0))
+        comps = self.comprehension_helper(listcomp_node.get_child(1),
+                                          "handle_testlist",
+                                          syms.list_for, syms.list_if,
+                                          syms.list_iter,
+                                          comp_fix_unamed_tuple_location=True)
         return ast.ListComp(elt, comps, listcomp_node.get_lineno(),
                             listcomp_node.get_column())
 
-    def handle_setcomp(self, set_maker, atom_node):
-        ch = set_maker.get_child(0)
-        elt = self.handle_expr(ch)
-        if isinstance(elt, ast.Starred):
-            self.error("iterable unpacking cannot be used in comprehension", ch)
-        comps = self.comprehension_helper(set_maker.get_child(1))
-        return ast.SetComp(elt, comps, atom_node.get_lineno(),
-                                       atom_node.get_column())
+    def handle_setcomp(self, set_maker):
+        elt = self.handle_expr(set_maker.get_child(0))
+        comps = self.comprehension_helper(set_maker.get_child(1),
+                                          comp_fix_unamed_tuple_location=True)
+        return ast.SetComp(elt, comps, set_maker.get_lineno(), set_maker.get_column())
 
-    def handle_dictcomp(self, dict_maker, atom_node):
-        i, key, value = self.handle_dictelement(dict_maker, 0)
-        comps = self.comprehension_helper(dict_maker.get_child(i))
-        return ast.DictComp(key, value, comps, atom_node.get_lineno(),
-                                               atom_node.get_column())
-    
-    def handle_dictdisplay(self, node, atom_node):
-        keys = []
-        values = []
-        i = 0
-        while i < node.num_children():
-            i, key, value = self.handle_dictelement(node, i)
-            keys.append(key)
-            values.append(value)
-            i += 1
-        return ast.Dict(keys, values, atom_node.get_lineno(),
-                                      atom_node.get_column())
-    
-    def handle_setdisplay(self, node, atom_node):
-        elts = []
-        i = 0
-        while i < node.num_children():
-            expr = self.handle_expr(node.get_child(i))
-            elts.append(expr)
-            i += 2
-        return ast.Set(elts, atom_node.get_lineno(),
-                             atom_node.get_column())
+    def handle_dictcomp(self, dict_maker):
+        key = self.handle_expr(dict_maker.get_child(0))
+        value = self.handle_expr(dict_maker.get_child(2))
+        comps = self.comprehension_helper(dict_maker.get_child(3),
+                                          comp_fix_unamed_tuple_location=True)
+        return ast.DictComp(key, value, comps, dict_maker.get_lineno(),
+                            dict_maker.get_column())
 
     def handle_exprlist(self, exprlist, context):
         exprs = []
diff --git a/pypy/interpreter/astcompiler/test/test_astbuilder.py 
b/pypy/interpreter/astcompiler/test/test_astbuilder.py
--- a/pypy/interpreter/astcompiler/test/test_astbuilder.py
+++ b/pypy/interpreter/astcompiler/test/test_astbuilder.py
@@ -1408,3 +1408,11 @@
         exc = py.test.raises(SyntaxError, self.get_ast, input).value
         assert exc.msg == ("(unicode error) 'unicodeescape' codec can't decode"
                            " bytes in position 0-2: truncated \\xXX escape")
+
+    def test_decode_error_in_string_literal_correct_line(self):
+        input = "u'a' u'b'\\\n u'c' u'\\x'"
+        exc = py.test.raises(SyntaxError, self.get_ast, input).value
+        assert exc.msg == ("(unicode error) 'unicodeescape' codec can't decode"
+                           " bytes in position 0-1: truncated \\xXX escape")
+        assert exc.lineno == 2
+        assert exc.offset == 6
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -207,6 +207,21 @@
     def _set_mapdict_storage_and_map(self, storage, map):
         raise NotImplementedError
 
+
+    # -------------------------------------------------------------------
+    # cpyext support
+    # these functions will only be seen by the annotator if we translate
+    # with the cpyext module
+
+    def _cpyext_as_pyobj(self, space):
+        from pypy.module.cpyext.pyobject import w_root_as_pyobj
+        return w_root_as_pyobj(self, space)
+
+    def _cpyext_attach_pyobj(self, space, py_obj):
+        from pypy.module.cpyext.pyobject import w_root_attach_pyobj
+        return w_root_attach_pyobj(self, space, py_obj)
+
+
     # -------------------------------------------------------------------
 
     def is_w(self, space, w_other):
diff --git a/pypy/module/__pypy__/interp_magic.py 
b/pypy/module/__pypy__/interp_magic.py
--- a/pypy/module/__pypy__/interp_magic.py
+++ b/pypy/module/__pypy__/interp_magic.py
@@ -134,7 +134,7 @@
                   space.newbool(debug))
 
 @unwrap_spec(estimate=int)
-def add_memory_pressure(estimate):
+def add_memory_pressure(space, estimate):
     """ Add memory pressure of estimate bytes. Useful when calling a C function
     that internally allocates a big chunk of memory. This instructs the GC to
     garbage collect sooner than it would otherwise."""
diff --git a/pypy/module/_cffi_backend/__init__.py 
b/pypy/module/_cffi_backend/__init__.py
--- a/pypy/module/_cffi_backend/__init__.py
+++ b/pypy/module/_cffi_backend/__init__.py
@@ -3,7 +3,7 @@
 from rpython.rlib import rdynload, clibffi
 from rpython.rtyper.lltypesystem import rffi
 
-VERSION = "1.11.3"
+VERSION = "1.11.4"
 
 FFI_DEFAULT_ABI = clibffi.FFI_DEFAULT_ABI
 try:
diff --git a/pypy/module/_cffi_backend/allocator.py 
b/pypy/module/_cffi_backend/allocator.py
--- a/pypy/module/_cffi_backend/allocator.py
+++ b/pypy/module/_cffi_backend/allocator.py
@@ -21,13 +21,13 @@
         if self.w_alloc is None:
             if self.should_clear_after_alloc:
                 ptr = lltype.malloc(rffi.CCHARP.TO, datasize,
-                                    flavor='raw', zero=True,
-                                    add_memory_pressure=True)
+                                    flavor='raw', zero=True)
             else:
                 ptr = lltype.malloc(rffi.CCHARP.TO, datasize,
-                                    flavor='raw', zero=False,
-                                    add_memory_pressure=True)
-            return cdataobj.W_CDataNewStd(space, ptr, ctype, length)
+                                    flavor='raw', zero=False)
+            w_res = cdataobj.W_CDataNewStd(space, ptr, ctype, length)
+            rgc.add_memory_pressure(datasize, w_res)
+            return w_res
         else:
             w_raw_cdata = space.call_function(self.w_alloc,
                                               space.newint(datasize))
@@ -53,7 +53,7 @@
             if self.w_free is not None:
                 res.w_free = self.w_free
                 res.register_finalizer(space)
-            rgc.add_memory_pressure(datasize)
+            rgc.add_memory_pressure(datasize, res)
             return res
 
     @unwrap_spec(w_init=WrappedDefault(None))
diff --git a/pypy/module/_cffi_backend/cdataobj.py 
b/pypy/module/_cffi_backend/cdataobj.py
--- a/pypy/module/_cffi_backend/cdataobj.py
+++ b/pypy/module/_cffi_backend/cdataobj.py
@@ -447,7 +447,10 @@
             with self as ptr:
                 w_res = W_CDataGCP(space, ptr, self.ctype, self, w_destructor)
         if size != 0:
-            rgc.add_memory_pressure(size)
+            if isinstance(w_res, W_CDataGCP):
+                rgc.add_memory_pressure(size, w_res)
+            else:
+                rgc.add_memory_pressure(size, self)
         return w_res
 
     def unpack(self, length):
diff --git a/pypy/module/_cffi_backend/test/_backend_test_c.py 
b/pypy/module/_cffi_backend/test/_backend_test_c.py
--- a/pypy/module/_cffi_backend/test/_backend_test_c.py
+++ b/pypy/module/_cffi_backend/test/_backend_test_c.py
@@ -1,7 +1,7 @@
 # ____________________________________________________________
 
 import sys
-assert __version__ == "1.11.3", ("This test_c.py file is for testing a version"
+assert __version__ == "1.11.4", ("This test_c.py file is for testing a version"
                                  " of cffi that differs from the one that we"
                                  " get from 'import _cffi_backend'")
 if sys.version_info < (3,):
diff --git a/pypy/module/_io/test/test_interp_textio.py 
b/pypy/module/_io/test/test_interp_textio.py
--- a/pypy/module/_io/test/test_interp_textio.py
+++ b/pypy/module/_io/test/test_interp_textio.py
@@ -1,6 +1,6 @@
 import pytest
 try:
-    from hypothesis import given, strategies as st
+    from hypothesis import given, strategies as st, settings
 except ImportError:
     pytest.skip("hypothesis required")
 import os
@@ -29,6 +29,7 @@
 
 @given(data=st_readline(),
        mode=st.sampled_from(['\r', '\n', '\r\n', '']))
+@settings(deadline=None)
 def test_readline(space, data, mode):
     txt, limits = data
     w_stream = W_BytesIO(space)
diff --git a/pypy/module/cpyext/include/longobject.h 
b/pypy/module/cpyext/include/longobject.h
--- a/pypy/module/cpyext/include/longobject.h
+++ b/pypy/module/cpyext/include/longobject.h
@@ -20,6 +20,9 @@
 
 #define PyLong_AS_LONG(op) PyLong_AsLong(op)
 
+#define _PyLong_AsByteArray(v, bytes, n, little_endian, is_signed)   \
+    _PyLong_AsByteArrayO((PyObject *)(v), bytes, n, little_endian, is_signed)
+
 #ifdef __cplusplus
 }
 #endif
diff --git a/pypy/module/cpyext/longobject.py b/pypy/module/cpyext/longobject.py
--- a/pypy/module/cpyext/longobject.py
+++ b/pypy/module/cpyext/longobject.py
@@ -3,8 +3,8 @@
     cpython_api, PyObject, build_type_checkers_flags, Py_ssize_t,
     CONST_STRING, ADDR, CANNOT_FAIL)
 from pypy.objspace.std.longobject import W_LongObject
-from pypy.interpreter.error import OperationError
-from rpython.rlib.rbigint import rbigint
+from pypy.interpreter.error import OperationError, oefmt
+from rpython.rlib.rbigint import rbigint, InvalidSignednessError
 
 PyLong_Check, PyLong_CheckExact = build_type_checkers_flags("Long")
 
@@ -251,3 +251,26 @@
         byteorder = 'big'
     result = rbigint.frombytes(s, byteorder, signed != 0)
     return space.newlong_from_rbigint(result)
+
+@cpython_api([PyObject, rffi.UCHARP, rffi.SIZE_T,
+              rffi.INT_real, rffi.INT_real], rffi.INT_real, error=-1)
+def _PyLong_AsByteArrayO(space, w_v, bytes, n, little_endian, is_signed):
+    n = rffi.cast(lltype.Signed, n)
+    little_endian = rffi.cast(lltype.Signed, little_endian)
+    signed = rffi.cast(lltype.Signed, is_signed) != 0
+    byteorder = 'little' if little_endian else 'big'
+    bigint = space.bigint_w(w_v)
+    try:
+        digits = bigint.tobytes(n, byteorder, signed)
+    except InvalidSignednessError:     # < 0 but not 'signed'
+        # in this case, CPython raises OverflowError even though the C
+        # comments say it should raise TypeError
+        raise oefmt(space.w_OverflowError,
+                    "can't convert negative long to unsigned")
+    except OverflowError:
+        raise oefmt(space.w_OverflowError,
+                    "long too big to convert")
+    assert len(digits) == n
+    for i in range(n):
+        bytes[i] = rffi.cast(rffi.UCHAR, digits[i])
+    return 0
diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py
--- a/pypy/module/cpyext/pyobject.py
+++ b/pypy/module/cpyext/pyobject.py
@@ -10,6 +10,8 @@
     PyVarObject, Py_ssize_t, init_function, cts)
 from pypy.module.cpyext.state import State
 from pypy.objspace.std.typeobject import W_TypeObject
+from pypy.objspace.std.noneobject import W_NoneObject
+from pypy.objspace.std.boolobject import W_BoolObject
 from pypy.objspace.std.objectobject import W_ObjectObject
 from rpython.rlib.objectmodel import specialize, we_are_translated
 from rpython.rlib.objectmodel import keepalive_until_here
@@ -21,6 +23,52 @@
 #________________________________________________________
 # type description
 
+class W_BaseCPyObject(W_ObjectObject):
+    """ A subclass of W_ObjectObject that has one field for directly storing
+    the link from the w_obj to the cpy ref. This is only used for C-defined
+    types. """
+
+
+def check_true(s_arg, bookeeper):
+    assert s_arg.const is True
+
+def w_root_as_pyobj(w_obj, space):
+    from rpython.rlib.debug import check_annotation
+    # make sure that translation crashes if we see this while not translating
+    # with cpyext
+    check_annotation(space.config.objspace.usemodules.cpyext, check_true)
+    # default implementation of _cpyext_as_pyobj
+    return rawrefcount.from_obj(PyObject, w_obj)
+
+def w_root_attach_pyobj(w_obj, space, py_obj):
+    from rpython.rlib.debug import check_annotation
+    check_annotation(space.config.objspace.usemodules.cpyext, check_true)
+    assert space.config.objspace.usemodules.cpyext
+    # default implementation of _cpyext_attach_pyobj
+    rawrefcount.create_link_pypy(w_obj, py_obj)
+
+
+def add_direct_pyobj_storage(cls):
+    """ Add the necessary methods to a class to store a reference to the py_obj
+    on its instances directly. """
+
+    cls._cpy_ref = lltype.nullptr(PyObject.TO)
+
+    def _cpyext_as_pyobj(self, space):
+        return self._cpy_ref
+    cls._cpyext_as_pyobj = _cpyext_as_pyobj
+
+    def _cpyext_attach_pyobj(self, space, py_obj):
+        self._cpy_ref = py_obj
+        rawrefcount.create_link_pyobj(self, py_obj)
+    cls._cpyext_attach_pyobj = _cpyext_attach_pyobj
+
+add_direct_pyobj_storage(W_BaseCPyObject)
+add_direct_pyobj_storage(W_TypeObject)
+add_direct_pyobj_storage(W_NoneObject)
+add_direct_pyobj_storage(W_BoolObject)
+
+
 class BaseCpyTypedescr(object):
     basestruct = PyObject.TO
     W_BaseObject = W_ObjectObject
@@ -66,8 +114,12 @@
 
     def realize(self, space, obj):
         w_type = from_ref(space, rffi.cast(PyObject, obj.c_ob_type))
+        assert isinstance(w_type, W_TypeObject)
         try:
-            w_obj = space.allocate_instance(self.W_BaseObject, w_type)
+            if w_type.flag_cpytype:
+                w_obj = space.allocate_instance(W_BaseCPyObject, w_type)
+            else:
+                w_obj = space.allocate_instance(self.W_BaseObject, w_type)
         except OperationError as e:
             if e.match(space, space.w_TypeError):
                 raise oefmt(space.w_SystemError,
@@ -76,6 +128,9 @@
                             w_type)
             raise
         track_reference(space, obj, w_obj)
+        if w_type.flag_cpytype:
+            assert isinstance(w_obj, W_BaseCPyObject)
+            w_obj._cpy_ref = obj
         return w_obj
 
 typedescr_cache = {}
@@ -186,12 +241,12 @@
     Ties together a PyObject and an interpreter object.
     The PyObject's refcnt is increased by REFCNT_FROM_PYPY.
     The reference in 'py_obj' is not stolen!  Remember to decref()
-    it is you need to.
+    it if you need to.
     """
     # XXX looks like a PyObject_GC_TRACK
     assert py_obj.c_ob_refcnt < rawrefcount.REFCNT_FROM_PYPY
     py_obj.c_ob_refcnt += rawrefcount.REFCNT_FROM_PYPY
-    rawrefcount.create_link_pypy(w_obj, py_obj)
+    w_obj._cpyext_attach_pyobj(space, py_obj)
 
 
 w_marker_deallocating = W_Root()
@@ -237,7 +292,7 @@
 @jit.dont_look_inside
 def as_pyobj(space, w_obj, w_userdata=None, immortal=False):
     """
-    Returns a 'PyObject *' representing the given intepreter object.
+    Returns a 'PyObject *' representing the given interpreter object.
     This doesn't give a new reference, but the returned 'PyObject *'
     is valid at least as long as 'w_obj' is.  **To be safe, you should
     use keepalive_until_here(w_obj) some time later.**  In case of
@@ -245,7 +300,7 @@
     """
     assert not is_pyobj(w_obj)
     if w_obj is not None:
-        py_obj = rawrefcount.from_obj(PyObject, w_obj)
+        py_obj = w_obj._cpyext_as_pyobj(space)
         if not py_obj:
             py_obj = create_ref(space, w_obj, w_userdata, immortal=immortal)
         #
diff --git a/pypy/module/cpyext/sequence.py b/pypy/module/cpyext/sequence.py
--- a/pypy/module/cpyext/sequence.py
+++ b/pypy/module/cpyext/sequence.py
@@ -267,7 +267,7 @@
     raise oefmt(space.w_ValueError, "sequence.index(x): x not in sequence")
 
 class CPyListStrategy(ListStrategy):
-    erase, unerase = rerased.new_erasing_pair("empty")
+    erase, unerase = rerased.new_erasing_pair("cpylist")
     erase = staticmethod(erase)
     unerase = staticmethod(unerase)
 
diff --git a/pypy/module/cpyext/stubs.py b/pypy/module/cpyext/stubs.py
--- a/pypy/module/cpyext/stubs.py
+++ b/pypy/module/cpyext/stubs.py
@@ -1577,13 +1577,6 @@
     """
     raise NotImplementedError
 
-@cpython_api([PyObject], PyObject)
-def PyUnicode_AsUTF32String(space, unicode):
-    """Return a Python byte string using the UTF-32 encoding in native byte
-    order. The string always starts with a BOM mark.  Error handling is 
"strict".
-    Return NULL if an exception was raised by the codec."""
-    raise NotImplementedError
-
 @cpython_api([rffi.CCHARP, Py_ssize_t, rffi.CCHARP, rffi.INTP, Py_ssize_t], PyObject)
 def PyUnicode_DecodeUTF16Stateful(space, s, size, errors, byteorder, consumed):
     """If consumed is NULL, behave like PyUnicode_DecodeUTF16(). If
@@ -1612,13 +1605,6 @@
     Return NULL if an exception was raised by the codec."""
     raise NotImplementedError
 
-@cpython_api([PyObject], PyObject)
-def PyUnicode_AsUTF16String(space, unicode):
-    """Return a Python byte string using the UTF-16 encoding in native byte
-    order. The string always starts with a BOM mark.  Error handling is 
"strict".
-    Return NULL if an exception was raised by the codec."""
-    raise NotImplementedError
-
 @cpython_api([rffi.CCHARP, Py_ssize_t, rffi.CCHARP], PyObject)
 def PyUnicode_DecodeUTF7(space, s, size, errors):
     """Create a Unicode object by decoding size bytes of the UTF-7 encoded 
string
diff --git a/pypy/module/cpyext/test/test_longobject.py 
b/pypy/module/cpyext/test/test_longobject.py
--- a/pypy/module/cpyext/test/test_longobject.py
+++ b/pypy/module/cpyext/test/test_longobject.py
@@ -259,6 +259,48 @@
         assert module.from_bytearray(False, False) == 0x9ABC41
         assert module.from_bytearray(False, True) == -0x6543BF
 
+    def test_asbytearray(self):
+        module = self.import_extension('foo', [
+            ("as_bytearray", "METH_VARARGS",
+             """
+                 PyObject *result;
+                 PyLongObject *o;
+                 int n, little_endian, is_signed;
+                 unsigned char *bytes;
+                 if (!PyArg_ParseTuple(args, "O!iii", &PyLong_Type, &o, &n,
+                         &little_endian, &is_signed))
+                     return NULL;
+                 bytes = malloc(n);
+                 if (_PyLong_AsByteArray(o, bytes, (size_t)n,
+                                         little_endian, is_signed) != 0)
+                 {
+                     free(bytes);
+                     return NULL;
+                 }
+                 result = PyString_FromStringAndSize((const char *)bytes, n);
+                 free(bytes);
+                 return result;
+             """),
+            ])
+        s = module.as_bytearray(0x41BC9AL, 4, True, False)
+        assert s == "\x9A\xBC\x41\x00"
+        s = module.as_bytearray(0x41BC9AL, 4, False, False)
+        assert s == "\x00\x41\xBC\x9A"
+        s = module.as_bytearray(0x41BC9AL, 3, True, False)
+        assert s == "\x9A\xBC\x41"
+        s = module.as_bytearray(0x41BC9AL, 3, True, True)
+        assert s == "\x9A\xBC\x41"
+        s = module.as_bytearray(0x9876L, 2, True, False)
+        assert s == "\x76\x98"
+        s = module.as_bytearray(0x9876L - 0x10000L, 2, True, True)
+        assert s == "\x76\x98"
+        raises(OverflowError, module.as_bytearray,
+                              0x9876L, 2, False, True)
+        raises(OverflowError, module.as_bytearray,
+                              -1L, 2, True, False)
+        raises(OverflowError, module.as_bytearray,
+                              0x1234567L, 3, True, False)
+
     def test_fromunicode(self):
         module = self.import_extension('foo', [
             ("from_unicode", "METH_O",
diff --git a/pypy/module/cpyext/test/test_object.py 
b/pypy/module/cpyext/test/test_object.py
--- a/pypy/module/cpyext/test/test_object.py
+++ b/pypy/module/cpyext/test/test_object.py
@@ -218,7 +218,7 @@
 
         if not cls.runappdirect:
             cls.total_mem = 0
-            def add_memory_pressure(estimate):
+            def add_memory_pressure(estimate, object=None):
                 assert estimate >= 0
                 cls.total_mem += estimate
             cls.orig_add_memory_pressure = [rgc.add_memory_pressure]
diff --git a/pypy/module/cpyext/test/test_typeobject.py 
b/pypy/module/cpyext/test/test_typeobject.py
--- a/pypy/module/cpyext/test/test_typeobject.py
+++ b/pypy/module/cpyext/test/test_typeobject.py
@@ -3,13 +3,23 @@
 from pypy.module.cpyext.test.test_cpyext import AppTestCpythonExtensionBase
 from pypy.module.cpyext.test.test_api import BaseApiTest
 from pypy.module.cpyext.api import generic_cpy_call
-from pypy.module.cpyext.pyobject import make_ref, from_ref, decref
+from pypy.module.cpyext.pyobject import make_ref, from_ref, decref, as_pyobj
 from pypy.module.cpyext.typeobject import cts, PyTypeObjectPtr
 
 import sys
 import pytest
 
 class AppTestTypeObject(AppTestCpythonExtensionBase):
+
+    def setup_class(cls):
+        AppTestCpythonExtensionBase.setup_class.im_func(cls)
+        def _check_uses_shortcut(w_inst):
+            res = hasattr(w_inst, "_cpy_ref") and w_inst._cpy_ref
+            res = res and as_pyobj(cls.space, w_inst) == w_inst._cpy_ref
+            return cls.space.newbool(res)
+        cls.w__check_uses_shortcut = cls.space.wrap(
+            gateway.interp2app(_check_uses_shortcut))
+
     def test_typeobject(self):
         import sys
         module = self.import_module(name='foo')
@@ -162,6 +172,25 @@
         assert fuu2(u"abc").baz().escape()
_______________________________________________
pypy-commit mailing list
pypy-commit@python.org
https://mail.python.org/mailman/listinfo/pypy-commit

Reply via email to