Author: Carl Friedrich Bolz-Tereick <cfb...@gmx.de>
Branch: py3.6
Changeset: r97568:65e4ad6eb6b8
Date: 2019-09-20 12:43 +0200
http://bitbucket.org/pypy/pypy/changeset/65e4ad6eb6b8/

Log:    merge default

diff too long, truncating to 2000 out of 8630 lines

diff --git a/extra_tests/cffi_tests/test_egg_version.py b/extra_tests/cffi_tests/test_version.py
rename from extra_tests/cffi_tests/test_egg_version.py
rename to extra_tests/cffi_tests/test_version.py
--- a/extra_tests/cffi_tests/test_egg_version.py
+++ b/extra_tests/cffi_tests/test_version.py
@@ -1,6 +1,7 @@
 from email.parser import Parser
 
 import py
+import urllib2
 
 import cffi
 import pypy
@@ -10,3 +11,12 @@
 def test_egg_version():
     info = Parser().parsestr(egg_info.read())
     assert info['version'] == cffi.__version__
+
+def test_pycparser_version():
+    url = 'https://raw.githubusercontent.com/eliben/pycparser/master/pycparser/__init__.py'
+    source = urllib2.urlopen(url).read()
+    dest = py.path.local(__file__).join('..', '..', '..', 'lib_pypy', 'cffi',
+                                        '_pycparser', '__init__.py').read()
+    # if this fails, the vendored pycparser is not the latest version
+    assert source.strip() == dest.strip()
+    
diff --git a/lib_pypy/cffi/_pycparser/__init__.py b/lib_pypy/cffi/_pycparser/__init__.py
--- a/lib_pypy/cffi/_pycparser/__init__.py
+++ b/lib_pypy/cffi/_pycparser/__init__.py
@@ -4,12 +4,14 @@
 # This package file exports some convenience functions for
 # interacting with pycparser
 #
-# Copyright (C) 2008-2015, Eli Bendersky
+# Eli Bendersky [https://eli.thegreenplace.net/]
 # License: BSD
 #-----------------------------------------------------------------
 __all__ = ['c_lexer', 'c_parser', 'c_ast']
-__version__ = '2.14'
+__version__ = '2.19'
 
+import io
+from subprocess import check_output
 from .c_parser import CParser
 
 
@@ -27,7 +29,6 @@
         When successful, returns the preprocessed file's contents.
         Errors from cpp will be printed out.
     """
-    from subprocess import Popen, PIPE
     path_list = [cpp_path]
     if isinstance(cpp_args, list):
         path_list += cpp_args
@@ -38,11 +39,7 @@
     try:
         # Note the use of universal_newlines to treat all newlines
         # as \n for Python's purpose
-        #
-        pipe = Popen(   path_list,
-                        stdout=PIPE,
-                        universal_newlines=True)
-        text = pipe.communicate()[0]
+        text = check_output(path_list, universal_newlines=True)
     except OSError as e:
         raise RuntimeError("Unable to invoke 'cpp'.  " +
             'Make sure its path was passed correctly\n' +
@@ -85,7 +82,7 @@
     if use_cpp:
         text = preprocess_file(filename, cpp_path, cpp_args)
     else:
-        with open(filename, 'rU') as f:
+        with io.open(filename) as f:
             text = f.read()
 
     if parser is None:
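
[Illustrative aside, not part of the changeset] The hunk above replaces the
manual Popen/PIPE/communicate() sequence with subprocess.check_output, which
returns the preprocessor's stdout directly and raises CalledProcessError on a
non-zero exit. A minimal sketch of the same pattern; 'echo' stands in here for
a real cpp command line:

    import sys
    from subprocess import check_output

    def run_preprocessor(path_list):
        # universal_newlines=True decodes the output to str and normalizes
        # newlines to '\n', like the old Popen(..., universal_newlines=True).
        return check_output(path_list, universal_newlines=True)

    if __name__ == '__main__':
        # 'echo' is only a stand-in for ['cpp', ..., filename] in this sketch
        sys.stdout.write(run_preprocessor(['echo', 'int x;']))
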
diff --git a/lib_pypy/cffi/_pycparser/_ast_gen.py b/lib_pypy/cffi/_pycparser/_ast_gen.py
--- a/lib_pypy/cffi/_pycparser/_ast_gen.py
+++ b/lib_pypy/cffi/_pycparser/_ast_gen.py
@@ -7,7 +7,7 @@
 # The design of this module was inspired by astgen.py from the
 # Python 2.5 code-base.
 #
-# Copyright (C) 2008-2015, Eli Bendersky
+# Eli Bendersky [https://eli.thegreenplace.net/]
 # License: BSD
 #-----------------------------------------------------------------
 import pprint
@@ -63,6 +63,7 @@
         contents: a list of contents - attributes and child nodes
         See comment at the top of the configuration file for details.
     """
+
     def __init__(self, name, contents):
         self.name = name
         self.all_entries = []
@@ -84,6 +85,8 @@
     def generate_source(self):
         src = self._gen_init()
         src += '\n' + self._gen_children()
+        src += '\n' + self._gen_iter()
+
         src += '\n' + self._gen_attr_names()
         return src
 
@@ -131,6 +134,33 @@
 
         return src
 
+    def _gen_iter(self):
+        src = '    def __iter__(self):\n'
+
+        if self.all_entries:
+            for child in self.child:
+                src += (
+                    '        if self.%(child)s is not None:\n' +
+                    '            yield self.%(child)s\n') % (dict(child=child))
+
+            for seq_child in self.seq_child:
+                src += (
+                    '        for child in (self.%(child)s or []):\n'
+                    '            yield child\n') % (dict(child=seq_child))
+
+            if not (self.child or self.seq_child):
+                # Empty generator
+                src += (
+                    '        return\n' +
+                    '        yield\n')
+        else:
+            # Empty generator
+            src += (
+                '        return\n' +
+                '        yield\n')
+
+        return src
+
     def _gen_attr_names(self):
         src = "    attr_names = (" + ''.join("%r, " % nm for nm in self.attr) + ')'
         return src
@@ -150,7 +180,7 @@
 #
 # AST Node classes.
 #
-# Copyright (C) 2008-2015, Eli Bendersky
+# Eli Bendersky [https://eli.thegreenplace.net/]
 # License: BSD
 #-----------------------------------------------------------------
 
@@ -159,11 +189,38 @@
 _PROLOGUE_CODE = r'''
 import sys
 
+def _repr(obj):
+    """
+    Get the representation of an object, with dedicated pprint-like format for lists.
+    """
+    if isinstance(obj, list):
+        return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]'
+    else:
+        return repr(obj) 
 
 class Node(object):
     __slots__ = ()
     """ Abstract base class for AST nodes.
     """
+    def __repr__(self):
+        """ Generates a python representation of the current node
+        """
+        result = self.__class__.__name__ + '('
+        
+        indent = ''
+        separator = ''
+        for name in self.__slots__[:-2]:
+            result += separator
+            result += indent
+            result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n  ' + (' ' * (len(name) + len(self.__class__.__name__)))))
+            
+            separator = ','
+            indent = '\n ' + (' ' * len(self.__class__.__name__))
+        
+        result += indent + ')'
+        
+        return result
+
     def children(self):
         """ A sequence of all children that are Nodes
         """
@@ -253,26 +310,29 @@
         *   Modeled after Python's own AST visiting facilities
             (the ast module of Python 3.0)
     """
+
+    _method_cache = None
+
     def visit(self, node):
         """ Visit a node.
         """
-        method = 'visit_' + node.__class__.__name__
-        visitor = getattr(self, method, self.generic_visit)
+
+        if self._method_cache is None:
+            self._method_cache = {}
+
+        visitor = self._method_cache.get(node.__class__.__name__, None)
+        if visitor is None:
+            method = 'visit_' + node.__class__.__name__
+            visitor = getattr(self, method, self.generic_visit)
+            self._method_cache[node.__class__.__name__] = visitor
+
         return visitor(node)
 
     def generic_visit(self, node):
         """ Called if no explicit visitor function exists for a
             node. Implements preorder visiting of the node.
         """
-        for c_name, c in node.children():
+        for c in node:
             self.visit(c)
 
-
 '''
-
-
-if __name__ == "__main__":
-    import sys
-    ast_gen = ASTCodeGenerator('_c_ast.cfg')
-    ast_gen.generate(open('c_ast.py', 'w'))
-
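
[Illustrative aside, not part of the changeset] The new _gen_iter() above emits
an __iter__ method for every generated AST node class: nodes yield each of their
non-None children (and every element of sequence children), while childless
nodes use a bare `return` followed by an unreachable `yield`, which keeps
__iter__ a generator that simply produces nothing. A hand-written sketch of the
two generated shapes (the class names below are invented for the demo):

    class BinaryOpLike(object):
        # shape emitted for a node with two optional child slots
        def __init__(self, left, right):
            self.left, self.right = left, right

        def __iter__(self):
            if self.left is not None:
                yield self.left
            if self.right is not None:
                yield self.right

    class BreakLike(object):
        # shape emitted for a childless node: the unreachable 'yield' turns
        # __iter__ into a generator function, so iteration yields nothing
        def __iter__(self):
            return
            yield

    assert list(BinaryOpLike('lhs', 'rhs')) == ['lhs', 'rhs']
    assert list(BreakLike()) == []
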
diff --git a/lib_pypy/cffi/_pycparser/_build_tables.py b/lib_pypy/cffi/_pycparser/_build_tables.py
--- a/lib_pypy/cffi/_pycparser/_build_tables.py
+++ b/lib_pypy/cffi/_pycparser/_build_tables.py
@@ -6,17 +6,21 @@
 # Also generates AST code from the configuration file.
 # Should be called from the pycparser directory.
 #
-# Copyright (C) 2008-2015, Eli Bendersky
+# Eli Bendersky [https://eli.thegreenplace.net/]
 # License: BSD
 #-----------------------------------------------------------------
 
+# Insert '.' and '..' as first entries to the search path for modules.
+# Restricted environments like embeddable python do not include the
+# current working directory on startup.
+import sys
+sys.path[0:0] = ['.', '..']
+
 # Generate c_ast.py
 from _ast_gen import ASTCodeGenerator
 ast_gen = ASTCodeGenerator('_c_ast.cfg')
 ast_gen.generate(open('c_ast.py', 'w'))
 
-import sys
-sys.path[0:0] = ['.', '..']
 from pycparser import c_parser
 
 # Generates the tables
diff --git a/lib_pypy/cffi/_pycparser/ast_transforms.py b/lib_pypy/cffi/_pycparser/ast_transforms.py
--- a/lib_pypy/cffi/_pycparser/ast_transforms.py
+++ b/lib_pypy/cffi/_pycparser/ast_transforms.py
@@ -3,7 +3,7 @@
 #
 # Some utilities used by the parser to create a friendlier AST.
 #
-# Copyright (C) 2008-2015, Eli Bendersky
+# Eli Bendersky [https://eli.thegreenplace.net/]
 # License: BSD
 #------------------------------------------------------------------------------
 
@@ -43,7 +43,7 @@
                     Default:
                         break
 
-        The goal of this transform it to fix this mess, turning it into the
+        The goal of this transform is to fix this mess, turning it into the
         following:
 
             Switch
@@ -74,7 +74,8 @@
 
     # Goes over the children of the Compound below the Switch, adding them
     # either directly below new_compound or below the last Case as appropriate
-    for child in switch_node.stmt.block_items:
+    # (for `switch(cond) {}`, block_items would have been None)
+    for child in (switch_node.stmt.block_items or []):
         if isinstance(child, (c_ast.Case, c_ast.Default)):
             # If it's a Case/Default:
             # 1. Add it to the Compound and mark as "last case"
diff --git a/lib_pypy/cffi/_pycparser/c_ast.py b/lib_pypy/cffi/_pycparser/c_ast.py
--- a/lib_pypy/cffi/_pycparser/c_ast.py
+++ b/lib_pypy/cffi/_pycparser/c_ast.py
@@ -11,18 +11,45 @@
 #
 # AST Node classes.
 #
-# Copyright (C) 2008-2015, Eli Bendersky
+# Eli Bendersky [https://eli.thegreenplace.net/]
 # License: BSD
 #-----------------------------------------------------------------
 
 
 import sys
 
+def _repr(obj):
+    """
+    Get the representation of an object, with dedicated pprint-like format for lists.
+    """
+    if isinstance(obj, list):
+        return '[' + (',\n '.join((_repr(e).replace('\n', '\n ') for e in obj))) + '\n]'
+    else:
+        return repr(obj) 
 
 class Node(object):
     __slots__ = ()
     """ Abstract base class for AST nodes.
     """
+    def __repr__(self):
+        """ Generates a python representation of the current node
+        """
+        result = self.__class__.__name__ + '('
+        
+        indent = ''
+        separator = ''
+        for name in self.__slots__[:-2]:
+            result += separator
+            result += indent
+            result += name + '=' + (_repr(getattr(self, name)).replace('\n', '\n  ' + (' ' * (len(name) + len(self.__class__.__name__)))))
+            
+            separator = ','
+            indent = '\n ' + (' ' * len(self.__class__.__name__))
+        
+        result += indent + ')'
+        
+        return result
+
     def children(self):
         """ A sequence of all children that are Nodes
         """
@@ -112,21 +139,31 @@
         *   Modeled after Python's own AST visiting facilities
             (the ast module of Python 3.0)
     """
+
+    _method_cache = None
+
     def visit(self, node):
         """ Visit a node.
         """
-        method = 'visit_' + node.__class__.__name__
-        visitor = getattr(self, method, self.generic_visit)
+
+        if self._method_cache is None:
+            self._method_cache = {}
+
+        visitor = self._method_cache.get(node.__class__.__name__, None)
+        if visitor is None:
+            method = 'visit_' + node.__class__.__name__
+            visitor = getattr(self, method, self.generic_visit)
+            self._method_cache[node.__class__.__name__] = visitor
+
         return visitor(node)
 
     def generic_visit(self, node):
         """ Called if no explicit visitor function exists for a
             node. Implements preorder visiting of the node.
         """
-        for c_name, c in node.children():
+        for c in node:
             self.visit(c)
 
-
 class ArrayDecl(Node):
     __slots__ = ('type', 'dim', 'dim_quals', 'coord', '__weakref__')
     def __init__(self, type, dim, dim_quals, coord=None):
@@ -141,6 +178,12 @@
         if self.dim is not None: nodelist.append(("dim", self.dim))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.type is not None:
+            yield self.type
+        if self.dim is not None:
+            yield self.dim
+
     attr_names = ('dim_quals', )
 
 class ArrayRef(Node):
@@ -156,6 +199,12 @@
         if self.subscript is not None: nodelist.append(("subscript", self.subscript))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.name is not None:
+            yield self.name
+        if self.subscript is not None:
+            yield self.subscript
+
     attr_names = ()
 
 class Assignment(Node):
@@ -172,6 +221,12 @@
         if self.rvalue is not None: nodelist.append(("rvalue", self.rvalue))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.lvalue is not None:
+            yield self.lvalue
+        if self.rvalue is not None:
+            yield self.rvalue
+
     attr_names = ('op', )
 
 class BinaryOp(Node):
@@ -188,6 +243,12 @@
         if self.right is not None: nodelist.append(("right", self.right))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.left is not None:
+            yield self.left
+        if self.right is not None:
+            yield self.right
+
     attr_names = ('op', )
 
 class Break(Node):
@@ -198,6 +259,10 @@
     def children(self):
         return ()
 
+    def __iter__(self):
+        return
+        yield
+
     attr_names = ()
 
 class Case(Node):
@@ -214,6 +279,12 @@
             nodelist.append(("stmts[%d]" % i, child))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.expr is not None:
+            yield self.expr
+        for child in (self.stmts or []):
+            yield child
+
     attr_names = ()
 
 class Cast(Node):
@@ -229,6 +300,12 @@
         if self.expr is not None: nodelist.append(("expr", self.expr))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.to_type is not None:
+            yield self.to_type
+        if self.expr is not None:
+            yield self.expr
+
     attr_names = ()
 
 class Compound(Node):
@@ -243,6 +320,10 @@
             nodelist.append(("block_items[%d]" % i, child))
         return tuple(nodelist)
 
+    def __iter__(self):
+        for child in (self.block_items or []):
+            yield child
+
     attr_names = ()
 
 class CompoundLiteral(Node):
@@ -258,6 +339,12 @@
         if self.init is not None: nodelist.append(("init", self.init))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.type is not None:
+            yield self.type
+        if self.init is not None:
+            yield self.init
+
     attr_names = ()
 
 class Constant(Node):
@@ -271,6 +358,10 @@
         nodelist = []
         return tuple(nodelist)
 
+    def __iter__(self):
+        return
+        yield
+
     attr_names = ('type', 'value', )
 
 class Continue(Node):
@@ -281,6 +372,10 @@
     def children(self):
         return ()
 
+    def __iter__(self):
+        return
+        yield
+
     attr_names = ()
 
 class Decl(Node):
@@ -302,6 +397,14 @@
         if self.bitsize is not None: nodelist.append(("bitsize", self.bitsize))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.type is not None:
+            yield self.type
+        if self.init is not None:
+            yield self.init
+        if self.bitsize is not None:
+            yield self.bitsize
+
     attr_names = ('name', 'quals', 'storage', 'funcspec', )
 
 class DeclList(Node):
@@ -316,6 +419,10 @@
             nodelist.append(("decls[%d]" % i, child))
         return tuple(nodelist)
 
+    def __iter__(self):
+        for child in (self.decls or []):
+            yield child
+
     attr_names = ()
 
 class Default(Node):
@@ -330,6 +437,10 @@
             nodelist.append(("stmts[%d]" % i, child))
         return tuple(nodelist)
 
+    def __iter__(self):
+        for child in (self.stmts or []):
+            yield child
+
     attr_names = ()
 
 class DoWhile(Node):
@@ -345,6 +456,12 @@
         if self.stmt is not None: nodelist.append(("stmt", self.stmt))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.cond is not None:
+            yield self.cond
+        if self.stmt is not None:
+            yield self.stmt
+
     attr_names = ()
 
 class EllipsisParam(Node):
@@ -355,6 +472,10 @@
     def children(self):
         return ()
 
+    def __iter__(self):
+        return
+        yield
+
     attr_names = ()
 
 class EmptyStatement(Node):
@@ -365,6 +486,10 @@
     def children(self):
         return ()
 
+    def __iter__(self):
+        return
+        yield
+
     attr_names = ()
 
 class Enum(Node):
@@ -379,6 +504,10 @@
         if self.values is not None: nodelist.append(("values", self.values))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.values is not None:
+            yield self.values
+
     attr_names = ('name', )
 
 class Enumerator(Node):
@@ -393,6 +522,10 @@
         if self.value is not None: nodelist.append(("value", self.value))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.value is not None:
+            yield self.value
+
     attr_names = ('name', )
 
 class EnumeratorList(Node):
@@ -407,6 +540,10 @@
             nodelist.append(("enumerators[%d]" % i, child))
         return tuple(nodelist)
 
+    def __iter__(self):
+        for child in (self.enumerators or []):
+            yield child
+
     attr_names = ()
 
 class ExprList(Node):
@@ -421,6 +558,10 @@
             nodelist.append(("exprs[%d]" % i, child))
         return tuple(nodelist)
 
+    def __iter__(self):
+        for child in (self.exprs or []):
+            yield child
+
     attr_names = ()
 
 class FileAST(Node):
@@ -435,6 +576,10 @@
             nodelist.append(("ext[%d]" % i, child))
         return tuple(nodelist)
 
+    def __iter__(self):
+        for child in (self.ext or []):
+            yield child
+
     attr_names = ()
 
 class For(Node):
@@ -454,6 +599,16 @@
         if self.stmt is not None: nodelist.append(("stmt", self.stmt))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.init is not None:
+            yield self.init
+        if self.cond is not None:
+            yield self.cond
+        if self.next is not None:
+            yield self.next
+        if self.stmt is not None:
+            yield self.stmt
+
     attr_names = ()
 
 class FuncCall(Node):
@@ -469,6 +624,12 @@
         if self.args is not None: nodelist.append(("args", self.args))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.name is not None:
+            yield self.name
+        if self.args is not None:
+            yield self.args
+
     attr_names = ()
 
 class FuncDecl(Node):
@@ -484,6 +645,12 @@
         if self.type is not None: nodelist.append(("type", self.type))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.args is not None:
+            yield self.args
+        if self.type is not None:
+            yield self.type
+
     attr_names = ()
 
 class FuncDef(Node):
@@ -502,6 +669,14 @@
             nodelist.append(("param_decls[%d]" % i, child))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.decl is not None:
+            yield self.decl
+        if self.body is not None:
+            yield self.body
+        for child in (self.param_decls or []):
+            yield child
+
     attr_names = ()
 
 class Goto(Node):
@@ -514,6 +689,10 @@
         nodelist = []
         return tuple(nodelist)
 
+    def __iter__(self):
+        return
+        yield
+
     attr_names = ('name', )
 
 class ID(Node):
@@ -526,6 +705,10 @@
         nodelist = []
         return tuple(nodelist)
 
+    def __iter__(self):
+        return
+        yield
+
     attr_names = ('name', )
 
 class IdentifierType(Node):
@@ -538,6 +721,10 @@
         nodelist = []
         return tuple(nodelist)
 
+    def __iter__(self):
+        return
+        yield
+
     attr_names = ('names', )
 
 class If(Node):
@@ -555,6 +742,14 @@
         if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.cond is not None:
+            yield self.cond
+        if self.iftrue is not None:
+            yield self.iftrue
+        if self.iffalse is not None:
+            yield self.iffalse
+
     attr_names = ()
 
 class InitList(Node):
@@ -569,6 +764,10 @@
             nodelist.append(("exprs[%d]" % i, child))
         return tuple(nodelist)
 
+    def __iter__(self):
+        for child in (self.exprs or []):
+            yield child
+
     attr_names = ()
 
 class Label(Node):
@@ -583,6 +782,10 @@
         if self.stmt is not None: nodelist.append(("stmt", self.stmt))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.stmt is not None:
+            yield self.stmt
+
     attr_names = ('name', )
 
 class NamedInitializer(Node):
@@ -599,6 +802,12 @@
             nodelist.append(("name[%d]" % i, child))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.expr is not None:
+            yield self.expr
+        for child in (self.name or []):
+            yield child
+
     attr_names = ()
 
 class ParamList(Node):
@@ -613,6 +822,10 @@
             nodelist.append(("params[%d]" % i, child))
         return tuple(nodelist)
 
+    def __iter__(self):
+        for child in (self.params or []):
+            yield child
+
     attr_names = ()
 
 class PtrDecl(Node):
@@ -627,6 +840,10 @@
         if self.type is not None: nodelist.append(("type", self.type))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.type is not None:
+            yield self.type
+
     attr_names = ('quals', )
 
 class Return(Node):
@@ -640,6 +857,10 @@
         if self.expr is not None: nodelist.append(("expr", self.expr))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.expr is not None:
+            yield self.expr
+
     attr_names = ()
 
 class Struct(Node):
@@ -655,6 +876,10 @@
             nodelist.append(("decls[%d]" % i, child))
         return tuple(nodelist)
 
+    def __iter__(self):
+        for child in (self.decls or []):
+            yield child
+
     attr_names = ('name', )
 
 class StructRef(Node):
@@ -671,6 +896,12 @@
         if self.field is not None: nodelist.append(("field", self.field))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.name is not None:
+            yield self.name
+        if self.field is not None:
+            yield self.field
+
     attr_names = ('type', )
 
 class Switch(Node):
@@ -686,6 +917,12 @@
         if self.stmt is not None: nodelist.append(("stmt", self.stmt))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.cond is not None:
+            yield self.cond
+        if self.stmt is not None:
+            yield self.stmt
+
     attr_names = ()
 
 class TernaryOp(Node):
@@ -703,6 +940,14 @@
         if self.iffalse is not None: nodelist.append(("iffalse", self.iffalse))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.cond is not None:
+            yield self.cond
+        if self.iftrue is not None:
+            yield self.iftrue
+        if self.iffalse is not None:
+            yield self.iffalse
+
     attr_names = ()
 
 class TypeDecl(Node):
@@ -718,6 +963,10 @@
         if self.type is not None: nodelist.append(("type", self.type))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.type is not None:
+            yield self.type
+
     attr_names = ('declname', 'quals', )
 
 class Typedef(Node):
@@ -734,6 +983,10 @@
         if self.type is not None: nodelist.append(("type", self.type))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.type is not None:
+            yield self.type
+
     attr_names = ('name', 'quals', 'storage', )
 
 class Typename(Node):
@@ -749,6 +1002,10 @@
         if self.type is not None: nodelist.append(("type", self.type))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.type is not None:
+            yield self.type
+
     attr_names = ('name', 'quals', )
 
 class UnaryOp(Node):
@@ -763,6 +1020,10 @@
         if self.expr is not None: nodelist.append(("expr", self.expr))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.expr is not None:
+            yield self.expr
+
     attr_names = ('op', )
 
 class Union(Node):
@@ -778,6 +1039,10 @@
             nodelist.append(("decls[%d]" % i, child))
         return tuple(nodelist)
 
+    def __iter__(self):
+        for child in (self.decls or []):
+            yield child
+
     attr_names = ('name', )
 
 class While(Node):
@@ -793,5 +1058,27 @@
         if self.stmt is not None: nodelist.append(("stmt", self.stmt))
         return tuple(nodelist)
 
+    def __iter__(self):
+        if self.cond is not None:
+            yield self.cond
+        if self.stmt is not None:
+            yield self.stmt
+
     attr_names = ()
 
+class Pragma(Node):
+    __slots__ = ('string', 'coord', '__weakref__')
+    def __init__(self, string, coord=None):
+        self.string = string
+        self.coord = coord
+
+    def children(self):
+        nodelist = []
+        return tuple(nodelist)
+
+    def __iter__(self):
+        return
+        yield
+
+    attr_names = ('string', )
+
diff --git a/lib_pypy/cffi/_pycparser/c_generator.py b/lib_pypy/cffi/_pycparser/c_generator.py
--- a/lib_pypy/cffi/_pycparser/c_generator.py
+++ b/lib_pypy/cffi/_pycparser/c_generator.py
@@ -3,7 +3,7 @@
 #
 # C code generator from pycparser AST nodes.
 #
-# Copyright (C) 2008-2015, Eli Bendersky
+# Eli Bendersky [https://eli.thegreenplace.net/]
 # License: BSD
 #------------------------------------------------------------------------------
 from . import c_ast
@@ -40,6 +40,12 @@
     def visit_ID(self, n):
         return n.name
 
+    def visit_Pragma(self, n):
+        ret = '#pragma'
+        if n.string:
+            ret += ' ' + n.string
+        return ret
+
     def visit_ArrayRef(self, n):
         arrref = self._parenthesize_unless_simple(n.name)
         return arrref + '[' + self.visit(n.subscript) + ']'
@@ -113,7 +119,7 @@
         return s
 
     def visit_Cast(self, n):
-        s = '(' + self._generate_type(n.to_type) + ')'
+        s = '(' + self._generate_type(n.to_type, emit_declname=False) + ')'
         return s + ' ' + self._parenthesize_unless_simple(n.expr)
 
     def visit_ExprList(self, n):
@@ -129,18 +135,20 @@
         return ', '.join(visited_subexprs)
 
     def visit_Enum(self, n):
-        s = 'enum'
-        if n.name: s += ' ' + n.name
-        if n.values:
-            s += ' {'
-            for i, enumerator in enumerate(n.values.enumerators):
-                s += enumerator.name
-                if enumerator.value:
-                    s += ' = ' + self.visit(enumerator.value)
-                if i != len(n.values.enumerators) - 1:
-                    s += ', '
-            s += '}'
-        return s
+        return self._generate_struct_union_enum(n, name='enum')
+
+    def visit_Enumerator(self, n):
+        if not n.value:
+            return '{indent}{name},\n'.format(
+                indent=self._make_indent(),
+                name=n.name,
+            )
+        else:
+            return '{indent}{name} = {value},\n'.format(
+                indent=self._make_indent(),
+                name=n.name,
+                value=self.visit(n.value),
+            )
 
     def visit_FuncDef(self, n):
         decl = self.visit(n.decl)
@@ -157,6 +165,8 @@
         for ext in n.ext:
             if isinstance(ext, c_ast.FuncDef):
                 s += self.visit(ext)
+            elif isinstance(ext, c_ast.Pragma):
+                s += self.visit(ext) + '\n'
             else:
                 s += self.visit(ext) + ';\n'
         return s
@@ -170,6 +180,10 @@
         s += self._make_indent() + '}\n'
         return s
 
+    def visit_CompoundLiteral(self, n):
+        return '(' + self.visit(n.type) + '){' + self.visit(n.init) + '}'
+
+
     def visit_EmptyStatement(self, n):
         return ';'
 
@@ -188,9 +202,9 @@
         return 'continue;'
 
     def visit_TernaryOp(self, n):
-        s = self._visit_expr(n.cond) + ' ? '
-        s += self._visit_expr(n.iftrue) + ' : '
-        s += self._visit_expr(n.iffalse)
+        s  = '(' + self._visit_expr(n.cond) + ') ? '
+        s += '(' + self._visit_expr(n.iftrue) + ') : '
+        s += '(' + self._visit_expr(n.iffalse) + ')'
         return s
 
     def visit_If(self, n):
@@ -256,43 +270,67 @@
         return '...'
 
     def visit_Struct(self, n):
-        return self._generate_struct_union(n, 'struct')
+        return self._generate_struct_union_enum(n, 'struct')
 
     def visit_Typename(self, n):
         return self._generate_type(n.type)
 
     def visit_Union(self, n):
-        return self._generate_struct_union(n, 'union')
+        return self._generate_struct_union_enum(n, 'union')
 
     def visit_NamedInitializer(self, n):
         s = ''
         for name in n.name:
             if isinstance(name, c_ast.ID):
                 s += '.' + name.name
-            elif isinstance(name, c_ast.Constant):
-                s += '[' + name.value + ']'
-        s += ' = ' + self.visit(n.expr)
+            else:
+                s += '[' + self.visit(name) + ']'
+        s += ' = ' + self._visit_expr(n.expr)
         return s
 
     def visit_FuncDecl(self, n):
         return self._generate_type(n)
 
-    def _generate_struct_union(self, n, name):
-        """ Generates code for structs and unions. name should be either
-            'struct' or union.
+    def visit_ArrayDecl(self, n):
+        return self._generate_type(n, emit_declname=False)
+
+    def visit_TypeDecl(self, n):
+        return self._generate_type(n, emit_declname=False)
+
+    def visit_PtrDecl(self, n):
+        return self._generate_type(n, emit_declname=False)
+
+    def _generate_struct_union_enum(self, n, name):
+        """ Generates code for structs, unions, and enums. name should be
+            'struct', 'union', or 'enum'.
         """
+        if name in ('struct', 'union'):
+            members = n.decls
+            body_function = self._generate_struct_union_body
+        else:
+            assert name == 'enum'
+            members = None if n.values is None else n.values.enumerators
+            body_function = self._generate_enum_body
         s = name + ' ' + (n.name or '')
-        if n.decls:
+        if members is not None:
+            # None means no members
+            # Empty sequence means an empty list of members
             s += '\n'
             s += self._make_indent()
             self.indent_level += 2
             s += '{\n'
-            for decl in n.decls:
-                s += self._generate_stmt(decl)
+            s += body_function(members)
             self.indent_level -= 2
             s += self._make_indent() + '}'
         return s
 
+    def _generate_struct_union_body(self, members):
+        return ''.join(self._generate_stmt(decl) for decl in members)
+
+    def _generate_enum_body(self, members):
+        # `[:-2] + '\n'` removes the final `,` from the enumerator list
+        return ''.join(self.visit(value) for value in members)[:-2] + '\n'
+
     def _generate_stmt(self, n, add_indent=False):
         """ Generation from a statement node. This method exists as a wrapper
             for individual visit_* methods to handle different treatment of
@@ -330,7 +368,7 @@
         s += self._generate_type(n.type)
         return s
 
-    def _generate_type(self, n, modifiers=[]):
+    def _generate_type(self, n, modifiers=[], emit_declname = True):
         """ Recursive generation from a type node. n is the type node.
             modifiers collects the PtrDecl, ArrayDecl and FuncDecl modifiers
             encountered on the way down to a TypeDecl, to allow proper
@@ -344,23 +382,29 @@
             if n.quals: s += ' '.join(n.quals) + ' '
             s += self.visit(n.type)
 
-            nstr = n.declname if n.declname else ''
+            nstr = n.declname if n.declname and emit_declname else ''
             # Resolve modifiers.
             # Wrap in parens to distinguish pointer to array and pointer to
             # function syntax.
             #
             for i, modifier in enumerate(modifiers):
                 if isinstance(modifier, c_ast.ArrayDecl):
-                    if (i != 0 and isinstance(modifiers[i - 1], c_ast.PtrDecl)):
-                        nstr = '(' + nstr + ')'
-                    nstr += '[' + self.visit(modifier.dim) + ']'
+                    if (i != 0 and
+                        isinstance(modifiers[i - 1], c_ast.PtrDecl)):
+                            nstr = '(' + nstr + ')'
+                    nstr += '['
+                    if modifier.dim_quals:
+                        nstr += ' '.join(modifier.dim_quals) + ' '
+                    nstr += self.visit(modifier.dim) + ']'
                 elif isinstance(modifier, c_ast.FuncDecl):
-                    if (i != 0 and isinstance(modifiers[i - 1], c_ast.PtrDecl)):
-                        nstr = '(' + nstr + ')'
+                    if (i != 0 and
+                        isinstance(modifiers[i - 1], c_ast.PtrDecl)):
+                            nstr = '(' + nstr + ')'
                     nstr += '(' + self.visit(modifier.args) + ')'
                 elif isinstance(modifier, c_ast.PtrDecl):
                     if modifier.quals:
-                        nstr = '* %s %s' % (' '.join(modifier.quals), nstr)
+                        nstr = '* %s%s' % (' '.join(modifier.quals),
+                                           ' ' + nstr if nstr else '')
                     else:
                         nstr = '*' + nstr
             if nstr: s += ' ' + nstr
@@ -368,11 +412,12 @@
         elif typ == c_ast.Decl:
             return self._generate_decl(n.type)
         elif typ == c_ast.Typename:
-            return self._generate_type(n.type)
+            return self._generate_type(n.type, emit_declname = emit_declname)
         elif typ == c_ast.IdentifierType:
             return ' '.join(n.names) + ' '
         elif typ in (c_ast.ArrayDecl, c_ast.PtrDecl, c_ast.FuncDecl):
-            return self._generate_type(n.type, modifiers + [n])
+            return self._generate_type(n.type, modifiers + [n],
+                                       emit_declname = emit_declname)
         else:
             return self.visit(n)
 
@@ -395,5 +440,5 @@
         """ Returns True for nodes that are "simple" - i.e. nodes that always
             have higher precedence than operators.
         """
-        return isinstance(n,(   c_ast.Constant, c_ast.ID, c_ast.ArrayRef,
-                                c_ast.StructRef, c_ast.FuncCall))
+        return isinstance(n, (c_ast.Constant, c_ast.ID, c_ast.ArrayRef,
+                              c_ast.StructRef, c_ast.FuncCall))
diff --git a/lib_pypy/cffi/_pycparser/c_lexer.py b/lib_pypy/cffi/_pycparser/c_lexer.py
--- a/lib_pypy/cffi/_pycparser/c_lexer.py
+++ b/lib_pypy/cffi/_pycparser/c_lexer.py
@@ -3,7 +3,7 @@
 #
 # CLexer class: lexer for the C language
 #
-# Copyright (C) 2008-2015, Eli Bendersky
+# Eli Bendersky [https://eli.thegreenplace.net/]
 # License: BSD
 #------------------------------------------------------------------------------
 import re
@@ -19,7 +19,7 @@
         tokens.
 
         The public attribute filename can be set to an initial
-        filaneme, but the lexer will update it upon #line
+        filename, but the lexer will update it upon #line
         directives.
     """
     def __init__(self, error_func, on_lbrace_func, on_rbrace_func,
@@ -52,8 +52,8 @@
         # Allow either "# line" or "# <num>" to support GCC's
         # cpp output
         #
-        self.line_pattern = re.compile('([ \t]*line\W)|([ \t]*\d+)')
-        self.pragma_pattern = re.compile('[ \t]*pragma\W')
+        self.line_pattern = re.compile(r'([ \t]*line\W)|([ \t]*\d+)')
+        self.pragma_pattern = re.compile(r'[ \t]*pragma\W')
 
     def build(self, **kwargs):
         """ Builds the lexer from the specification. Must be
@@ -102,11 +102,11 @@
     keywords = (
         '_BOOL', '_COMPLEX', 'AUTO', 'BREAK', 'CASE', 'CHAR', 'CONST',
         'CONTINUE', 'DEFAULT', 'DO', 'DOUBLE', 'ELSE', 'ENUM', 'EXTERN',
-        'FLOAT', 'FOR', 'GOTO', 'IF', 'INLINE', 'INT', 'LONG', 
+        'FLOAT', 'FOR', 'GOTO', 'IF', 'INLINE', 'INT', 'LONG',
         'REGISTER', 'OFFSETOF',
         'RESTRICT', 'RETURN', 'SHORT', 'SIGNED', 'SIZEOF', 'STATIC', 'STRUCT',
         'SWITCH', 'TYPEDEF', 'UNION', 'UNSIGNED', 'VOID',
-        'VOLATILE', 'WHILE',
+        'VOLATILE', 'WHILE', '__INT128',
     )
 
     keyword_map = {}
@@ -171,7 +171,9 @@
         'ELLIPSIS',
 
         # pre-processor
-        'PPHASH',      # '#'
+        'PPHASH',       # '#'
+        'PPPRAGMA',     # 'pragma'
+        'PPPRAGMASTR',
     )
 
     ##
@@ -203,12 +205,37 @@
     # parse all correct code, even if it means to sometimes parse incorrect
     # code.
     #
-    simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
-    decimal_escape = r"""(\d+)"""
-    hex_escape = r"""(x[0-9a-fA-F]+)"""
-    bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""
+    # The original regexes were taken verbatim from the C syntax definition,
+    # and were later modified to avoid worst-case exponential running time.
+    #
+    #   simple_escape = r"""([a-zA-Z._~!=&\^\-\\?'"])"""
+    #   decimal_escape = r"""(\d+)"""
+    #   hex_escape = r"""(x[0-9a-fA-F]+)"""
+    #   bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-7])"""
+    #
+    # The following modifications were made to avoid the ambiguity that allowed backtracking:
+    # (https://github.com/eliben/pycparser/issues/61)
+    #
+    # - \x was removed from simple_escape, unless it was not followed by a hex digit, to avoid ambiguity with hex_escape.
+    # - hex_escape allows one or more hex characters, but requires that the next character(if any) is not hex
+    # - decimal_escape allows one or more decimal characters, but requires that the next character(if any) is not a decimal
+    # - bad_escape does not allow any decimals (8-9), to avoid conflicting with the permissive decimal_escape.
+    #
+    # Without this change, python's `re` module would recursively try parsing each ambiguous escape sequence in multiple ways.
+    # e.g. `\123` could be parsed as `\1`+`23`, `\12`+`3`, and `\123`.
+
+    simple_escape = r"""([a-wyzA-Z._~!=&\^\-\\?'"]|x(?![0-9a-fA-F]))"""
+    decimal_escape = r"""(\d+)(?!\d)"""
+    hex_escape = r"""(x[0-9a-fA-F]+)(?![0-9a-fA-F])"""
+    bad_escape = r"""([\\][^a-zA-Z._~^!=&\^\-\\?'"x0-9])"""
 
     escape_sequence = r"""(\\("""+simple_escape+'|'+decimal_escape+'|'+hex_escape+'))'
+
+    # This complicated regex with lookahead might be slow for strings, so because all of the valid escapes (including \x) allowed
+    # 0 or more non-escaped characters after the first character, simple_escape+decimal_escape+hex_escape got simplified to
+
+    escape_sequence_start_in_string = r"""(\\[0-9a-zA-Z._~!=&\^\-\\?'"])"""
+
     cconst_char = r"""([^'\\\n]|"""+escape_sequence+')'
     char_const = "'"+cconst_char+"'"
     wchar_const = 'L'+char_const
@@ -216,7 +243,7 @@
     bad_char_const = r"""('"""+cconst_char+"""[^'\n]+')|('')|('"""+bad_escape+r"""[^'\n]*')"""
 
     # string literals (K&R2: A.2.6)
-    string_char = r"""([^"\\\n]|"""+escape_sequence+')'
+    string_char = r"""([^"\\\n]|"""+escape_sequence_start_in_string+')'
     string_literal = '"'+string_char+'*"'
     wstring_literal = 'L'+string_literal
     bad_string_literal = '"'+string_char+'*'+bad_escape+string_char+'*"'
@@ -274,7 +301,6 @@
 
     def t_ppline_NEWLINE(self, t):
         r'\n'
-
         if self.pp_line is None:
             self._error('line number missing in #line', t)
         else:
@@ -304,15 +330,14 @@
 
     def t_pppragma_PPPRAGMA(self, t):
         r'pragma'
-        pass
+        return t
 
-    t_pppragma_ignore = ' \t<>.-{}();=+-*/$%@&^~!?:,0123456789'
+    t_pppragma_ignore = ' \t'
 
-    @TOKEN(string_literal)
-    def t_pppragma_STR(self, t): pass
-
-    @TOKEN(identifier)
-    def t_pppragma_ID(self, t): pass
+    def t_pppragma_STR(self, t):
+        '.+'
+        t.type = 'PPPRAGMASTR'
+        return t
 
     def t_pppragma_error(self, t):
         self._error('invalid #pragma directive', t)
@@ -482,4 +507,3 @@
     def t_error(self, t):
         msg = 'Illegal character %s' % repr(t.value[0])
         self._error(msg, t)
-
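
[Illustrative aside, not part of the changeset] The escape-sequence regexes
above were reworked with lookahead so that an escape such as \123 matches in
exactly one way, instead of letting Python's re module try parsing it as
\1+23, \12+3 and \123 (the worst-case exponential behaviour described in the
comment). A small check of the reworked sub-patterns:

    import re

    # the reworked sub-patterns from the hunk above
    decimal_escape = r"""(\d+)(?!\d)"""
    hex_escape = r"""(x[0-9a-fA-F]+)(?![0-9a-fA-F])"""

    # each escape body is consumed greedily and cannot be re-split by backtracking
    assert re.fullmatch(decimal_escape, "123").group(1) == "123"
    assert re.fullmatch(hex_escape, "x1f").group(1) == "x1f"
    # a trailing non-hex character is simply not part of the escape
    assert re.match(hex_escape, "x1fg").group(1) == "x1f"
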
diff --git a/lib_pypy/cffi/_pycparser/c_parser.py b/lib_pypy/cffi/_pycparser/c_parser.py
--- a/lib_pypy/cffi/_pycparser/c_parser.py
+++ b/lib_pypy/cffi/_pycparser/c_parser.py
@@ -3,7 +3,7 @@
 #
 # CParser class: Parser and AST builder for the C language
 #
-# Copyright (C) 2008-2015, Eli Bendersky
+# Eli Bendersky [https://eli.thegreenplace.net/]
 # License: BSD
 #------------------------------------------------------------------------------
 import re
@@ -12,14 +12,16 @@
 
 from . import c_ast
 from .c_lexer import CLexer
-from .plyparser import PLYParser, Coord, ParseError
+from .plyparser import PLYParser, Coord, ParseError, parameterized, template
 from .ast_transforms import fix_switch_cases
 
 
+@template
 class CParser(PLYParser):
     def __init__(
             self,
             lex_optimize=True,
+            lexer=CLexer,
             lextab='cffi._pycparser.lextab',
             yacc_optimize=True,
             yacctab='cffi._pycparser.yacctab',
@@ -42,6 +44,10 @@
                 to save the re-generation of the lexer table on
                 each run.
 
+            lexer:
+                Set this parameter to define the lexer to use if
+                you're not using the default CLexer.
+
             lextab:
                 Points to the lex table that's used for optimized
                 mode. Only if you're modifying the lexer and want
@@ -70,7 +76,7 @@
                 Set this parameter to control the location of generated
                 lextab and yacctab files.
         """
-        self.clex = CLexer(
+        self.clex = lexer(
             error_func=self._lex_error_func,
             on_lbrace_func=self._lex_on_lbrace_func,
             on_rbrace_func=self._lex_on_rbrace_func,
@@ -86,14 +92,14 @@
             'abstract_declarator',
             'assignment_expression',
             'declaration_list',
-            'declaration_specifiers',
+            'declaration_specifiers_no_type',
             'designation',
             'expression',
             'identifier_list',
             'init_declarator_list',
+            'id_init_declarator_list',
             'initializer_list',
             'parameter_type_list',
-            'specifier_qualifier_list',
             'block_item_list',
             'type_qualifier_list',
             'struct_declarator_list'
@@ -342,7 +348,7 @@
                 coord=typename[0].coord)
         return decl
 
-    def _add_declaration_specifier(self, declspec, newspec, kind):
+    def _add_declaration_specifier(self, declspec, newspec, kind, append=False):
         """ Declaration specifiers are represented by a dictionary
             with the entries:
             * qual: a list of type qualifiers
@@ -352,11 +358,18 @@
 
             This method is given a declaration specifier, and a
             new specifier of a given kind.
+            If `append` is True, the new specifier is added to the end of
+            the specifiers list, otherwise it's added at the beginning.
             Returns the declaration specifier, with the new
             specifier incorporated.
         """
         spec = declspec or dict(qual=[], storage=[], type=[], function=[])
-        spec[kind].insert(0, newspec)
+
+        if append:
+            spec[kind].append(newspec)
+        else:
+            spec[kind].insert(0, newspec)
+
         return spec
 
     def _build_declarations(self, spec, decls, typedef_namespace=False):
@@ -516,8 +529,7 @@
     def p_translation_unit_2(self, p):
         """ translation_unit    : translation_unit external_declaration
         """
-        if p[2] is not None:
-            p[1].extend(p[2])
+        p[1].extend(p[2])
         p[0] = p[1]
 
     # Declarations always come as lists (because they can be
@@ -537,32 +549,42 @@
 
     def p_external_declaration_3(self, p):
         """ external_declaration    : pp_directive
+                                    | pppragma_directive
         """
-        p[0] = p[1]
+        p[0] = [p[1]]
 
     def p_external_declaration_4(self, p):
         """ external_declaration    : SEMI
         """
-        p[0] = None
+        p[0] = []
 
     def p_pp_directive(self, p):
         """ pp_directive  : PPHASH
         """
         self._parse_error('Directives not supported yet',
-            self._coord(p.lineno(1)))
+                          self._token_coord(p, 1))
+
+    def p_pppragma_directive(self, p):
+        """ pppragma_directive      : PPPRAGMA
+                                    | PPPRAGMA PPPRAGMASTR
+        """
+        if len(p) == 3:
+            p[0] = c_ast.Pragma(p[2], self._token_coord(p, 2))
+        else:
+            p[0] = c_ast.Pragma("", self._token_coord(p, 1))
 
     # In function definitions, the declarator can be followed by
     # a declaration list, for old "K&R style" function definitios.
     #
     def p_function_definition_1(self, p):
-        """ function_definition : declarator declaration_list_opt compound_statement
+        """ function_definition : id_declarator declaration_list_opt compound_statement
         """
         # no declaration specifiers - 'int' becomes the default type
         spec = dict(
             qual=[],
             storage=[],
             type=[c_ast.IdentifierType(['int'],
-                                       coord=self._coord(p.lineno(1)))],
+                                       coord=self._token_coord(p, 1))],
             function=[])
 
         p[0] = self._build_function_definition(
@@ -572,7 +594,7 @@
             body=p[3])
 
     def p_function_definition_2(self, p):
-        """ function_definition : declaration_specifiers declarator declaration_list_opt compound_statement
+        """ function_definition : declaration_specifiers id_declarator declaration_list_opt compound_statement
         """
         spec = p[1]
 
@@ -589,9 +611,63 @@
                         | selection_statement
                         | iteration_statement
                         | jump_statement
+                        | pppragma_directive
         """
         p[0] = p[1]
 
+    # A pragma is generally considered a decorator rather than an actual statement.
+    # Still, for the purposes of analyzing an abstract syntax tree of C code,
+    # pragma's should not be ignored and were previously treated as a statement.
+    # This presents a problem for constructs that take a statement such as labeled_statements,
+    # selection_statements, and iteration_statements, causing a misleading structure
+    # in the AST. For example, consider the following C code.
+    #
+    #   for (int i = 0; i < 3; i++)
+    #       #pragma omp critical
+    #       sum += 1;
+    #
+    # This code will compile and execute "sum += 1;" as the body of the for loop.
+    # Previous implementations of PyCParser would render the AST for this
+    # block of code as follows:
+    #
+    #   For:
+    #     DeclList:
+    #       Decl: i, [], [], []
+    #         TypeDecl: i, []
+    #           IdentifierType: ['int']
+    #         Constant: int, 0
+    #     BinaryOp: <
+    #       ID: i
+    #       Constant: int, 3
+    #     UnaryOp: p++
+    #       ID: i
+    #     Pragma: omp critical
+    #   Assignment: +=
+    #     ID: sum
+    #     Constant: int, 1
+    #
+    # This AST misleadingly takes the Pragma as the body of the loop and the
+    # assignment then becomes a sibling of the loop.
+    #
+    # To solve edge cases like these, the pragmacomp_or_statement rule groups
+    # a pragma and its following statement (which would otherwise be orphaned)
+    # using a compound block, effectively turning the above code into:
+    #
+    #   for (int i = 0; i < 3; i++) {
+    #       #pragma omp critical
+    #       sum += 1;
+    #   }
+    def p_pragmacomp_or_statement(self, p):
+        """ pragmacomp_or_statement     : pppragma_directive statement
+                                        | statement
+        """
+        if isinstance(p[1], c_ast.Pragma) and len(p) == 3:
+            p[0] = c_ast.Compound(
+                block_items=[p[1], p[2]],
+                coord=self._token_coord(p, 1))
+        else:
+            p[0] = p[1]
+
     # In C, declarations can come several in a line:
     #   int x, *px, romulo = 5;
     #
@@ -603,6 +679,7 @@
     #
     def p_decl_body(self, p):
         """ decl_body : declaration_specifiers init_declarator_list_opt
+                      | declaration_specifiers_no_type id_init_declarator_list_opt
         """
         spec = p[1]
 
@@ -675,26 +752,58 @@
         """
         p[0] = p[1] if len(p) == 2 else p[1] + p[2]
 
-    def p_declaration_specifiers_1(self, p):
-        """ declaration_specifiers  : type_qualifier declaration_specifiers_opt
+    # To know when declaration-specifiers end and declarators begin,
+    # we require declaration-specifiers to have at least one
+    # type-specifier, and disallow typedef-names after we've seen any
+    # type-specifier. These are both required by the spec.
+    #
+    def p_declaration_specifiers_no_type_1(self, p):
+        """ declaration_specifiers_no_type  : type_qualifier declaration_specifiers_no_type_opt
         """
         p[0] = self._add_declaration_specifier(p[2], p[1], 'qual')
 
-    def p_declaration_specifiers_2(self, p):
-        """ declaration_specifiers  : type_specifier declaration_specifiers_opt
-        """
-        p[0] = self._add_declaration_specifier(p[2], p[1], 'type')
-
-    def p_declaration_specifiers_3(self, p):
-        """ declaration_specifiers  : storage_class_specifier declaration_specifiers_opt
+    def p_declaration_specifiers_no_type_2(self, p):
+        """ declaration_specifiers_no_type  : storage_class_specifier declaration_specifiers_no_type_opt
         """
         p[0] = self._add_declaration_specifier(p[2], p[1], 'storage')
 
-    def p_declaration_specifiers_4(self, p):
-        """ declaration_specifiers  : function_specifier declaration_specifiers_opt
+    def p_declaration_specifiers_no_type_3(self, p):
+        """ declaration_specifiers_no_type  : function_specifier declaration_specifiers_no_type_opt
         """
         p[0] = self._add_declaration_specifier(p[2], p[1], 'function')
 
+
+    def p_declaration_specifiers_1(self, p):
+        """ declaration_specifiers  : declaration_specifiers type_qualifier
+        """
+        p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True)
+
+    def p_declaration_specifiers_2(self, p):
+        """ declaration_specifiers  : declaration_specifiers storage_class_specifier
+        """
+        p[0] = self._add_declaration_specifier(p[1], p[2], 'storage', append=True)
+
+    def p_declaration_specifiers_3(self, p):
+        """ declaration_specifiers  : declaration_specifiers function_specifier
+        """
+        p[0] = self._add_declaration_specifier(p[1], p[2], 'function', append=True)
+
+    def p_declaration_specifiers_4(self, p):
+        """ declaration_specifiers  : declaration_specifiers type_specifier_no_typeid
+        """
+        p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
+
+    def p_declaration_specifiers_5(self, p):
+        """ declaration_specifiers  : type_specifier
+        """
+        p[0] = self._add_declaration_specifier(None, p[1], 'type')
+
+    def p_declaration_specifiers_6(self, p):
+        """ declaration_specifiers  : declaration_specifiers_no_type type_specifier
+        """
+        p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
+
+
     def p_storage_class_specifier(self, p):
         """ storage_class_specifier : AUTO
                                     | REGISTER
@@ -709,25 +818,27 @@
         """
         p[0] = p[1]
 
-    def p_type_specifier_1(self, p):
-        """ type_specifier  : VOID
-                            | _BOOL
-                            | CHAR
-                            | SHORT
-                            | INT
-                            | LONG
-                            | FLOAT
-                            | DOUBLE
-                            | _COMPLEX
-                            | SIGNED
-                            | UNSIGNED
+    def p_type_specifier_no_typeid(self, p):
+        """ type_specifier_no_typeid  : VOID
+                                      | _BOOL
+                                      | CHAR
+                                      | SHORT
+                                      | INT
+                                      | LONG
+                                      | FLOAT
+                                      | DOUBLE
+                                      | _COMPLEX
+                                      | SIGNED
+                                      | UNSIGNED
+                                      | __INT128
         """
-        p[0] = c_ast.IdentifierType([p[1]], coord=self._coord(p.lineno(1)))
+        p[0] = c_ast.IdentifierType([p[1]], coord=self._token_coord(p, 1))
 
-    def p_type_specifier_2(self, p):
+    def p_type_specifier(self, p):
         """ type_specifier  : typedef_name
                             | enum_specifier
                             | struct_or_union_specifier
+                            | type_specifier_no_typeid
         """
         p[0] = p[1]
 
@@ -738,30 +849,12 @@
         """
         p[0] = p[1]
 
-    def p_init_declarator_list_1(self, p):
+    def p_init_declarator_list(self, p):
         """ init_declarator_list    : init_declarator
                                     | init_declarator_list COMMA init_declarator
         """
         p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
 
-    # If the code is declaring a variable that was declared a typedef in an
-    # outer scope, yacc will think the name is part of declaration_specifiers,
-    # not init_declarator, and will then get confused by EQUALS.  Pass None
-    # up in place of declarator, and handle this at a higher level.
-    #
-    def p_init_declarator_list_2(self, p):
-        """ init_declarator_list    : EQUALS initializer
-        """
-        p[0] = [dict(decl=None, init=p[2])]
-
-    # Similarly, if the code contains duplicate typedefs of, for example,
-    # array types, the array portion will appear as an abstract declarator.
-    #
-    def p_init_declarator_list_3(self, p):
-        """ init_declarator_list    : abstract_declarator
-        """
-        p[0] = [dict(decl=p[1], init=None)]
-
     # Returns a {decl=<declarator> : init=<initializer>} dictionary
     # If there's no initializer, uses None
     #
@@ -771,15 +864,40 @@
         """
         p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None))
 
+    def p_id_init_declarator_list(self, p):
+        """ id_init_declarator_list    : id_init_declarator
+                                       | id_init_declarator_list COMMA init_declarator
+        """
+        p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]]
+
+    def p_id_init_declarator(self, p):
+        """ id_init_declarator : id_declarator
+                               | id_declarator EQUALS initializer
+        """
+        p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None))
+
+    # Require at least one type specifier in a specifier-qualifier-list
+    #
     def p_specifier_qualifier_list_1(self, p):
-        """ specifier_qualifier_list    : type_qualifier specifier_qualifier_list_opt
+        """ specifier_qualifier_list    : specifier_qualifier_list type_specifier_no_typeid
         """
-        p[0] = self._add_declaration_specifier(p[2], p[1], 'qual')
+        p[0] = self._add_declaration_specifier(p[1], p[2], 'type', append=True)
 
     def p_specifier_qualifier_list_2(self, p):
-        """ specifier_qualifier_list    : type_specifier specifier_qualifier_list_opt
+        """ specifier_qualifier_list    : specifier_qualifier_list type_qualifier
         """
-        p[0] = self._add_declaration_specifier(p[2], p[1], 'type')
+        p[0] = self._add_declaration_specifier(p[1], p[2], 'qual', append=True)
+
+    def p_specifier_qualifier_list_3(self, p):
+        """ specifier_qualifier_list  : type_specifier
+        """
+        p[0] = self._add_declaration_specifier(None, p[1], 'type')
+
+    def p_specifier_qualifier_list_4(self, p):
+        """ specifier_qualifier_list  : type_qualifier_list type_specifier
+        """
+        spec = dict(qual=p[1], storage=[], type=[], function=[])
+        p[0] = self._add_declaration_specifier(spec, p[2], 'type', append=True)
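
A minimal sketch, assuming pycparser >= 2.19, of how a specifier-qualifier-list such as "const int" in a struct member is split between qualifiers and the (now mandatory) type specifier:

    from pycparser import c_parser

    ast = c_parser.CParser().parse("struct s { const int x; };")
    member = ast.ext[0].type.decls[0]            # the Decl for 'x'
    print(member.quals, member.type.type.names)
    # prints roughly: ['const'] ['int']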
 
     # TYPEID is allowed here (and in other struct/enum related tag names), because
     # struct/enum tags reside in their own namespace and can be named the same as types
@@ -789,29 +907,48 @@
                                         | struct_or_union TYPEID
         """
         klass = self._select_struct_union_class(p[1])
+        # None means no list of members
         p[0] = klass(
             name=p[2],
             decls=None,
-            coord=self._coord(p.lineno(2)))
+            coord=self._token_coord(p, 2))
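
A minimal sketch, assuming pycparser >= 2.19, of the namespace point made in the comment above: a struct tag may legally reuse a typedef name.

    from pycparser import c_parser

    ast = c_parser.CParser().parse("typedef int Node; struct Node { Node v; };")
    # ext[1] is the bare struct declaration; its tag reuses the typedef name
    print(ast.ext[1].type.name)
    # prints roughly: Node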
 
     def p_struct_or_union_specifier_2(self, p):
         """ struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close
+                                      | struct_or_union brace_open brace_close
         """
         klass = self._select_struct_union_class(p[1])
-        p[0] = klass(
-            name=None,
-            decls=p[3],
-            coord=self._coord(p.lineno(2)))
+        if len(p) == 4:
+            # Empty sequence means an empty list of members
+            p[0] = klass(
+                name=None,
+                decls=[],
+                coord=self._token_coord(p, 2))
+        else:
+            p[0] = klass(
+                name=None,
+                decls=p[3],
+                coord=self._token_coord(p, 2))
+
 
     def p_struct_or_union_specifier_3(self, p):
         """ struct_or_union_specifier   : struct_or_union ID brace_open struct_declaration_list brace_close
+                                        | struct_or_union ID brace_open brace_close
                                         | struct_or_union TYPEID brace_open struct_declaration_list brace_close
+                                        | struct_or_union TYPEID brace_open brace_close
         """
         klass = self._select_struct_union_class(p[1])
-        p[0] = klass(
-            name=p[2],
-            decls=p[4],
-            coord=self._coord(p.lineno(2)))
+        if len(p) == 5:
+            # Empty sequence means an empty list of members
+            p[0] = klass(
+                name=p[2],
+                decls=[],
+                coord=self._token_coord(p, 2))
+        else:
+            p[0] = klass(
+                name=p[2],
+                decls=p[4],
+                coord=self._token_coord(p, 2))
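
A minimal sketch, assuming pycparser >= 2.19, of the None-versus-empty-list distinction: a bare tag keeps decls=None, while the new empty-body alternatives produce decls=[].

    from pycparser import c_parser

    parser = c_parser.CParser()
    fwd = parser.parse("struct s;").ext[0].type       # forward declaration
    empty = parser.parse("struct t {};").ext[0].type  # empty body
    print(fwd.decls, empty.decls)
    # prints roughly: None []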
 
     def p_struct_or_union(self, p):
         """ struct_or_union : STRUCT
@@ -825,7 +962,10 @@
         """ struct_declaration_list     : struct_declaration
                                         | struct_declaration_list struct_declaration
         """
-        p[0] = p[1] if len(p) == 2 else p[1] + p[2]
+        if len(p) == 2:
+            p[0] = p[1] or []
+        else:
+            p[0] = p[1] + (p[2] or [])
 
     def p_struct_declaration_1(self, p):
         """ struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI
@@ -866,18 +1006,14 @@
         p[0] = decls
 
     def p_struct_declaration_2(self, p):
-        """ struct_declaration : specifier_qualifier_list abstract_declarator SEMI
+        """ struct_declaration : SEMI
         """
-        # "Abstract declarator?!", you ask?  Structure members can have the
-        # same names as typedefs.  The trouble is that the member's name gets
-        # grouped into specifier_qualifier_list, leaving any remainder to
-        # appear as an abstract declarator, as in:
-        #   typedef int Foo;
-        #   struct { Foo Foo[3]; };
-        #
-        p[0] = self._build_declarations(
-                spec=p[1],
-                decls=[dict(decl=p[2], init=None)])
+        p[0] = None
+
+    def p_struct_declaration_3(self, p):
+        """ struct_declaration : pppragma_directive
+        """
+        p[0] = [p[1]]
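
A minimal sketch, assuming pycparser >= 2.19: the SEMI alternative yields None, which the struct_declaration_list rule above filters out, so a stray semicolon inside a struct body adds no member.

    from pycparser import c_parser

    ast = c_parser.CParser().parse("struct s { int x; ; };")
    print(len(ast.ext[0].type.decls))
    # prints roughly: 1  (only 'x' survives)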
 
     def p_struct_declarator_list(self, p):
         """ struct_declarator_list  : struct_declarator
@@ -906,18 +1042,18 @@
         """ enum_specifier  : ENUM ID
                             | ENUM TYPEID
         """
-        p[0] = c_ast.Enum(p[2], None, self._coord(p.lineno(1)))
+        p[0] = c_ast.Enum(p[2], None, self._token_coord(p, 1))
 
     def p_enum_specifier_2(self, p):
         """ enum_specifier  : ENUM brace_open enumerator_list brace_close
         """
-        p[0] = c_ast.Enum(None, p[3], self._coord(p.lineno(1)))
+        p[0] = c_ast.Enum(None, p[3], self._token_coord(p, 1))
 
     def p_enum_specifier_3(self, p):
         """ enum_specifier  : ENUM ID brace_open enumerator_list brace_close
                             | ENUM TYPEID brace_open enumerator_list brace_close
         """
-        p[0] = c_ast.Enum(p[2], p[4], self._coord(p.lineno(1)))
+        p[0] = c_ast.Enum(p[2], p[4], self._token_coord(p, 1))
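
A minimal sketch, assuming pycparser >= 2.19: all three enum_specifier forms build an Enum node, and the braced forms attach an EnumeratorList.

    from pycparser import c_parser

    enum = c_parser.CParser().parse("enum color { RED, GREEN = 2 };").ext[0].type
    print(enum.name, [e.name for e in enum.values.enumerators])
    # prints roughly: color ['RED', 'GREEN']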
 
     def p_enumerator_list(self, p):
         """ enumerator_list : enumerator
@@ -939,56 +1075,52 @@
         if len(p) == 2:
             enumerator = c_ast.Enumerator(
                         p[1], None,
-                        self._coord(p.lineno(1)))
+                        self._token_coord(p, 1))
         else:
             enumerator = c_ast.Enumerator(
                         p[1], p[3],
-                        self._coord(p.lineno(1)))
+                        self._token_coord(p, 1))
         self._add_identifier(enumerator.name, enumerator.coord)
 
         p[0] = enumerator
 
-    def p_declarator_1(self, p):
-        """ declarator  : direct_declarator
+    def p_declarator(self, p):
+        """ declarator  : id_declarator
+                        | typeid_declarator
         """
         p[0] = p[1]
 
-    def p_declarator_2(self, p):
-        """ declarator  : pointer direct_declarator
+    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+    def p_xxx_declarator_1(self, p):
+        """ xxx_declarator  : direct_xxx_declarator
+        """
+        p[0] = p[1]
+
+    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+    def p_xxx_declarator_2(self, p):
+        """ xxx_declarator  : pointer direct_xxx_declarator
         """
         p[0] = self._type_modify_decl(p[2], p[1])
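
The xxx/yyy placeholders above are expanded by pycparser's rule-template machinery into one concrete PLY rule per (name, token) pair. An illustrative sketch of the substitution idea only, not the vendored decorator:

    def expand(template, pairs):
        # e.g. 'direct_xxx_declarator : yyy' -> 'direct_id_declarator : ID'
        return [template.replace('xxx', name).replace('yyy', token)
                for name, token in pairs]

    for rule in expand('direct_xxx_declarator : yyy',
                       [('id', 'ID'), ('typeid', 'TYPEID'),
                        ('typeid_noparen', 'TYPEID')]):
        print(rule)
    # prints: direct_id_declarator : ID, direct_typeid_declarator : TYPEID,
    #         direct_typeid_noparen_declarator : TYPEID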
 
-    # Since it's impossible for a type to be specified after a pointer, assume
-    # it's intended to be the name for this declaration.  _add_identifier will
-    # raise an error if this TYPEID can't be redeclared.
-    #
-    def p_declarator_3(self, p):
-        """ declarator  : pointer TYPEID
-        """
-        decl = c_ast.TypeDecl(
-            declname=p[2],
-            type=None,
-            quals=None,
-            coord=self._coord(p.lineno(2)))
-
-        p[0] = self._type_modify_decl(decl, p[1])
-
-    def p_direct_declarator_1(self, p):
-        """ direct_declarator   : ID
+    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+    def p_direct_xxx_declarator_1(self, p):
+        """ direct_xxx_declarator   : yyy
         """
         p[0] = c_ast.TypeDecl(
             declname=p[1],
             type=None,
             quals=None,
-            coord=self._coord(p.lineno(1)))
+            coord=self._token_coord(p, 1))
 
-    def p_direct_declarator_2(self, p):
-        """ direct_declarator   : LPAREN declarator RPAREN
+    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'))
+    def p_direct_xxx_declarator_2(self, p):
+        """ direct_xxx_declarator   : LPAREN xxx_declarator RPAREN
         """
         p[0] = p[2]
 
-    def p_direct_declarator_3(self, p):
-        """ direct_declarator   : direct_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET
+    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+    def p_direct_xxx_declarator_3(self, p):
+        """ direct_xxx_declarator   : direct_xxx_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET
         """
         quals = (p[3] if len(p) > 5 else []) or []
         # Accept dimension qualifiers
@@ -1001,9 +1133,10 @@
 
         p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
 
-    def p_direct_declarator_4(self, p):
-        """ direct_declarator   : direct_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET
-                                | direct_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET
+    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+    def p_direct_xxx_declarator_4(self, p):
+        """ direct_xxx_declarator   : direct_xxx_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET
+                                    | direct_xxx_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET
         """
         # Using slice notation for PLY objects doesn't work in Python 3 for the
         # version of PLY embedded with pycparser; see PLY Google Code issue 30.
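
A minimal sketch, assuming pycparser >= 2.19: 'static' (together with any qualifiers) written inside the brackets of an array parameter is recorded in ArrayDecl.dim_quals.

    from pycparser import c_parser

    ast = c_parser.CParser().parse("void f(int a[static 10]);")
    arr = ast.ext[0].type.args.params[0].type   # the parameter's ArrayDecl
    print(arr.dim_quals, arr.dim.value)
    # prints roughly: ['static'] 10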
@@ -1022,20 +1155,22 @@
 
     # Special for VLAs
     #
-    def p_direct_declarator_5(self, p):
-        """ direct_declarator   : direct_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET
+    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+    def p_direct_xxx_declarator_5(self, p):
+        """ direct_xxx_declarator   : direct_xxx_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET
         """
         arr = c_ast.ArrayDecl(
             type=None,
-            dim=c_ast.ID(p[4], self._coord(p.lineno(4))),
+            dim=c_ast.ID(p[4], self._token_coord(p, 4)),
             dim_quals=p[3] if p[3] != None else [],
             coord=p[1].coord)
 
         p[0] = self._type_modify_decl(decl=p[1], modifier=arr)
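
A minimal sketch, assuming pycparser >= 2.19: the '[*]' VLA form in a prototype becomes an ArrayDecl whose dim is an ID node named '*'.

    from pycparser import c_parser

    ast = c_parser.CParser().parse("void f(int n, int a[*]);")
    arr = ast.ext[0].type.args.params[1].type   # ArrayDecl for 'a'
    print(type(arr.dim).__name__, arr.dim.name)
    # prints roughly: ID *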
 
-    def p_direct_declarator_6(self, p):
-        """ direct_declarator   : direct_declarator LPAREN parameter_type_list RPAREN
-                                | direct_declarator LPAREN identifier_list_opt RPAREN
+    @parameterized(('id', 'ID'), ('typeid', 'TYPEID'), ('typeid_noparen', 'TYPEID'))
+    def p_direct_xxx_declarator_6(self, p):
+        """ direct_xxx_declarator   : direct_xxx_declarator LPAREN parameter_type_list RPAREN
+                                    | direct_xxx_declarator LPAREN identifier_list_opt RPAREN
         """
         func = c_ast.FuncDecl(