Author: Richard Plangger <planri...@gmail.com>
Branch: py3.5
Changeset: r85027:f03edd99abf9
Date: 2016-06-08 12:30 +0200
http://bitbucket.org/pypy/pypy/changeset/f03edd99abf9/

Log:    reduced the diff to CPython's Python.asdl (MatMul -> MatMult) and
        copied over asdl.py from CPython 3.5
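For context: MatMult is the node CPython 3.5 emits for the matrix-multiplication
operator '@' from PEP 465, so the rename simply matches upstream. As a quick
sanity check against stock CPython (not part of this changeset):

    import ast

    # 'a @ b' parses to a BinOp whose operator node is ast.MatMult
    tree = ast.parse("a @ b", mode="eval")
    assert isinstance(tree.body.op, ast.MatMult)
    print(ast.dump(tree.body.op))  # prints: MatMult()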

diff --git a/pypy/interpreter/astcompiler/tools/Python.asdl b/pypy/interpreter/astcompiler/tools/Python.asdl
--- a/pypy/interpreter/astcompiler/tools/Python.asdl
+++ b/pypy/interpreter/astcompiler/tools/Python.asdl
@@ -91,8 +91,8 @@
 
     boolop = And | Or
 
-    operator = Add | Sub | Mult | Div | Mod | Pow | LShift 
-                 | RShift | BitOr | BitXor | BitAnd | FloorDiv | MatMul
+    operator = Add | Sub | Mult | MatMult | Div | Mod | Pow | LShift
+                 | RShift | BitOr | BitXor | BitAnd | FloorDiv
 
     unaryop = Invert | Not | UAdd | USub
 
diff --git a/pypy/interpreter/astcompiler/tools/asdl.py b/pypy/interpreter/astcompiler/tools/asdl.py
--- a/pypy/interpreter/astcompiler/tools/asdl.py
+++ b/pypy/interpreter/astcompiler/tools/asdl.py
@@ -1,235 +1,52 @@
-"""An implementation of the Zephyr Abstract Syntax Definition Language.
+#-------------------------------------------------------------------------------
+# Parser for ASDL [1] definition files. Reads in an ASDL description and parses
+# it into an AST that describes it.
+#
+# The EBNF we're parsing here: Figure 1 of the paper [1]. Extended to support
+# modules and attributes after a product. Words starting with Capital letters
+# are terminals. Literal tokens are in "double quotes". Others are
+# non-terminals. Id is either TokenId or ConstructorId.
+#
+# module        ::= "module" Id "{" [definitions] "}"
+# definitions   ::= { TypeId "=" type }
+# type          ::= product | sum
+# product       ::= fields ["attributes" fields]
+# fields        ::= "(" { field, "," } field ")"
+# field         ::= TypeId ["?" | "*"] [Id]
+# sum           ::= constructor { "|" constructor } ["attributes" fields]
+# constructor   ::= ConstructorId [fields]
+#
+# [1] "The Zephyr Abstract Syntax Description Language" by Wang, et. al. See
+#     http://asdl.sourceforge.net/
+#-------------------------------------------------------------------------------
+from collections import namedtuple
+import re
 
-See http://asdl.sourceforge.net/ and
-http://www.cs.princeton.edu/~danwang/Papers/dsl97/dsl97-abstract.html.
+__all__ = [
+    'builtin_types', 'parse', 'AST', 'Module', 'Type', 'Constructor',
+    'Field', 'Sum', 'Product', 'VisitorBase', 'Check', 'check']
 
-Only supports top level module decl, not view.  I'm guessing that view
-is intended to support the browser and I'm not interested in the
-browser.
+# The following classes define nodes into which the ASDL description is parsed.
+# Note: this is a "meta-AST". ASDL files (such as Python.asdl) describe the AST
+# structure used by a programming language. But ASDL files themselves need to be
+# parsed. This module parses ASDL files and uses a simple AST to represent them.
+# See the EBNF at the top of the file to understand the logical connection
+# between the various node types.
 
-Changes for Python: Add support for module versions
-"""
+builtin_types = {'identifier', 'string', 'bytes', 'int', 'object', 'singleton'}
 
-import os
-import traceback
-
-import spark
-
-class Token(object):
-    # spark seems to dispatch in the parser based on a token's
-    # type attribute
-    def __init__(self, type, lineno):
-        self.type = type
-        self.lineno = lineno
-
-    def __str__(self):
-        return self.type
-
+class AST:
     def __repr__(self):
-        return str(self)
-
-class Id(Token):
-    def __init__(self, value, lineno):
-        self.type = 'Id'
-        self.value = value
-        self.lineno = lineno
-
-    def __str__(self):
-        return self.value
-
-class String(Token):
-    def __init__(self, value, lineno):
-        self.type = 'String'
-        self.value = value
-        self.lineno = lineno
-
-class ASDLSyntaxError(Exception):
-
-    def __init__(self, lineno, token=None, msg=None):
-        self.lineno = lineno
-        self.token = token
-        self.msg = msg
-
-    def __str__(self):
-        if self.msg is None:
-            return "Error at '%s', line %d" % (self.token, self.lineno)
-        else:
-            return "%s, line %d" % (self.msg, self.lineno)
-
-class ASDLScanner(spark.GenericScanner, object):
-
-    def tokenize(self, input):
-        self.rv = []
-        self.lineno = 1
-        super(ASDLScanner, self).tokenize(input)
-        return self.rv
-
-    def t_id(self, s):
-        r"[\w\.]+"
-        # XXX doesn't distinguish upper vs. lower, which is
-        # significant for ASDL.
-        self.rv.append(Id(s, self.lineno))
-
-    def t_string(self, s):
-        r'"[^"]*"'
-        self.rv.append(String(s, self.lineno))
-
-    def t_xxx(self, s): # not sure what this production means
-        r"<="
-        self.rv.append(Token(s, self.lineno))
-
-    def t_punctuation(self, s):
-        r"[\{\}\*\=\|\(\)\,\?\:]"
-        self.rv.append(Token(s, self.lineno))
-
-    def t_comment(self, s):
-        r"\-\-[^\n]*"
-        pass
-
-    def t_newline(self, s):
-        r"\n"
-        self.lineno += 1
-
-    def t_whitespace(self, s):
-        r"[ \t]+"
-        pass
-
-    def t_default(self, s):
-        r" . +"
-        raise ValueError, "unmatched input: %s" % `s`
-
-class ASDLParser(spark.GenericParser, object):
-    def __init__(self):
-        super(ASDLParser, self).__init__("module")
-
-    def typestring(self, tok):
-        return tok.type
-
-    def error(self, tok):
-        raise ASDLSyntaxError(tok.lineno, tok)
-
-    def p_module_0(self, (module, name, _0, _1)):
-        " module ::= Id Id { } "
-        if module.value != "module":
-            raise ASDLSyntaxError(module.lineno,
-                                  msg="expected 'module', found %s" % module)
-        return Module(name, None)
-
-    def p_module(self, (module, name, _0, definitions, _1)):
-        " module ::= Id Id { definitions } "
-        if module.value != "module":
-            raise ASDLSyntaxError(module.lineno,
-                                  msg="expected 'module', found %s" % module)
-        return Module(name, definitions)
-
-    def p_definition_0(self, (definition,)):
-        " definitions ::= definition "
-        return definition
-
-    def p_definition_1(self, (definitions, definition)):
-        " definitions ::= definition definitions "
-        return definitions + definition
-
-    def p_definition(self, (id, _, type)):
-        " definition ::= Id = type "
-        return [Type(id, type)]
-
-    def p_type_0(self, (product,)):
-        " type ::= product "
-        return product
-
-    def p_type_1(self, (sum,)):
-        " type ::= sum "
-        return Sum(sum)
-
-    def p_type_2(self, (sum, id, _0, attributes, _1)):
-        " type ::= sum Id ( fields ) "
-        if id.value != "attributes":
-            raise ASDLSyntaxError(id.lineno,
-                                  msg="expected attributes, found %s" % id)
-        if attributes:
-            attributes.reverse()
-        return Sum(sum, attributes)
-
-    def p_product(self, (_0, fields, _1)):
-        " product ::= ( fields ) "
-        # XXX can't I just construct things in the right order?
-        fields.reverse()
-        return Product(fields)
-
-    def p_sum_0(self, (constructor,)):
-        " sum ::= constructor "
-        return [constructor]
-
-    def p_sum_1(self, (constructor, _, sum)):
-        " sum ::= constructor | sum "
-        return [constructor] + sum
-
-    def p_sum_2(self, (constructor, _, sum)):
-        " sum ::= constructor | sum "
-        return [constructor] + sum
-
-    def p_constructor_0(self, (id,)):
-        " constructor ::= Id "
-        return Constructor(id)
-
-    def p_constructor_1(self, (id, _0, fields, _1)):
-        " constructor ::= Id ( fields ) "
-        # XXX can't I just construct things in the right order?
-        fields.reverse()
-        return Constructor(id, fields)
-
-    def p_fields_0(self, (field,)):
-        " fields ::= field "
-        return [field]
-
-    def p_fields_1(self, (field, _, fields)):
-        " fields ::= field , fields "
-        return fields + [field]
-
-    def p_field_0(self, (type,)):
-        " field ::= Id "
-        return Field(type)
-
-    def p_field_1(self, (type, name)):
-        " field ::= Id Id "
-        return Field(type, name)
-
-    def p_field_2(self, (type, _, name)):
-        " field ::= Id * Id "
-        return Field(type, name, seq=True)
-
-    def p_field_3(self, (type, _, name)):
-        " field ::= Id ? Id "
-        return Field(type, name, opt=True)
-
-    def p_field_4(self, (type, _)):
-        " field ::= Id * "
-        return Field(type, seq=True)
-
-    def p_field_5(self, (type, _)):
-        " field ::= Id ? "
-        return Field(type, opt=True)
-
-builtin_types = ("identifier", "string", "int", "bool", "object")
-
-# below is a collection of classes to capture the AST of an AST :-)
-# not sure if any of the methods are useful yet, but I'm adding them
-# piecemeal as they seem helpful
-
-class AST(object):
-    pass # a marker class
+        raise NotImplementedError
 
 class Module(AST):
     def __init__(self, name, dfns):
         self.name = name
         self.dfns = dfns
-        self.types = {} # maps type name to value (from dfns)
-        for type in dfns:
-            self.types[type.name.value] = type.value
+        self.types = {type.name: type.value for type in dfns}
 
     def __repr__(self):
-        return "Module(%s, %s)" % (self.name, self.dfns)
+        return 'Module({0.name}, {0.dfns})'.format(self)
 
 class Type(AST):
     def __init__(self, name, value):
@@ -237,7 +54,7 @@
         self.value = value
 
     def __repr__(self):
-        return "Type(%s, %s)" % (self.name, self.value)
+        return 'Type({0.name}, {0.value})'.format(self)
 
 class Constructor(AST):
     def __init__(self, name, fields=None):
@@ -245,7 +62,7 @@
         self.fields = fields or []
 
     def __repr__(self):
-        return "Constructor(%s, %s)" % (self.name, self.fields)
+        return 'Constructor({0.name}, {0.fields})'.format(self)
 
 class Field(AST):
     def __init__(self, type, name=None, seq=False, opt=False):
@@ -262,9 +79,9 @@
         else:
             extra = ""
         if self.name is None:
-            return "Field(%s%s)" % (self.type, extra)
+            return 'Field({0.type}{1})'.format(self, extra)
         else:
-            return "Field(%s, %s%s)" % (self.type, self.name, extra)
+            return 'Field({0.type}, {0.name}{1})'.format(self, extra)
 
 class Sum(AST):
     def __init__(self, types, attributes=None):
@@ -272,47 +89,54 @@
         self.attributes = attributes or []
 
     def __repr__(self):
-        if self.attributes is None:
-            return "Sum(%s)" % self.types
+        if self.attributes:
+            return 'Sum({0.types}, {0.attributes})'.format(self)
         else:
-            return "Sum(%s, %s)" % (self.types, self.attributes)
+            return 'Sum({0.types})'.format(self)
 
 class Product(AST):
-    def __init__(self, fields):
+    def __init__(self, fields, attributes=None):
         self.fields = fields
+        self.attributes = attributes or []
 
     def __repr__(self):
-        return "Product(%s)" % self.fields
+        if self.attributes:
+            return 'Product({0.fields}, {0.attributes})'.format(self)
+        else:
+            return 'Product({0.fields})'.format(self)
+
+# A generic visitor for the meta-AST that describes ASDL. This can be used by
+# emitters. Note that this visitor does not provide a generic visit method, so a
+# subclass needs to define visit methods from visitModule to as deep as the
+# interesting node.
+# We also define a Check visitor that makes sure the parsed ASDL is well-formed.
 
 class VisitorBase(object):
+    """Generic tree visitor for ASTs."""
+    def __init__(self):
+        self.cache = {}
 
-    def __init__(self, skip=False):
-        self.cache = {}
-        self.skip = skip
-
-    def visit(self, object, *args):
-        meth = self._dispatch(object)
-        if meth is None:
-            return
-        meth(object, *args)
-
-    def _dispatch(self, object):
-        assert isinstance(object, AST), repr(object)
-        klass = object.__class__
+    def visit(self, obj, *args):
+        klass = obj.__class__
         meth = self.cache.get(klass)
         if meth is None:
             methname = "visit" + klass.__name__
-            if self.skip:
-                meth = getattr(self, methname, None)
-            else:
-                meth = getattr(self, methname)
+            meth = getattr(self, methname, None)
             self.cache[klass] = meth
-        return meth
+        if meth:
+            try:
+                meth(obj, *args)
+            except Exception as e:
+                print("Error visiting %r: %s" % (obj, e))
+                raise
 
 class Check(VisitorBase):
+    """A visitor that checks a parsed ASDL tree for correctness.
 
+    Errors are printed and accumulated.
+    """
     def __init__(self):
-        super(Check, self).__init__(skip=True)
+        super(Check, self).__init__()
         self.cons = {}
         self.errors = 0
         self.types = {}
@@ -334,8 +158,8 @@
         if conflict is None:
             self.cons[key] = name
         else:
-            print "Redefinition of constructor %s" % key
-            print "Defined in %s and %s" % (conflict, name)
+            print('Redefinition of constructor {}'.format(key))
+            print('Defined in {} and {}'.format(conflict, name))
             self.errors += 1
         for f in cons.fields:
             self.visit(f, key)
@@ -350,6 +174,11 @@
             self.visit(f, name)
 
 def check(mod):
+    """Check the parsed ASDL tree for correctness.
+
+    Return True if success. For failure, the errors are printed out and False
+    is returned.
+    """
     v = Check()
     v.visit(mod)
 
@@ -357,40 +186,190 @@
         if t not in mod.types and not t in builtin_types:
             v.errors += 1
             uses = ", ".join(v.types[t])
-            print "Undefined type %s, used in %s" % (t, uses)
-
+            print('Undefined type {}, used in {}'.format(t, uses))
     return not v.errors
 
-def parse(file):
-    scanner = ASDLScanner()
-    parser = ASDLParser()
+# The ASDL parser itself comes next. The only interesting external interface
+# here is the top-level parse function.
 
-    buf = open(file).read()
-    tokens = scanner.tokenize(buf)
-    try:
-        return parser.parse(tokens)
-    except ASDLSyntaxError, err:
-        print err
-        lines = buf.split("\n")
-        print lines[err.lineno - 1] # lines starts at 0, files at 1
+def parse(filename):
+    """Parse ASDL from the given file and return a Module node describing it."""
+    with open(filename) as f:
+        parser = ASDLParser()
+        return parser.parse(f.read())
 
-if __name__ == "__main__":
-    import glob
-    import sys
+# Types for describing tokens in an ASDL specification.
+class TokenKind:
+    """TokenKind provides a scope for enumerated token kinds."""
+    (ConstructorId, TypeId, Equals, Comma, Question, Pipe, Asterisk,
+     LParen, RParen, LBrace, RBrace) = range(11)
 
-    if len(sys.argv) > 1:
-        files = sys.argv[1:]
-    else:
-        testdir = "tests"
-        files = glob.glob(testdir + "/*.asdl")
+    operator_table = {
+        '=': Equals, ',': Comma,    '?': Question, '|': Pipe,    '(': LParen,
+        ')': RParen, '*': Asterisk, '{': LBrace,   '}': RBrace}
 
-    for file in files:
-        print file
-        mod = parse(file)
-        print "module", mod.name
-        print len(mod.dfns), "definitions"
-        if not check(mod):
-            print "Check failed"
+Token = namedtuple('Token', 'kind value lineno')
+
+class ASDLSyntaxError(Exception):
+    def __init__(self, msg, lineno=None):
+        self.msg = msg
+        self.lineno = lineno or '<unknown>'
+
+    def __str__(self):
+        return 'Syntax error on line {0.lineno}: {0.msg}'.format(self)
+
+def tokenize_asdl(buf):
+    """Tokenize the given buffer. Yield Token objects."""
+    for lineno, line in enumerate(buf.splitlines(), 1):
+        for m in re.finditer(r'\s*(\w+|--.*|.)', line.strip()):
+            c = m.group(1)
+            if c[0].isalpha():
+                # Some kind of identifier
+                if c[0].isupper():
+                    yield Token(TokenKind.ConstructorId, c, lineno)
+                else:
+                    yield Token(TokenKind.TypeId, c, lineno)
+            elif c[:2] == '--':
+                # Comment
+                break
+            else:
+                # Operators
+                try:
+                    op_kind = TokenKind.operator_table[c]
+                except KeyError:
+                    raise ASDLSyntaxError('Invalid operator %s' % c, lineno)
+                yield Token(op_kind, c, lineno)
+
+class ASDLParser:
+    """Parser for ASDL files.
+
+    Create, then call the parse method on a buffer containing ASDL.
+    This is a simple recursive descent parser that uses tokenize_asdl for the
+    lexing.
+    """
+    def __init__(self):
+        self._tokenizer = None
+        self.cur_token = None
+
+    def parse(self, buf):
+        """Parse the ASDL in the buffer and return an AST with a Module root.
+        """
+        self._tokenizer = tokenize_asdl(buf)
+        self._advance()
+        return self._parse_module()
+
+    def _parse_module(self):
+        if self._at_keyword('module'):
+            self._advance()
         else:
-            for dfn in mod.dfns:
-                print dfn.type
+            raise ASDLSyntaxError(
+                'Expected "module" (found {})'.format(self.cur_token.value),
+                self.cur_token.lineno)
+        name = self._match(self._id_kinds)
+        self._match(TokenKind.LBrace)
+        defs = self._parse_definitions()
+        self._match(TokenKind.RBrace)
+        return Module(name, defs)
+
+    def _parse_definitions(self):
+        defs = []
+        while self.cur_token.kind == TokenKind.TypeId:
+            typename = self._advance()
+            self._match(TokenKind.Equals)
+            type = self._parse_type()
+            defs.append(Type(typename, type))
+        return defs
+
+    def _parse_type(self):
+        if self.cur_token.kind == TokenKind.LParen:
+            # If we see a (, it's a product
+            return self._parse_product()
+        else:
+            # Otherwise it's a sum. Look for ConstructorId
+            sumlist = [Constructor(self._match(TokenKind.ConstructorId),
+                                   self._parse_optional_fields())]
+            while self.cur_token.kind  == TokenKind.Pipe:
+                # More constructors
+                self._advance()
+                sumlist.append(Constructor(
+                                self._match(TokenKind.ConstructorId),
+                                self._parse_optional_fields()))
+            return Sum(sumlist, self._parse_optional_attributes())
+
+    def _parse_product(self):
+        return Product(self._parse_fields(), self._parse_optional_attributes())
+
+    def _parse_fields(self):
+        fields = []
+        self._match(TokenKind.LParen)
+        while self.cur_token.kind == TokenKind.TypeId:
+            typename = self._advance()
+            is_seq, is_opt = self._parse_optional_field_quantifier()
+            id = (self._advance() if self.cur_token.kind in self._id_kinds
+                                  else None)
+            fields.append(Field(typename, id, seq=is_seq, opt=is_opt))
+            if self.cur_token.kind == TokenKind.RParen:
+                break
+            elif self.cur_token.kind == TokenKind.Comma:
+                self._advance()
+        self._match(TokenKind.RParen)
+        return fields
+
+    def _parse_optional_fields(self):
+        if self.cur_token.kind == TokenKind.LParen:
+            return self._parse_fields()
+        else:
+            return None
+
+    def _parse_optional_attributes(self):
+        if self._at_keyword('attributes'):
+            self._advance()
+            return self._parse_fields()
+        else:
+            return None
+
+    def _parse_optional_field_quantifier(self):
+        is_seq, is_opt = False, False
+        if self.cur_token.kind == TokenKind.Asterisk:
+            is_seq = True
+            self._advance()
+        elif self.cur_token.kind == TokenKind.Question:
+            is_opt = True
+            self._advance()
+        return is_seq, is_opt
+
+    def _advance(self):
+        """ Return the value of the current token and read the next one into
+            self.cur_token.
+        """
+        cur_val = None if self.cur_token is None else self.cur_token.value
+        try:
+            self.cur_token = next(self._tokenizer)
+        except StopIteration:
+            self.cur_token = None
+        return cur_val
+
+    _id_kinds = (TokenKind.ConstructorId, TokenKind.TypeId)
+
+    def _match(self, kind):
+        """The 'match' primitive of RD parsers.
+
+        * Verifies that the current token is of the given kind (kind can
+          be a tuple, in which the kind must match one of its members).
+        * Returns the value of the current token
+        * Reads in the next token
+        """
+        if (isinstance(kind, tuple) and self.cur_token.kind in kind or
+            self.cur_token.kind == kind
+            ):
+            value = self.cur_token.value
+            self._advance()
+            return value
+        else:
+            raise ASDLSyntaxError(
+                'Unmatched {} (found {})'.format(kind, self.cur_token.kind),
+                self.cur_token.lineno)
+
+    def _at_keyword(self, keyword):
+        return (self.cur_token.kind == TokenKind.TypeId and
+                self.cur_token.value == keyword)
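To sketch how the copied-over module is driven end to end (the import path and
the toy ASDL snippet below are assumptions for illustration, not something this
changeset adds):

    # Assumed location within a PyPy checkout; adjust the import as needed.
    from pypy.interpreter.astcompiler.tools import asdl

    # Parse a toy ASDL description straight from a string buffer.
    src = 'module Demo { thing = Leaf(int value) | Pair(thing left, thing right) }'
    mod = asdl.ASDLParser().parse(src)
    print(mod.name)         # Demo
    print(asdl.check(mod))  # True: every field type is defined or builtin

    # Emitters walk the resulting meta-AST with a VisitorBase subclass;
    # node kinds without a matching visit method are silently skipped.
    class ConstructorCounter(asdl.VisitorBase):
        def __init__(self):
            super(ConstructorCounter, self).__init__()
            self.count = 0
        def visitModule(self, mod):
            for dfn in mod.dfns:
                self.visit(dfn)
        def visitType(self, type):
            self.visit(type.value)
        def visitSum(self, sum):
            self.count += len(sum.types)

    counter = ConstructorCounter()
    counter.visit(mod)
    print(counter.count)    # 2

Note that the module-level parse() takes a filename (e.g. Python.asdl) rather
than a buffer; ASDLParser().parse() is the string-level entry point used here.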