Cool. Do you think you can port this to the modified copy of tokenize.py in 2to3?
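Roughly speaking, that port should come down to the same two hunks as in the stdlib change quoted below: the Special regex, and the "'.' starts a number" guard further down in the module. Here is a small, self-contained sketch of the regex half (it assumes 2to3's copy of tokenize.py still builds Special/Funny with the same group() helper; group() is re-declared inline only so the snippet runs on its own):

    import re

    def group(*choices):
        # the same combinator tokenize.py uses to build alternations
        return '(' + '|'.join(choices) + ')'

    # before the change: '...' never matches as a single unit
    Special_old = group(r'\r?\n', r'[:;.,@]')
    # after the change: try the three-dot form before the one-char specials
    Special_new = group(r'\r?\n', r'\.\.\.', r'[:;.,@]')

    print(re.match(Special_old, '...').group())  # '.'   -- three separate tokens
    print(re.match(Special_new, '...').group())  # '...' -- one token

    # The other hunk is the guard that keeps '...' from being taken for a
    # number that starts with a dot:
    #     if (initial in numchars or
    #         (initial == '.' and token != '.' and token != '...')):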
(I wonder if this wouldn't be a candidate for 2.6 too.) --Guido

On 3/18/07, georg.brandl <[email protected]> wrote:
> Author: georg.brandl
> Date: Sun Mar 18 20:01:53 2007
> New Revision: 54434
>
> Modified:
> python/branches/p3yk/Grammar/Grammar
> python/branches/p3yk/Include/token.h
> python/branches/p3yk/Lib/compiler/transformer.py
> python/branches/p3yk/Lib/test/test_grammar.py
> python/branches/p3yk/Lib/token.py
> python/branches/p3yk/Lib/tokenize.py
> python/branches/p3yk/Parser/tokenizer.c
> python/branches/p3yk/Python/ast.c
> python/branches/p3yk/Python/graminit.c
> Log:
> Make ELLIPSIS a separate token. This makes it a syntax error to write ". .
> ." for Ellipsis.
>
>
> Modified: python/branches/p3yk/Grammar/Grammar
> ==============================================================================
> --- python/branches/p3yk/Grammar/Grammar (original)
> +++ python/branches/p3yk/Grammar/Grammar Sun Mar 18 20:01:53 2007
> @@ -107,7 +107,7 @@
> atom: ('(' [yield_expr|testlist_gexp] ')' |
> '[' [listmaker] ']' |
> '{' [dictsetmaker] '}' |
> - NAME | NUMBER | STRING+ | '.' '.' '.')
> + NAME | NUMBER | STRING+ | '...')
> listmaker: test ( list_for | (',' test)* [','] )
> testlist_gexp: test ( gen_for | (',' test)* [','] )
> lambdef: 'lambda' [varargslist] ':' test
>
> Modified: python/branches/p3yk/Include/token.h
> ==============================================================================
> --- python/branches/p3yk/Include/token.h (original)
> +++ python/branches/p3yk/Include/token.h Sun Mar 18 20:01:53 2007
> @@ -59,10 +59,11 @@
> #define DOUBLESLASHEQUAL 49
> #define AT 50
> #define RARROW 51
> +#define ELLIPSIS 52
> /* Don't forget to update the table _PyParser_TokenNames in tokenizer.c! */
> -#define OP 52
> -#define ERRORTOKEN 53
> -#define N_TOKENS 54
> +#define OP 53
> +#define ERRORTOKEN 54
> +#define N_TOKENS 55
>
> /* Special definitions for cooperation with parser */
>
>
> Modified: python/branches/p3yk/Lib/compiler/transformer.py
> ==============================================================================
> --- python/branches/p3yk/Lib/compiler/transformer.py (original)
> +++ python/branches/p3yk/Lib/compiler/transformer.py Sun Mar 18 20:01:53
> 2007
> @@ -113,7 +113,7 @@
> token.LBRACE: self.atom_lbrace,
> token.NUMBER: self.atom_number,
> token.STRING: self.atom_string,
> - token.DOT: self.atom_ellipsis,
> + token.ELLIPSIS: self.atom_ellipsis,
> token.NAME: self.atom_name,
> }
> self.encoding = None
>
> Modified: python/branches/p3yk/Lib/test/test_grammar.py
> ==============================================================================
> --- python/branches/p3yk/Lib/test/test_grammar.py (original)
> +++ python/branches/p3yk/Lib/test/test_grammar.py Sun Mar 18 20:01:53
> 2007
> @@ -121,6 +121,7 @@
> def testEllipsis(self):
> x = ...
> self.assert_(x is Ellipsis)
> + self.assertRaises(SyntaxError, eval, ".. .")
>
> class GrammarTests(unittest.TestCase):
>
>
> Modified: python/branches/p3yk/Lib/token.py
> ==============================================================================
> --- python/branches/p3yk/Lib/token.py (original)
> +++ python/branches/p3yk/Lib/token.py Sun Mar 18 20:01:53 2007
> @@ -61,9 +61,10 @@
> DOUBLESLASHEQUAL = 49
> AT = 50
> RARROW = 51
> -OP = 52
> -ERRORTOKEN = 53
> -N_TOKENS = 54
> +ELLIPSIS = 52
> +OP = 53
> +ERRORTOKEN = 54
> +N_TOKENS = 55
> NT_OFFSET = 256
> #--end constants--
>
>
> Modified: python/branches/p3yk/Lib/tokenize.py
> ==============================================================================
> --- python/branches/p3yk/Lib/tokenize.py (original)
> +++ python/branches/p3yk/Lib/tokenize.py Sun Mar 18 20:01:53 2007
> @@ -83,7 +83,7 @@
> r"~")
>
> Bracket = '[][(){}]'
> -Special = group(r'\r?\n', r'[:;.,@]')
> +Special = group(r'\r?\n', r'\.\.\.', r'[:;.,@]')
> Funny = group(Operator, Bracket, Special)
>
> PlainToken = group(Number, Funny, String, Name)
> @@ -334,8 +334,8 @@
> spos, epos, pos = (lnum, start), (lnum, end), end
> token, initial = line[start:end], line[start]
>
> - if initial in numchars or \
> - (initial == '.' and token != '.'): # ordinary number
> + if (initial in numchars or # ordinary number
> + (initial == '.' and token != '.' and token != '...')):
> yield (NUMBER, token, spos, epos, line)
> elif initial in '\r\n':
> yield (NL if parenlev > 0 else NEWLINE,
>
> Modified: python/branches/p3yk/Parser/tokenizer.c
> ==============================================================================
> --- python/branches/p3yk/Parser/tokenizer.c (original)
> +++ python/branches/p3yk/Parser/tokenizer.c Sun Mar 18 20:01:53 2007
> @@ -93,6 +93,7 @@
> "DOUBLESLASHEQUAL",
> "AT",
> "RARROW",
> + "ELLIPSIS",
> /* This table must match the #defines in token.h! */
> "OP",
> "<ERRORTOKEN>",
> @@ -1082,6 +1083,16 @@
> break;
> }
> break;
> + case '.':
> + switch (c2) {
> + case '.':
> + switch (c3) {
> + case '.':
> + return ELLIPSIS;
> + }
> + break;
> + }
> + break;
> }
> return OP;
> }
> @@ -1278,13 +1289,22 @@
> c = tok_nextc(tok);
> if (isdigit(c)) {
> goto fraction;
> - }
> - else {
> + } else if (c == '.') {
> + c = tok_nextc(tok);
> + if (c == '.') {
> + *p_start = tok->start;
> + *p_end = tok->cur;
> + return ELLIPSIS;
> + } else {
> + tok_backup(tok, c);
> + }
> + tok_backup(tok, '.');
> + } else {
> tok_backup(tok, c);
> - *p_start = tok->start;
> - *p_end = tok->cur;
> - return DOT;
> }
> + *p_start = tok->start;
> + *p_end = tok->cur;
> + return DOT;
> }
>
> /* Number */
>
> Modified: python/branches/p3yk/Python/ast.c
> ==============================================================================
> --- python/branches/p3yk/Python/ast.c (original)
> +++ python/branches/p3yk/Python/ast.c Sun Mar 18 20:01:53 2007
> @@ -1410,7 +1410,7 @@
> PyArena_AddPyObject(c->c_arena, pynum);
> return Num(pynum, LINENO(n), n->n_col_offset, c->c_arena);
> }
> - case DOT: /* Ellipsis */
> + case ELLIPSIS: /* Ellipsis */
> return Ellipsis(LINENO(n), n->n_col_offset, c->c_arena);
> case LPAR: /* some parenthesized expressions */
> ch = CHILD(n, 1);
>
> Modified: python/branches/p3yk/Python/graminit.c
> ==============================================================================
> --- python/branches/p3yk/Python/graminit.c (original)
> +++ python/branches/p3yk/Python/graminit.c Sun Mar 18 20:01:53 2007
> @@ -1336,19 +1336,19 @@
> {19, 4},
> {154, 4},
> {155, 5},
> - {78, 6},
> + {156, 4},
> };
> static arc arcs_65_1[3] = {
> - {48, 7},
> - {147, 7},
> + {48, 6},
> + {147, 6},
> {15, 4},
> };
> static arc arcs_65_2[2] = {
> - {149, 8},
> + {149, 7},
> {150, 4},
> };
> static arc arcs_65_3[2] = {
> - {152, 9},
> + {152, 8},
> {153, 4},
> };
> static arc arcs_65_4[1] = {
> @@ -1359,21 +1359,15 @@
> {0, 5},
> };
> static arc arcs_65_6[1] = {
> - {78, 10},
> -};
> -static arc arcs_65_7[1] = {
> {15, 4},
> };
> -static arc arcs_65_8[1] = {
> +static arc arcs_65_7[1] = {
> {150, 4},
> };
> -static arc arcs_65_9[1] = {
> +static arc arcs_65_8[1] = {
> {153, 4},
> };
> -static arc arcs_65_10[1] = {
> - {78, 4},
> -};
> -static state states_65[11] = {
> +static state states_65[9] = {
> {7, arcs_65_0},
> {3, arcs_65_1},
> {2, arcs_65_2},
> @@ -1383,14 +1377,12 @@
> {1, arcs_65_6},
> {1, arcs_65_7},
> {1, arcs_65_8},
> - {1, arcs_65_9},
> - {1, arcs_65_10},
> };
> static arc arcs_66_0[1] = {
> {22, 1},
> };
> static arc arcs_66_1[3] = {
> - {156, 2},
> + {157, 2},
> {28, 3},
> {0, 1},
> };
> @@ -1416,7 +1408,7 @@
> {22, 1},
> };
> static arc arcs_67_1[3] = {
> - {157, 2},
> + {158, 2},
> {28, 3},
> {0, 1},
> };
> @@ -1471,7 +1463,7 @@
> {15, 5},
> };
> static arc arcs_69_2[1] = {
> - {158, 6},
> + {159, 6},
> };
> static arc arcs_69_3[1] = {
> {19, 5},
> @@ -1495,14 +1487,14 @@
> {1, arcs_69_6},
> };
> static arc arcs_70_0[1] = {
> - {159, 1},
> + {160, 1},
> };
> static arc arcs_70_1[2] = {
> {28, 2},
> {0, 1},
> };
> static arc arcs_70_2[2] = {
> - {159, 1},
> + {160, 1},
> {0, 2},
> };
> static state states_70[3] = {
> @@ -1520,11 +1512,11 @@
> };
> static arc arcs_71_2[3] = {
> {22, 3},
> - {160, 4},
> + {161, 4},
> {0, 2},
> };
> static arc arcs_71_3[2] = {
> - {160, 4},
> + {161, 4},
> {0, 3},
> };
> static arc arcs_71_4[1] = {
> @@ -1625,7 +1617,7 @@
> {1, arcs_75_7},
> };
> static arc arcs_76_0[1] = {
> - {161, 1},
> + {162, 1},
> };
> static arc arcs_76_1[1]
= { > {19, 2}, > @@ -1661,7 +1653,7 @@ > {1, arcs_76_7}, > }; > static arc arcs_77_0[3] = { > - {162, 1}, > + {163, 1}, > {29, 2}, > {31, 3}, > }; > @@ -1676,7 +1668,7 @@ > {22, 6}, > }; > static arc arcs_77_4[4] = { > - {162, 1}, > + {163, 1}, > {29, 2}, > {31, 3}, > {0, 4}, > @@ -1705,7 +1697,7 @@ > {22, 1}, > }; > static arc arcs_78_1[3] = { > - {157, 2}, > + {158, 2}, > {27, 3}, > {0, 1}, > }; > @@ -1722,8 +1714,8 @@ > {1, arcs_78_3}, > }; > static arc arcs_79_0[2] = { > - {156, 1}, > - {164, 1}, > + {157, 1}, > + {165, 1}, > }; > static arc arcs_79_1[1] = { > {0, 1}, > @@ -1745,7 +1737,7 @@ > {107, 4}, > }; > static arc arcs_80_4[2] = { > - {163, 5}, > + {164, 5}, > {0, 4}, > }; > static arc arcs_80_5[1] = { > @@ -1766,7 +1758,7 @@ > {108, 2}, > }; > static arc arcs_81_2[2] = { > - {163, 3}, > + {164, 3}, > {0, 2}, > }; > static arc arcs_81_3[1] = { > @@ -1779,8 +1771,8 @@ > {1, arcs_81_3}, > }; > static arc arcs_82_0[2] = { > - {157, 1}, > - {166, 1}, > + {158, 1}, > + {167, 1}, > }; > static arc arcs_82_1[1] = { > {0, 1}, > @@ -1802,7 +1794,7 @@ > {109, 4}, > }; > static arc arcs_83_4[2] = { > - {165, 5}, > + {166, 5}, > {0, 4}, > }; > static arc arcs_83_5[1] = { > @@ -1823,7 +1815,7 @@ > {108, 2}, > }; > static arc arcs_84_2[2] = { > - {165, 3}, > + {166, 3}, > {0, 2}, > }; > static arc arcs_84_3[1] = { > @@ -1857,7 +1849,7 @@ > {1, arcs_86_1}, > }; > static arc arcs_87_0[1] = { > - {169, 1}, > + {170, 1}, > }; > static arc arcs_87_1[2] = { > {9, 2}, > @@ -1873,11 +1865,11 @@ > }; > static dfa dfas[88] = { > {256, "single_input", 0, 3, states_0, > - > "\004\050\014\000\000\000\000\240\340\151\070\220\045\200\040\000\000\206\220\014\002\002"}, > + > "\004\050\014\000\000\000\000\240\340\051\070\220\045\200\040\000\000\206\220\034\004\004"}, > {257, "file_input", 0, 2, states_1, > - > "\204\050\014\000\000\000\000\240\340\151\070\220\045\200\040\000\000\206\220\014\002\002"}, > + > "\204\050\014\000\000\000\000\240\340\051\070\220\045\200\040\000\000\206\220\034\004\004"}, > {258, "eval_input", 0, 3, states_2, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"}, > {259, "decorator", 0, 7, states_3, > > "\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, > {260, "decorators", 0, 2, states_4, > @@ -1903,13 +1895,13 @@ > {270, "vfplist", 0, 3, states_14, > > "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, > {271, "stmt", 0, 2, states_15, > - > "\000\050\014\000\000\000\000\240\340\151\070\220\045\200\040\000\000\206\220\014\002\002"}, > + > "\000\050\014\000\000\000\000\240\340\051\070\220\045\200\040\000\000\206\220\034\004\004"}, > {272, "simple_stmt", 0, 4, states_16, > - > "\000\040\010\000\000\000\000\240\340\151\070\000\000\200\040\000\000\206\220\014\000\002"}, > + > "\000\040\010\000\000\000\000\240\340\051\070\000\000\200\040\000\000\206\220\034\000\004"}, > {273, "small_stmt", 0, 2, states_17, > - > "\000\040\010\000\000\000\000\240\340\151\070\000\000\200\040\000\000\206\220\014\000\002"}, > + > "\000\040\010\000\000\000\000\240\340\051\070\000\000\200\040\000\000\206\220\034\000\004"}, > {274, "expr_stmt", 0, 6, states_18, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"}, > {275, "augassign", 0, 2, 
states_19, > > "\000\000\000\000\000\000\376\037\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, > {276, "del_stmt", 0, 3, states_20, > @@ -1917,7 +1909,7 @@ > {277, "pass_stmt", 0, 2, states_21, > > "\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, > {278, "flow_stmt", 0, 2, states_22, > - > "\000\000\000\000\000\000\000\000\340\001\000\000\000\000\000\000\000\000\000\000\000\002"}, > + > "\000\000\000\000\000\000\000\000\340\001\000\000\000\000\000\000\000\000\000\000\000\004"}, > {279, "break_stmt", 0, 2, states_23, > > "\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000"}, > {280, "continue_stmt", 0, 2, states_24, > @@ -1925,7 +1917,7 @@ > {281, "return_stmt", 0, 3, states_25, > > "\000\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000"}, > {282, "yield_stmt", 0, 2, states_26, > - > "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\002"}, > + > "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\004"}, > {283, "raise_stmt", 0, 7, states_27, > > "\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000"}, > {284, "import_stmt", 0, 2, states_28, > @@ -1951,7 +1943,7 @@ > {294, "assert_stmt", 0, 5, states_38, > > "\000\000\000\000\000\000\000\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000"}, > {295, "compound_stmt", 0, 2, states_39, > - > "\000\010\004\000\000\000\000\000\000\000\000\220\045\000\000\000\000\000\000\000\002\000"}, > + > "\000\010\004\000\000\000\000\000\000\000\000\220\045\000\000\000\000\000\000\000\004\000"}, > {296, "if_stmt", 0, 8, states_40, > > "\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000"}, > {297, "while_stmt", 0, 8, states_41, > @@ -1967,69 +1959,69 @@ > {302, "except_clause", 0, 5, states_46, > > "\000\000\000\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000\000"}, > {303, "suite", 0, 5, states_47, > - > "\004\040\010\000\000\000\000\240\340\151\070\000\000\200\040\000\000\206\220\014\000\002"}, > + > "\004\040\010\000\000\000\000\240\340\051\070\000\000\200\040\000\000\206\220\034\000\004"}, > {304, "testlist_safe", 0, 5, states_48, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"}, > {305, "old_test", 0, 2, states_49, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"}, > {306, "old_lambdef", 0, 5, states_50, > > "\000\000\000\000\000\000\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000"}, > {307, "test", 0, 6, states_51, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"}, > {308, "or_test", 0, 2, states_52, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\040\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\040\000\000\206\220\034\000\000"}, > {309, "and_test", 0, 2, states_53, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\040\000\000\206\220\014\000\000"}, > + > 
"\000\040\010\000\000\000\000\000\000\000\000\000\000\000\040\000\000\206\220\034\000\000"}, > {310, "not_test", 0, 3, states_54, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\040\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\040\000\000\206\220\034\000\000"}, > {311, "comparison", 0, 2, states_55, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"}, > {312, "comp_op", 0, 4, states_56, > > "\000\000\000\000\000\000\000\000\000\000\000\000\002\000\040\177\000\000\000\000\000\000"}, > {313, "expr", 0, 2, states_57, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"}, > {314, "xor_expr", 0, 2, states_58, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"}, > {315, "and_expr", 0, 2, states_59, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"}, > {316, "shift_expr", 0, 2, states_60, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"}, > {317, "arith_expr", 0, 2, states_61, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"}, > {318, "term", 0, 2, states_62, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"}, > {319, "factor", 0, 3, states_63, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"}, > {320, "power", 0, 4, states_64, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\220\014\000\000"}, > - {321, "atom", 0, 11, states_65, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\220\034\000\000"}, > + {321, "atom", 0, 9, states_65, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\220\034\000\000"}, > {322, "listmaker", 0, 5, states_66, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"}, > {323, "testlist_gexp", 0, 5, states_67, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"}, > {324, "lambdef", 0, 5, states_68, > > "\000\000\000\000\000\000\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000\000"}, > {325, "trailer", 0, 7, states_69, > > 
"\000\040\000\000\000\000\000\000\000\100\000\000\000\000\000\000\000\000\020\000\000\000"}, > {326, "subscriptlist", 0, 3, states_70, > - > "\000\040\210\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"}, > + > "\000\040\210\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"}, > {327, "subscript", 0, 5, states_71, > - > "\000\040\210\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"}, > + > "\000\040\210\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"}, > {328, "sliceop", 0, 3, states_72, > > "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, > {329, "exprlist", 0, 3, states_73, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\000\000\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\206\220\034\000\000"}, > {330, "testlist", 0, 3, states_74, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"}, > {331, "dictsetmaker", 0, 8, states_75, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"}, > {332, "classdef", 0, 8, states_76, > - > "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\002\000"}, > + > "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\004\000"}, > {333, "arglist", 0, 8, states_77, > - > "\000\040\010\240\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"}, > + > "\000\040\010\240\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"}, > {334, "argument", 0, 4, states_78, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"}, > {335, "list_iter", 0, 2, states_79, > > "\000\000\000\000\000\000\000\000\000\000\000\020\001\000\000\000\000\000\000\000\000\000"}, > {336, "list_for", 0, 6, states_80, > @@ -2043,13 +2035,13 @@ > {340, "gen_if", 0, 4, states_84, > > "\000\000\000\000\000\000\000\000\000\000\000\020\000\000\000\000\000\000\000\000\000\000"}, > {341, "testlist1", 0, 2, states_85, > - > "\000\040\010\000\000\000\000\000\000\100\000\000\000\200\040\000\000\206\220\014\000\000"}, > + > "\000\040\010\000\000\000\000\000\000\000\000\000\000\200\040\000\000\206\220\034\000\000"}, > {342, "encoding_decl", 0, 2, states_86, > > "\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, > {343, "yield_expr", 0, 3, states_87, > - > "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\002"}, > + > "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\004"}, > }; > -static label labels[170] = { > +static label labels[171] = { > {0, "EMPTY"}, > {256, 0}, > {4, 0}, > @@ -2206,6 +2198,7 @@ > {27, 0}, > {2, 0}, > {3, 0}, > + {52, 0}, > {336, 0}, > {339, 0}, > {326, 0}, > @@ -2224,6 +2217,6 @@ > grammar _PyParser_Grammar = { > 88, > dfas, > - {170, labels}, > + {171, labels}, > 256 > }; > _______________________________________________ > Python-3000-checkins mailing list > [email protected] > 
http://mail.python.org/mailman/listinfo/python-3000-checkins

--
--Guido van Rossum (home page: http://www.python.org/~guido/)
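For anyone skimming past the regenerated graminit.c tables, the user-visible effect of the commit is small. An illustrative snippet (it assumes a py3k interpreter built with this revision applied; per the log message, the spaced spellings used to be accepted as Ellipsis and are now rejected):

    # '...' must now be written as one three-character ELLIPSIS token.
    assert eval("...") is Ellipsis

    # Whitespace between the dots is a syntax error instead of Ellipsis.
    for src in (". . .", ".. .", ". .."):
        try:
            eval(src)
        except SyntaxError:
            print("SyntaxError, as expected: %r" % src)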
