diff --git a/Grammar/python.gram b/Grammar/python.gram
index 0ce6ab4b4ba908..904e05c6d317d1 100644
--- a/Grammar/python.gram
+++ b/Grammar/python.gram
@@ -170,11 +170,11 @@ for_stmt[stmt_ty]:
         CHECK_VERSION(5, "Async for loops are", _Py_AsyncFor(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA)) }
 
 with_stmt[stmt_ty]:
-    | 'with' '(' a=','.with_item+ ')' ':' b=block {
+    | 'with' '(' a=','.with_item+ ','? ')' ':' b=block {
        _Py_With(a, b, NULL, EXTRA) }
     | 'with' a=','.with_item+ ':' tc=[TYPE_COMMENT] b=block {
        _Py_With(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) }
-    | ASYNC 'with' '(' a=','.with_item+ ')' ':' b=block {
+    | ASYNC 'with' '(' a=','.with_item+ ','? ')' ':' b=block {
        CHECK_VERSION(5, "Async with statements are", _Py_AsyncWith(a, b, NULL, EXTRA)) }
     | ASYNC 'with' a=','.with_item+ ':' tc=[TYPE_COMMENT] b=block {
        CHECK_VERSION(5, "Async with statements are", _Py_AsyncWith(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA)) }
diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py
index 922a5166ec12f7..c24d3529490be0 100644
--- a/Lib/test/test_grammar.py
+++ b/Lib/test/test_grammar.py
@@ -1,7 +1,7 @@
 # Python test set -- part 1, grammar.
 # This just tests whether the parser accepts them all.
 
-from test.support import check_syntax_error, check_syntax_warning
+from test.support import check_syntax_error, check_syntax_warning, use_old_parser
 import inspect
 import unittest
 import sys
@@ -1694,6 +1694,70 @@ def __exit__(self, *args):
         with manager() as x, manager():
             pass
 
+        if not use_old_parser():
+            test_cases = [
+                """if 1:
+                    with (
+                        manager()
+                    ):
+                        pass
+                """,
+                """if 1:
+                    with (
+                        manager() as x
+                    ):
+                        pass
+                """,
+                """if 1:
+                    with (
+                        manager() as (x, y),
+                        manager() as z,
+                    ):
+                        pass
+                """,
+                """if 1:
+                    with (
+                        manager(),
+                        manager()
+                    ):
+                        pass
+                """,
+                """if 1:
+                    with (
+                        manager() as x,
+                        manager() as y
+                    ):
+                        pass
+                """,
+                """if 1:
+                    with (
+                        manager() as x,
+                        manager()
+                    ):
+                        pass
+                """,
+                """if 1:
+                    with (
+                        manager() as x,
+                        manager() as y,
+                        manager() as z,
+                    ):
+                        pass
+                """,
+                """if 1:
+                    with (
+                        manager() as x,
+                        manager() as y,
+                        manager(),
+                    ):
+                        pass
+                """,
+            ]
+            for case in test_cases:
+                with self.subTest(case=case):
+                    compile(case, "", "exec")
+
+
     def test_if_else_expr(self):
         # Test ifelse expressions in various cases
         def _checkeval(msg, ret):
diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c
index 55605d5770f1e6..dd985aa70f511e 100644
--- a/Parser/pegen/parse.c
+++ b/Parser/pegen/parse.c
@@ -3031,9 +3031,9 @@ for_stmt_rule(Parser *p)
 }
 
 // with_stmt:
-//     | 'with' '(' ','.with_item+ ')' ':' block
+//     | 'with' '(' ','.with_item+ ','? ')' ':' block
 //     | 'with' ','.with_item+ ':' TYPE_COMMENT? block
-//     | ASYNC 'with' '(' ','.with_item+ ')' ':' block
+//     | ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block
 //     | ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block
 static stmt_ty
 with_stmt_rule(Parser *p)
@@ -3051,13 +3051,15 @@ with_stmt_rule(Parser *p)
     UNUSED(start_lineno); // Only used by EXTRA macro
     int start_col_offset = p->tokens[mark]->col_offset;
     UNUSED(start_col_offset); // Only used by EXTRA macro
-    { // 'with' '(' ','.with_item+ ')' ':' block
+    { // 'with' '(' ','.with_item+ ','? ')' ':' block
         asdl_seq * a;
         asdl_seq* b;
         Token * keyword;
         Token * literal;
         Token * literal_1;
         Token * literal_2;
+        void *opt_var;
+        UNUSED(opt_var); // Silence compiler warnings
         if (
             (keyword = _PyPegen_expect_token(p, 519))
             &&
@@ -3065,6 +3067,8 @@ with_stmt_rule(Parser *p)
             &&
             (a = _gather_38_rule(p))
             &&
+            (opt_var = _PyPegen_expect_token(p, 12), 1)
+            &&
             (literal_1 = _PyPegen_expect_token(p, 8))
             &&
             (literal_2 = _PyPegen_expect_token(p, 11))
@@ -3124,7 +3128,7 @@ with_stmt_rule(Parser *p)
         }
         p->mark = mark;
     }
-    { // ASYNC 'with' '(' ','.with_item+ ')' ':' block
+    { // ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block
         asdl_seq * a;
         Token * async_var;
         asdl_seq* b;
@@ -3132,6 +3136,8 @@ with_stmt_rule(Parser *p)
         Token * literal;
         Token * literal_1;
         Token * literal_2;
+        void *opt_var;
+        UNUSED(opt_var); // Silence compiler warnings
         if (
             (async_var = _PyPegen_expect_token(p, ASYNC))
             &&
@@ -3141,6 +3147,8 @@ with_stmt_rule(Parser *p)
             &&
             (a = _gather_42_rule(p))
             &&
+            (opt_var = _PyPegen_expect_token(p, 12), 1)
+            &&
             (literal_1 = _PyPegen_expect_token(p, 8))
             &&
             (literal_2 = _PyPegen_expect_token(p, 11))
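For context, the grammar change above makes the new PEG parser accept an optional trailing comma before the closing parenthesis of a parenthesized with statement, in both the plain and async forms, mirroring the test cases added to test_grammar.py. The sketch below is illustrative only: the `manager` and `async_manager` classes are stand-ins (not part of the patch), and it assumes an interpreter whose parser includes this change.

# Illustrative only: stand-in context managers, modeled on the helper
# class used in test_grammar.py.
import asyncio

class manager:
    def __enter__(self):
        return self
    def __exit__(self, *args):
        pass

class async_manager:
    async def __aenter__(self):
        return self
    async def __aexit__(self, *args):
        pass

# A trailing comma is now allowed before the closing ')' of a
# parenthesized with statement.
with (
    manager() as x,
    manager(),
):
    pass

# The async rule accepts the same trailing comma inside parentheses.
async def main():
    async with (
        async_manager() as a,
        async_manager(),
    ):
        pass

asyncio.run(main())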
