-rw-r--r--  Doc/reference/compound_stmts.rst   5
-rw-r--r--  Doc/whatsnew/3.14.rst   27
-rw-r--r--  Grammar/python.gram   20
-rw-r--r--  Lib/test/test_grammar.py   4
-rw-r--r--  Lib/test/test_syntax.py   38
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-03-28-18-59-24.gh-issue-131833.v4WvWT.rst   2
-rw-r--r--  Parser/parser.c   547
7 files changed, 377 insertions, 266 deletions
diff --git a/Doc/reference/compound_stmts.rst b/Doc/reference/compound_stmts.rst
index 72e65614c24..7f6daae2169 100644
--- a/Doc/reference/compound_stmts.rst
+++ b/Doc/reference/compound_stmts.rst
@@ -232,6 +232,8 @@ Additional information on exceptions can be found in section :ref:`exceptions`,
and information on using the :keyword:`raise` statement to generate exceptions
may be found in section :ref:`raise`.
+.. versionchanged:: next
+ Support for optionally dropping grouping parentheses when using multiple exception types. See :pep:`758`.
.. _except:
@@ -247,7 +249,8 @@ An expression-less :keyword:`!except` clause, if present, must be last;
it matches any exception.
For an :keyword:`!except` clause with an expression, the
-expression must evaluate to an exception type or a tuple of exception types.
+expression must evaluate to an exception type or a tuple of exception types. Parentheses
+can be dropped if multiple exception types are provided and the ``as`` clause is not used.
The raised exception matches an :keyword:`!except` clause whose expression evaluates
to the class or a :term:`non-virtual base class <abstract base class>` of the exception object,
or to a tuple that contains such a class.
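In practice the documented rule means the unparenthesized and parenthesized spellings are interchangeable as long as no ``as`` clause is used. A minimal sketch, assuming an interpreter that already includes this change (on 3.13 and earlier the bare form is a SyntaxError):

    try:
        int("not a number")
    except ValueError, TypeError:            # parentheses may be dropped
        print("caught without parentheses")

    try:
        int("not a number")
    except (ValueError, TypeError) as exc:   # 'as' still requires parentheses
        print("caught:", exc)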
diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst
index 8a617d4a123..5c0f2829809 100644
--- a/Doc/whatsnew/3.14.rst
+++ b/Doc/whatsnew/3.14.rst
@@ -90,6 +90,33 @@ If you encounter :exc:`NameError`\s or pickling errors coming out of
New features
============
+.. _whatsnew314-pep758:
+
+PEP 758 – Allow except and except* expressions without parentheses
+------------------------------------------------------------------
+
+The :keyword:`except` and :keyword:`except* <except_star>` expressions now allow
+parentheses to be omitted when there are multiple exception types and the ``as`` clause is not used.
+For example, the following expressions are now valid:
+
+.. code-block:: python
+
+ try:
+ release_new_sleep_token_album()
+ except AlbumNotFound, SongsTooGoodToBeReleased:
+ print("Sorry, no new album this year.")
+
+ # The same applies to except* (for exception groups):
+ try:
+ release_new_sleep_token_album()
+ except* AlbumNotFound, SongsTooGoodToBeReleased:
+ print("Sorry, no new album this year.")
+
+Check :pep:`758` for more details.
+
+(Contributed by Pablo Galindo and Brett Cannon in :gh:`131831`.)
+
+
.. _whatsnew314-pep649:
PEP 649: deferred evaluation of annotations
diff --git a/Grammar/python.gram b/Grammar/python.gram
index 53288a5418c..de2d9c7508f 100644
--- a/Grammar/python.gram
+++ b/Grammar/python.gram
@@ -435,14 +435,18 @@ try_stmt[stmt_ty]:
except_block[excepthandler_ty]:
| invalid_except_stmt_indent
- | 'except' e=expression t=['as' z=NAME { z }] ':' b=block {
- _PyAST_ExceptHandler(e, (t) ? ((expr_ty) t)->v.Name.id : NULL, b, EXTRA) }
+ | 'except' e=expressions ':' b=block {
+ _PyAST_ExceptHandler(e, NULL, b, EXTRA) }
+ | 'except' e=expression 'as' t=NAME ':' b=block {
+ _PyAST_ExceptHandler(e, ((expr_ty) t)->v.Name.id, b, EXTRA) }
| 'except' ':' b=block { _PyAST_ExceptHandler(NULL, NULL, b, EXTRA) }
| invalid_except_stmt
except_star_block[excepthandler_ty]:
| invalid_except_star_stmt_indent
- | 'except' '*' e=expression t=['as' z=NAME { z }] ':' b=block {
- _PyAST_ExceptHandler(e, (t) ? ((expr_ty) t)->v.Name.id : NULL, b, EXTRA) }
+ | 'except' '*' e=expressions ':' b=block {
+ _PyAST_ExceptHandler(e, NULL, b, EXTRA) }
+ | 'except' '*' e=expression 'as' t=NAME ':' b=block {
+ _PyAST_ExceptHandler(e, ((expr_ty) t)->v.Name.id, b, EXTRA) }
| invalid_except_star_stmt
finally_block[asdl_stmt_seq*]:
| invalid_finally_stmt
@@ -1356,16 +1360,16 @@ invalid_try_stmt:
| 'try' ':' block* except_star_block+ a='except' [expression ['as' NAME]] ':' {
RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot have both 'except' and 'except*' on the same 'try'") }
invalid_except_stmt:
- | 'except' a=expression ',' expressions ['as' NAME ] ':' {
- RAISE_SYNTAX_ERROR_STARTING_FROM(a, "multiple exception types must be parenthesized") }
+ | 'except' a=expression ',' expressions 'as' NAME ':' {
+ RAISE_SYNTAX_ERROR_STARTING_FROM(a, "multiple exception types must be parenthesized when using 'as'") }
| a='except' expression ['as' NAME ] NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
| a='except' NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
| 'except' expression 'as' a=expression {
RAISE_SYNTAX_ERROR_KNOWN_LOCATION(
a, "cannot use except statement with %s", _PyPegen_get_expr_name(a)) }
invalid_except_star_stmt:
- | 'except' '*' a=expression ',' expressions ['as' NAME ] ':' {
- RAISE_SYNTAX_ERROR_STARTING_FROM(a, "multiple exception types must be parenthesized") }
+ | 'except' '*' a=expression ',' expressions 'as' NAME ':' {
+ RAISE_SYNTAX_ERROR_STARTING_FROM(a, "multiple exception types must be parenthesized when using 'as'") }
| a='except' '*' expression ['as' NAME ] NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
| a='except' '*' (NEWLINE | ':') { RAISE_SYNTAX_ERROR("expected one or more exception types") }
| 'except' '*' expression 'as' a=expression {
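Splitting the rules into an ``expressions`` alternative (no ``as``) and an ``expression 'as' NAME`` alternative means the comma-separated types are collapsed into a single tuple, exactly as if they had been parenthesized. A sketch of how to confirm that, assuming an interpreter built from this grammar; the source strings are illustrative only:

    import ast

    src_plain = "try:\n    pass\nexcept ValueError, KeyError:\n    pass\n"
    src_paren = "try:\n    pass\nexcept (ValueError, KeyError):\n    pass\n"

    for src in (src_plain, src_paren):
        handler = ast.parse(src).body[0].handlers[0]
        print(type(handler.type).__name__)   # "Tuple" in both cases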
diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py
index 3f1e1fa51fd..3ea4e47ca50 100644
--- a/Lib/test/test_grammar.py
+++ b/Lib/test/test_grammar.py
@@ -1375,6 +1375,8 @@ class GrammarTests(unittest.TestCase):
try: 1/0
except (EOFError, TypeError, ZeroDivisionError): pass
try: 1/0
+ except EOFError, TypeError, ZeroDivisionError: pass
+ try: 1/0
except (EOFError, TypeError, ZeroDivisionError) as msg: pass
try: pass
finally: pass
@@ -1398,6 +1400,8 @@ class GrammarTests(unittest.TestCase):
try: 1/0
except* (EOFError, TypeError, ZeroDivisionError): pass
try: 1/0
+ except* EOFError, TypeError, ZeroDivisionError: pass
+ try: 1/0
except* (EOFError, TypeError, ZeroDivisionError) as msg: pass
try: pass
finally: pass
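The new ``except*`` lines exercise the unparenthesized form against an exception group; at runtime that looks roughly like the following sketch (again assuming an interpreter with this change):

    try:
        raise ExceptionGroup("demo", [ValueError("v"), KeyError("k")])
    except* ValueError, KeyError:
        # The handler receives the matching subgroup, the same as the
        # parenthesized form except* (ValueError, KeyError).
        print("caught both sub-exceptions without parentheses")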
diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py
index d2950cf48ab..2c87b145254 100644
--- a/Lib/test/test_syntax.py
+++ b/Lib/test/test_syntax.py
@@ -1667,28 +1667,14 @@ Make sure that the old "raise X, Y[, Z]" form is gone:
SyntaxError: invalid syntax
Check that multiple exception types with missing parentheses
-raise a custom exception
-
- >>> try:
- ... pass
- ... except A, B:
- ... pass
- Traceback (most recent call last):
- SyntaxError: multiple exception types must be parenthesized
-
- >>> try:
- ... pass
- ... except A, B, C:
- ... pass
- Traceback (most recent call last):
- SyntaxError: multiple exception types must be parenthesized
+raise a custom exception only when using 'as'
>>> try:
... pass
... except A, B, C as blech:
... pass
Traceback (most recent call last):
- SyntaxError: multiple exception types must be parenthesized
+ SyntaxError: multiple exception types must be parenthesized when using 'as'
>>> try:
... pass
@@ -1697,29 +1683,15 @@ raise a custom exception
... finally:
... pass
Traceback (most recent call last):
- SyntaxError: multiple exception types must be parenthesized
+ SyntaxError: multiple exception types must be parenthesized when using 'as'
>>> try:
... pass
- ... except* A, B:
- ... pass
- Traceback (most recent call last):
- SyntaxError: multiple exception types must be parenthesized
-
- >>> try:
- ... pass
- ... except* A, B, C:
- ... pass
- Traceback (most recent call last):
- SyntaxError: multiple exception types must be parenthesized
-
- >>> try:
- ... pass
... except* A, B, C as blech:
... pass
Traceback (most recent call last):
- SyntaxError: multiple exception types must be parenthesized
+ SyntaxError: multiple exception types must be parenthesized when using 'as'
>>> try:
... pass
@@ -1728,7 +1700,7 @@ raise a custom exception
... finally:
... pass
Traceback (most recent call last):
- SyntaxError: multiple exception types must be parenthesized
+ SyntaxError: multiple exception types must be parenthesized when using 'as'
Custom exception for 'except*' without an exception type
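The doctests above reflect that the diagnostic now fires only when a bare list of types is combined with ``as``. A sketch using compile(), assuming an interpreter with this change:

    ok = "try:\n    pass\nexcept ValueError, KeyError:\n    pass\n"
    bad = "try:\n    pass\nexcept ValueError, KeyError as e:\n    pass\n"

    compile(ok, "<demo>", "exec")      # accepted since this change
    try:
        compile(bad, "<demo>", "exec")
    except SyntaxError as err:
        print(err.msg)  # multiple exception types must be parenthesized when using 'as'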
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-03-28-18-59-24.gh-issue-131833.v4WvWT.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-03-28-18-59-24.gh-issue-131833.v4WvWT.rst
new file mode 100644
index 00000000000..e19140dd568
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-03-28-18-59-24.gh-issue-131833.v4WvWT.rst
@@ -0,0 +1,2 @@
+Add support for optionally dropping grouping parentheses when using multiple
+exception types as per :pep:`758`. Patch by Pablo Galindo.
diff --git a/Parser/parser.c b/Parser/parser.c
index edebddc95f9..f39ad950e16 100644
--- a/Parser/parser.c
+++ b/Parser/parser.c
@@ -21,54 +21,54 @@ static KeywordToken *reserved_keywords[] = {
(KeywordToken[]) {{NULL, -1}},
(KeywordToken[]) {{NULL, -1}},
(KeywordToken[]) {
- {"if", 675},
- {"as", 673},
- {"in", 686},
- {"or", 584},
- {"is", 592},
+ {"if", 677},
+ {"as", 675},
+ {"in", 688},
+ {"or", 586},
+ {"is", 594},
{NULL, -1},
},
(KeywordToken[]) {
- {"del", 620},
- {"def", 690},
- {"for", 685},
- {"try", 649},
- {"and", 585},
- {"not", 694},
+ {"del", 622},
+ {"def", 692},
+ {"for", 687},
+ {"try", 651},
+ {"and", 587},
+ {"not", 696},
{NULL, -1},
},
(KeywordToken[]) {
- {"from", 628},
+ {"from", 630},
{"pass", 526},
- {"with", 640},
- {"elif", 677},
- {"else", 678},
- {"None", 618},
- {"True", 617},
+ {"with", 642},
+ {"elif", 679},
+ {"else", 680},
+ {"None", 620},
+ {"True", 619},
{NULL, -1},
},
(KeywordToken[]) {
{"raise", 525},
- {"yield", 583},
+ {"yield", 585},
{"break", 527},
- {"async", 689},
- {"class", 692},
- {"while", 680},
- {"False", 619},
- {"await", 593},
+ {"async", 691},
+ {"class", 694},
+ {"while", 682},
+ {"False", 621},
+ {"await", 595},
{NULL, -1},
},
(KeywordToken[]) {
{"return", 522},
- {"import", 629},
+ {"import", 631},
{"assert", 532},
{"global", 529},
- {"except", 670},
- {"lambda", 616},
+ {"except", 672},
+ {"lambda", 618},
{NULL, -1},
},
(KeywordToken[]) {
- {"finally", 666},
+ {"finally", 668},
{NULL, -1},
},
(KeywordToken[]) {
@@ -1667,7 +1667,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt"));
stmt_ty del_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 620) // token='del'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 622) // token='del'
&&
(del_stmt_var = del_stmt_rule(p)) // del_stmt
)
@@ -1688,7 +1688,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'yield' yield_stmt"));
stmt_ty yield_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 583) // token='yield'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 585) // token='yield'
&&
(yield_stmt_var = yield_stmt_rule(p)) // yield_stmt
)
@@ -1863,7 +1863,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt"));
stmt_ty if_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 675) // token='if'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 677) // token='if'
&&
(if_stmt_var = if_stmt_rule(p)) // if_stmt
)
@@ -1947,7 +1947,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt"));
stmt_ty try_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 649) // token='try'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 651) // token='try'
&&
(try_stmt_var = try_stmt_rule(p)) // try_stmt
)
@@ -1968,7 +1968,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt"));
stmt_ty while_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 680) // token='while'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 682) // token='while'
&&
(while_stmt_var = while_stmt_rule(p)) // while_stmt
)
@@ -3136,7 +3136,7 @@ del_stmt_rule(Parser *p)
Token * _keyword;
asdl_expr_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 620)) // token='del'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='del'
&&
(a = del_targets_rule(p)) // del_targets
&&
@@ -3425,7 +3425,7 @@ import_name_rule(Parser *p)
Token * _keyword;
asdl_alias_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 629)) // token='import'
+ (_keyword = _PyPegen_expect_token(p, 631)) // token='import'
&&
(a = dotted_as_names_rule(p)) // dotted_as_names
)
@@ -3494,13 +3494,13 @@ import_from_rule(Parser *p)
expr_ty b;
asdl_alias_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 628)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 630)) // token='from'
&&
(a = _loop0_18_rule(p)) // (('.' | '...'))*
&&
(b = dotted_name_rule(p)) // dotted_name
&&
- (_keyword_1 = _PyPegen_expect_token(p, 629)) // token='import'
+ (_keyword_1 = _PyPegen_expect_token(p, 631)) // token='import'
&&
(c = import_from_targets_rule(p)) // import_from_targets
)
@@ -3538,11 +3538,11 @@ import_from_rule(Parser *p)
asdl_seq * a;
asdl_alias_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 628)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 630)) // token='from'
&&
(a = _loop1_19_rule(p)) // (('.' | '...'))+
&&
- (_keyword_1 = _PyPegen_expect_token(p, 629)) // token='import'
+ (_keyword_1 = _PyPegen_expect_token(p, 631)) // token='import'
&&
(b = import_from_targets_rule(p)) // import_from_targets
)
@@ -4291,7 +4291,7 @@ class_def_raw_rule(Parser *p)
asdl_stmt_seq* c;
void *t;
if (
- (_keyword = _PyPegen_expect_token(p, 692)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 694)) // token='class'
&&
(a = _PyPegen_name_token(p)) // NAME
&&
@@ -4458,7 +4458,7 @@ function_def_raw_rule(Parser *p)
void *t;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 690)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 692)) // token='def'
&&
(n = _PyPegen_name_token(p)) // NAME
&&
@@ -4519,9 +4519,9 @@ function_def_raw_rule(Parser *p)
void *t;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 689)) // token='async'
+ (_keyword = _PyPegen_expect_token(p, 691)) // token='async'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 690)) // token='def'
+ (_keyword_1 = _PyPegen_expect_token(p, 692)) // token='def'
&&
(n = _PyPegen_name_token(p)) // NAME
&&
@@ -5859,7 +5859,7 @@ if_stmt_rule(Parser *p)
asdl_stmt_seq* b;
stmt_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 675)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 677)) // token='if'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -5904,7 +5904,7 @@ if_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 675)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 677)) // token='if'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -5999,7 +5999,7 @@ elif_stmt_rule(Parser *p)
asdl_stmt_seq* b;
stmt_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 677)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 679)) // token='elif'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6044,7 +6044,7 @@ elif_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 677)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 679)) // token='elif'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6125,7 +6125,7 @@ else_block_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 678)) // token='else'
+ (_keyword = _PyPegen_expect_token(p, 680)) // token='else'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -6204,7 +6204,7 @@ while_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 680)) // token='while'
+ (_keyword = _PyPegen_expect_token(p, 682)) // token='while'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6304,11 +6304,11 @@ for_stmt_rule(Parser *p)
expr_ty t;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 685)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 687)) // token='for'
&&
(t = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 686)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 688)) // token='in'
&&
(_cut_var = 1)
&&
@@ -6366,13 +6366,13 @@ for_stmt_rule(Parser *p)
expr_ty t;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 689)) // token='async'
+ (_keyword = _PyPegen_expect_token(p, 691)) // token='async'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 685)) // token='for'
+ (_keyword_1 = _PyPegen_expect_token(p, 687)) // token='for'
&&
(t = star_targets_rule(p)) // star_targets
&&
- (_keyword_2 = _PyPegen_expect_token(p, 686)) // token='in'
+ (_keyword_2 = _PyPegen_expect_token(p, 688)) // token='in'
&&
(_cut_var = 1)
&&
@@ -6501,7 +6501,7 @@ with_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
@@ -6552,7 +6552,7 @@ with_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='with'
&&
(a = (asdl_withitem_seq*)_gather_35_rule(p)) // ','.with_item+
&&
@@ -6601,9 +6601,9 @@ with_stmt_rule(Parser *p)
asdl_withitem_seq* a;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 689)) // token='async'
+ (_keyword = _PyPegen_expect_token(p, 691)) // token='async'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 640)) // token='with'
+ (_keyword_1 = _PyPegen_expect_token(p, 642)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
@@ -6653,9 +6653,9 @@ with_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 689)) // token='async'
+ (_keyword = _PyPegen_expect_token(p, 691)) // token='async'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 640)) // token='with'
+ (_keyword_1 = _PyPegen_expect_token(p, 642)) // token='with'
&&
(a = (asdl_withitem_seq*)_gather_35_rule(p)) // ','.with_item+
&&
@@ -6741,7 +6741,7 @@ with_item_rule(Parser *p)
if (
(e = expression_rule(p)) // expression
&&
- (_keyword = _PyPegen_expect_token(p, 673)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 675)) // token='as'
&&
(t = star_target_rule(p)) // star_target
&&
@@ -6866,7 +6866,7 @@ try_stmt_rule(Parser *p)
asdl_stmt_seq* b;
asdl_stmt_seq* f;
if (
- (_keyword = _PyPegen_expect_token(p, 649)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 651)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -6910,7 +6910,7 @@ try_stmt_rule(Parser *p)
asdl_excepthandler_seq* ex;
void *f;
if (
- (_keyword = _PyPegen_expect_token(p, 649)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 651)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -6958,7 +6958,7 @@ try_stmt_rule(Parser *p)
asdl_excepthandler_seq* ex;
void *f;
if (
- (_keyword = _PyPegen_expect_token(p, 649)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 651)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -7001,7 +7001,8 @@ try_stmt_rule(Parser *p)
// except_block:
// | invalid_except_stmt_indent
-// | 'except' expression ['as' NAME] ':' block
+// | 'except' expressions ':' block
+// | 'except' expression 'as' NAME ':' block
// | 'except' ':' block
// | invalid_except_stmt
static excepthandler_ty
@@ -7044,30 +7045,75 @@ except_block_rule(Parser *p)
D(fprintf(stderr, "%*c%s except_block[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_except_stmt_indent"));
}
- { // 'except' expression ['as' NAME] ':' block
+ { // 'except' expressions ':' block
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> except_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' expression ['as' NAME] ':' block"));
+ D(fprintf(stderr, "%*c> except_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' expressions ':' block"));
Token * _keyword;
Token * _literal;
asdl_stmt_seq* b;
expr_ty e;
- void *t;
if (
- (_keyword = _PyPegen_expect_token(p, 670)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 672)) // token='except'
+ &&
+ (e = expressions_rule(p)) // expressions
+ &&
+ (_literal = _PyPegen_expect_token(p, 11)) // token=':'
+ &&
+ (b = block_rule(p)) // block
+ )
+ {
+ D(fprintf(stderr, "%*c+ except_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' expressions ':' block"));
+ Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
+ if (_token == NULL) {
+ p->level--;
+ return NULL;
+ }
+ int _end_lineno = _token->end_lineno;
+ UNUSED(_end_lineno); // Only used by EXTRA macro
+ int _end_col_offset = _token->end_col_offset;
+ UNUSED(_end_col_offset); // Only used by EXTRA macro
+ _res = _PyAST_ExceptHandler ( e , NULL , b , EXTRA );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s except_block[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' expressions ':' block"));
+ }
+ { // 'except' expression 'as' NAME ':' block
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> except_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' expression 'as' NAME ':' block"));
+ Token * _keyword;
+ Token * _keyword_1;
+ Token * _literal;
+ asdl_stmt_seq* b;
+ expr_ty e;
+ expr_ty t;
+ if (
+ (_keyword = _PyPegen_expect_token(p, 672)) // token='except'
&&
(e = expression_rule(p)) // expression
&&
- (t = _tmp_22_rule(p), !p->error_indicator) // ['as' NAME]
+ (_keyword_1 = _PyPegen_expect_token(p, 675)) // token='as'
+ &&
+ (t = _PyPegen_name_token(p)) // NAME
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
(b = block_rule(p)) // block
)
{
- D(fprintf(stderr, "%*c+ except_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' expression ['as' NAME] ':' block"));
+ D(fprintf(stderr, "%*c+ except_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' expression 'as' NAME ':' block"));
Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
if (_token == NULL) {
p->level--;
@@ -7077,7 +7123,7 @@ except_block_rule(Parser *p)
UNUSED(_end_lineno); // Only used by EXTRA macro
int _end_col_offset = _token->end_col_offset;
UNUSED(_end_col_offset); // Only used by EXTRA macro
- _res = _PyAST_ExceptHandler ( e , ( t ) ? ( ( expr_ty ) t ) -> v . Name . id : NULL , b , EXTRA );
+ _res = _PyAST_ExceptHandler ( e , ( ( expr_ty ) t ) -> v . Name . id , b , EXTRA );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
p->level--;
@@ -7087,7 +7133,7 @@ except_block_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s except_block[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' expression ['as' NAME] ':' block"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' expression 'as' NAME ':' block"));
}
{ // 'except' ':' block
if (p->error_indicator) {
@@ -7099,7 +7145,7 @@ except_block_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 670)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 672)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -7155,7 +7201,8 @@ except_block_rule(Parser *p)
// except_star_block:
// | invalid_except_star_stmt_indent
-// | 'except' '*' expression ['as' NAME] ':' block
+// | 'except' '*' expressions ':' block
+// | 'except' '*' expression 'as' NAME ':' block
// | invalid_except_star_stmt
static excepthandler_ty
except_star_block_rule(Parser *p)
@@ -7197,33 +7244,81 @@ except_star_block_rule(Parser *p)
D(fprintf(stderr, "%*c%s except_star_block[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_except_star_stmt_indent"));
}
- { // 'except' '*' expression ['as' NAME] ':' block
+ { // 'except' '*' expressions ':' block
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> except_star_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' '*' expression ['as' NAME] ':' block"));
+ D(fprintf(stderr, "%*c> except_star_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' '*' expressions ':' block"));
Token * _keyword;
Token * _literal;
Token * _literal_1;
asdl_stmt_seq* b;
expr_ty e;
- void *t;
if (
- (_keyword = _PyPegen_expect_token(p, 670)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 672)) // token='except'
+ &&
+ (_literal = _PyPegen_expect_token(p, 16)) // token='*'
+ &&
+ (e = expressions_rule(p)) // expressions
+ &&
+ (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
+ &&
+ (b = block_rule(p)) // block
+ )
+ {
+ D(fprintf(stderr, "%*c+ except_star_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' '*' expressions ':' block"));
+ Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
+ if (_token == NULL) {
+ p->level--;
+ return NULL;
+ }
+ int _end_lineno = _token->end_lineno;
+ UNUSED(_end_lineno); // Only used by EXTRA macro
+ int _end_col_offset = _token->end_col_offset;
+ UNUSED(_end_col_offset); // Only used by EXTRA macro
+ _res = _PyAST_ExceptHandler ( e , NULL , b , EXTRA );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s except_star_block[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' '*' expressions ':' block"));
+ }
+ { // 'except' '*' expression 'as' NAME ':' block
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> except_star_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' '*' expression 'as' NAME ':' block"));
+ Token * _keyword;
+ Token * _keyword_1;
+ Token * _literal;
+ Token * _literal_1;
+ asdl_stmt_seq* b;
+ expr_ty e;
+ expr_ty t;
+ if (
+ (_keyword = _PyPegen_expect_token(p, 672)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
(e = expression_rule(p)) // expression
&&
- (t = _tmp_22_rule(p), !p->error_indicator) // ['as' NAME]
+ (_keyword_1 = _PyPegen_expect_token(p, 675)) // token='as'
+ &&
+ (t = _PyPegen_name_token(p)) // NAME
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
&&
(b = block_rule(p)) // block
)
{
- D(fprintf(stderr, "%*c+ except_star_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' '*' expression ['as' NAME] ':' block"));
+ D(fprintf(stderr, "%*c+ except_star_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' '*' expression 'as' NAME ':' block"));
Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
if (_token == NULL) {
p->level--;
@@ -7233,7 +7328,7 @@ except_star_block_rule(Parser *p)
UNUSED(_end_lineno); // Only used by EXTRA macro
int _end_col_offset = _token->end_col_offset;
UNUSED(_end_col_offset); // Only used by EXTRA macro
- _res = _PyAST_ExceptHandler ( e , ( t ) ? ( ( expr_ty ) t ) -> v . Name . id : NULL , b , EXTRA );
+ _res = _PyAST_ExceptHandler ( e , ( ( expr_ty ) t ) -> v . Name . id , b , EXTRA );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
p->level--;
@@ -7243,7 +7338,7 @@ except_star_block_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s except_star_block[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' '*' expression ['as' NAME] ':' block"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' '*' expression 'as' NAME ':' block"));
}
if (p->call_invalid_rules) { // invalid_except_star_stmt
if (p->error_indicator) {
@@ -7312,7 +7407,7 @@ finally_block_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 666)) // token='finally'
+ (_keyword = _PyPegen_expect_token(p, 668)) // token='finally'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -7620,7 +7715,7 @@ guard_rule(Parser *p)
Token * _keyword;
expr_ty guard;
if (
- (_keyword = _PyPegen_expect_token(p, 675)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 677)) // token='if'
&&
(guard = named_expression_rule(p)) // named_expression
)
@@ -7815,7 +7910,7 @@ as_pattern_rule(Parser *p)
if (
(pattern = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 673)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 675)) // token='as'
&&
(target = pattern_capture_target_rule(p)) // pattern_capture_target
)
@@ -8249,7 +8344,7 @@ literal_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 618)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 620)) // token='None'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -8282,7 +8377,7 @@ literal_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 617)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 619)) // token='True'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -8315,7 +8410,7 @@ literal_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 619)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 621)) // token='False'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -8441,7 +8536,7 @@ literal_expr_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 618)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 620)) // token='None'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -8474,7 +8569,7 @@ literal_expr_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 617)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 619)) // token='True'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -8507,7 +8602,7 @@ literal_expr_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 619)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 621)) // token='False'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -11110,11 +11205,11 @@ expression_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 675)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 677)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- (_keyword_1 = _PyPegen_expect_token(p, 678)) // token='else'
+ (_keyword_1 = _PyPegen_expect_token(p, 680)) // token='else'
&&
(c = expression_rule(p)) // expression
)
@@ -11218,9 +11313,9 @@ yield_expr_rule(Parser *p)
Token * _keyword_1;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 583)) // token='yield'
+ (_keyword = _PyPegen_expect_token(p, 585)) // token='yield'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 628)) // token='from'
+ (_keyword_1 = _PyPegen_expect_token(p, 630)) // token='from'
&&
(a = expression_rule(p)) // expression
)
@@ -11256,7 +11351,7 @@ yield_expr_rule(Parser *p)
Token * _keyword;
void *a;
if (
- (_keyword = _PyPegen_expect_token(p, 583)) // token='yield'
+ (_keyword = _PyPegen_expect_token(p, 585)) // token='yield'
&&
(a = star_expressions_rule(p), !p->error_indicator) // star_expressions?
)
@@ -11996,7 +12091,7 @@ inversion_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 694)) // token='not'
+ (_keyword = _PyPegen_expect_token(p, 696)) // token='not'
&&
(a = inversion_rule(p)) // inversion
)
@@ -12650,9 +12745,9 @@ notin_bitwise_or_rule(Parser *p)
Token * _keyword_1;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 694)) // token='not'
+ (_keyword = _PyPegen_expect_token(p, 696)) // token='not'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 686)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 688)) // token='in'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -12698,7 +12793,7 @@ in_bitwise_or_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 686)) // token='in'
+ (_keyword = _PyPegen_expect_token(p, 688)) // token='in'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -12745,9 +12840,9 @@ isnot_bitwise_or_rule(Parser *p)
Token * _keyword_1;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 592)) // token='is'
+ (_keyword = _PyPegen_expect_token(p, 594)) // token='is'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 694)) // token='not'
+ (_keyword_1 = _PyPegen_expect_token(p, 696)) // token='not'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -12793,7 +12888,7 @@ is_bitwise_or_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 592)) // token='is'
+ (_keyword = _PyPegen_expect_token(p, 594)) // token='is'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -14109,7 +14204,7 @@ await_primary_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 593)) // token='await'
+ (_keyword = _PyPegen_expect_token(p, 595)) // token='await'
&&
(a = primary_rule(p)) // primary
)
@@ -14653,7 +14748,7 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 617)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 619)) // token='True'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -14686,7 +14781,7 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 619)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 621)) // token='False'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -14719,7 +14814,7 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 618)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 620)) // token='None'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -14987,7 +15082,7 @@ lambdef_rule(Parser *p)
void *a;
expr_ty b;
if (
- (_keyword = _PyPegen_expect_token(p, 616)) // token='lambda'
+ (_keyword = _PyPegen_expect_token(p, 618)) // token='lambda'
&&
(a = lambda_params_rule(p), !p->error_indicator) // lambda_params?
&&
@@ -16918,13 +17013,13 @@ for_if_clause_rule(Parser *p)
expr_ty b;
asdl_expr_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 689)) // token='async'
+ (_keyword = _PyPegen_expect_token(p, 691)) // token='async'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 685)) // token='for'
+ (_keyword_1 = _PyPegen_expect_token(p, 687)) // token='for'
&&
(a = star_targets_rule(p)) // star_targets
&&
- (_keyword_2 = _PyPegen_expect_token(p, 686)) // token='in'
+ (_keyword_2 = _PyPegen_expect_token(p, 688)) // token='in'
&&
(_cut_var = 1)
&&
@@ -16963,11 +17058,11 @@ for_if_clause_rule(Parser *p)
expr_ty b;
asdl_expr_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 685)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 687)) // token='for'
&&
(a = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 686)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 688)) // token='in'
&&
(_cut_var = 1)
&&
@@ -20268,11 +20363,11 @@ expression_without_invalid_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 675)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 677)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- (_keyword_1 = _PyPegen_expect_token(p, 678)) // token='else'
+ (_keyword_1 = _PyPegen_expect_token(p, 680)) // token='else'
&&
(c = expression_rule(p)) // expression
)
@@ -20571,7 +20666,7 @@ invalid_expression_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 675)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 677)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
@@ -20604,11 +20699,11 @@ invalid_expression_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 675)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 677)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- (_keyword_1 = _PyPegen_expect_token(p, 678)) // token='else'
+ (_keyword_1 = _PyPegen_expect_token(p, 680)) // token='else'
&&
_PyPegen_lookahead(0, (void *(*)(Parser *)) expression_rule, p)
)
@@ -20640,11 +20735,11 @@ invalid_expression_rule(Parser *p)
if (
(a = (stmt_ty)_tmp_114_rule(p)) // pass_stmt | break_stmt | continue_stmt
&&
- (_keyword = _PyPegen_expect_token(p, 675)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 677)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- (_keyword_1 = _PyPegen_expect_token(p, 678)) // token='else'
+ (_keyword_1 = _PyPegen_expect_token(p, 680)) // token='else'
&&
(c = simple_stmt_rule(p)) // simple_stmt
)
@@ -20673,7 +20768,7 @@ invalid_expression_rule(Parser *p)
Token * a;
Token * b;
if (
- (a = _PyPegen_expect_token(p, 616)) // token='lambda'
+ (a = _PyPegen_expect_token(p, 618)) // token='lambda'
&&
(_opt_var = lambda_params_rule(p), !p->error_indicator) // lambda_params?
&&
@@ -21144,7 +21239,7 @@ invalid_del_stmt_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 620)) // token='del'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='del'
&&
(a = star_expressions_rule(p)) // star_expressions
)
@@ -22580,7 +22675,7 @@ invalid_with_item_rule(Parser *p)
if (
(expression_var = expression_rule(p)) // expression
&&
- (_keyword = _PyPegen_expect_token(p, 673)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 675)) // token='as'
&&
(a = expression_rule(p)) // expression
&&
@@ -22630,13 +22725,13 @@ invalid_for_if_clause_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
void *_tmp_133_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 689), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 691), !p->error_indicator) // 'async'?
&&
- (_keyword = _PyPegen_expect_token(p, 685)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 687)) // token='for'
&&
(_tmp_133_var = _tmp_133_rule(p)) // bitwise_or ((',' bitwise_or))* ','?
&&
- _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 686) // token='in'
+ _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 688) // token='in'
)
{
D(fprintf(stderr, "%*c+ invalid_for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'for' (bitwise_or ((',' bitwise_or))* ','?) !'in'"));
@@ -22682,9 +22777,9 @@ invalid_for_target_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
expr_ty a;
if (
- (_opt_var = _PyPegen_expect_token(p, 689), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 691), !p->error_indicator) // 'async'?
&&
- (_keyword = _PyPegen_expect_token(p, 685)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 687)) // token='for'
&&
(a = star_expressions_rule(p)) // star_expressions
)
@@ -22814,11 +22909,11 @@ invalid_import_rule(Parser *p)
Token * a;
expr_ty dotted_name_var;
if (
- (a = _PyPegen_expect_token(p, 629)) // token='import'
+ (a = _PyPegen_expect_token(p, 631)) // token='import'
&&
(_gather_135_var = _gather_135_rule(p)) // ','.dotted_name+
&&
- (_keyword = _PyPegen_expect_token(p, 628)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 630)) // token='from'
&&
(dotted_name_var = dotted_name_rule(p)) // dotted_name
)
@@ -22845,7 +22940,7 @@ invalid_import_rule(Parser *p)
Token * _keyword;
Token * token;
if (
- (_keyword = _PyPegen_expect_token(p, 629)) // token='import'
+ (_keyword = _PyPegen_expect_token(p, 631)) // token='import'
&&
(token = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -22969,9 +23064,9 @@ invalid_with_stmt_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
Token * newline_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 689), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 691), !p->error_indicator) // 'async'?
&&
- (_keyword = _PyPegen_expect_token(p, 640)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='with'
&&
(_gather_137_var = _gather_137_rule(p)) // ','.(expression ['as' star_target])+
&&
@@ -23007,9 +23102,9 @@ invalid_with_stmt_rule(Parser *p)
UNUSED(_opt_var_1); // Silence compiler warnings
Token * newline_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 689), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 691), !p->error_indicator) // 'async'?
&&
- (_keyword = _PyPegen_expect_token(p, 640)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
@@ -23069,9 +23164,9 @@ invalid_with_stmt_indent_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 689), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 691), !p->error_indicator) // 'async'?
&&
- (a = _PyPegen_expect_token(p, 640)) // token='with'
+ (a = _PyPegen_expect_token(p, 642)) // token='with'
&&
(_gather_137_var = _gather_137_rule(p)) // ','.(expression ['as' star_target])+
&&
@@ -23112,9 +23207,9 @@ invalid_with_stmt_indent_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 689), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 691), !p->error_indicator) // 'async'?
&&
- (a = _PyPegen_expect_token(p, 640)) // token='with'
+ (a = _PyPegen_expect_token(p, 642)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
@@ -23177,7 +23272,7 @@ invalid_try_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 649)) // token='try'
+ (a = _PyPegen_expect_token(p, 651)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23209,7 +23304,7 @@ invalid_try_stmt_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* block_var;
if (
- (_keyword = _PyPegen_expect_token(p, 649)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 651)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23248,7 +23343,7 @@ invalid_try_stmt_rule(Parser *p)
Token * b;
expr_ty expression_var;
if (
- (_keyword = _PyPegen_expect_token(p, 649)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 651)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23256,7 +23351,7 @@ invalid_try_stmt_rule(Parser *p)
&&
(_loop1_37_var = _loop1_37_rule(p)) // except_block+
&&
- (a = _PyPegen_expect_token(p, 670)) // token='except'
+ (a = _PyPegen_expect_token(p, 672)) // token='except'
&&
(b = _PyPegen_expect_token(p, 16)) // token='*'
&&
@@ -23295,7 +23390,7 @@ invalid_try_stmt_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
Token * a;
if (
- (_keyword = _PyPegen_expect_token(p, 649)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 651)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23303,7 +23398,7 @@ invalid_try_stmt_rule(Parser *p)
&&
(_loop1_38_var = _loop1_38_rule(p)) // except_star_block+
&&
- (a = _PyPegen_expect_token(p, 670)) // token='except'
+ (a = _PyPegen_expect_token(p, 672)) // token='except'
&&
(_opt_var = _tmp_142_rule(p), !p->error_indicator) // [expression ['as' NAME]]
&&
@@ -23330,7 +23425,7 @@ invalid_try_stmt_rule(Parser *p)
}
// invalid_except_stmt:
-// | 'except' expression ',' expressions ['as' NAME] ':'
+// | 'except' expression ',' expressions 'as' NAME ':'
// | 'except' expression ['as' NAME] NEWLINE
// | 'except' NEWLINE
// | 'except' expression 'as' expression
@@ -23346,21 +23441,21 @@ invalid_except_stmt_rule(Parser *p)
}
void * _res = NULL;
int _mark = p->mark;
- { // 'except' expression ',' expressions ['as' NAME] ':'
+ { // 'except' expression ',' expressions 'as' NAME ':'
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> invalid_except_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' expression ',' expressions ['as' NAME] ':'"));
+ D(fprintf(stderr, "%*c> invalid_except_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' expression ',' expressions 'as' NAME ':'"));
Token * _keyword;
+ Token * _keyword_1;
Token * _literal;
Token * _literal_1;
- void *_opt_var;
- UNUSED(_opt_var); // Silence compiler warnings
expr_ty a;
expr_ty expressions_var;
+ expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 670)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 672)) // token='except'
&&
(a = expression_rule(p)) // expression
&&
@@ -23368,13 +23463,15 @@ invalid_except_stmt_rule(Parser *p)
&&
(expressions_var = expressions_rule(p)) // expressions
&&
- (_opt_var = _tmp_22_rule(p), !p->error_indicator) // ['as' NAME]
+ (_keyword_1 = _PyPegen_expect_token(p, 675)) // token='as'
+ &&
+ (name_var = _PyPegen_name_token(p)) // NAME
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ invalid_except_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' expression ',' expressions ['as' NAME] ':'"));
- _res = RAISE_SYNTAX_ERROR_STARTING_FROM ( a , "multiple exception types must be parenthesized" );
+ D(fprintf(stderr, "%*c+ invalid_except_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' expression ',' expressions 'as' NAME ':'"));
+ _res = RAISE_SYNTAX_ERROR_STARTING_FROM ( a , "multiple exception types must be parenthesized when using 'as'" );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
p->level--;
@@ -23384,7 +23481,7 @@ invalid_except_stmt_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s invalid_except_stmt[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' expression ',' expressions ['as' NAME] ':'"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' expression ',' expressions 'as' NAME ':'"));
}
{ // 'except' expression ['as' NAME] NEWLINE
if (p->error_indicator) {
@@ -23398,7 +23495,7 @@ invalid_except_stmt_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 670)) // token='except'
+ (a = _PyPegen_expect_token(p, 672)) // token='except'
&&
(expression_var = expression_rule(p)) // expression
&&
@@ -23429,7 +23526,7 @@ invalid_except_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 670)) // token='except'
+ (a = _PyPegen_expect_token(p, 672)) // token='except'
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -23458,11 +23555,11 @@ invalid_except_stmt_rule(Parser *p)
expr_ty a;
expr_ty expression_var;
if (
- (_keyword = _PyPegen_expect_token(p, 670)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 672)) // token='except'
&&
(expression_var = expression_rule(p)) // expression
&&
- (_keyword_1 = _PyPegen_expect_token(p, 673)) // token='as'
+ (_keyword_1 = _PyPegen_expect_token(p, 675)) // token='as'
&&
(a = expression_rule(p)) // expression
)
@@ -23487,7 +23584,7 @@ invalid_except_stmt_rule(Parser *p)
}
// invalid_except_star_stmt:
-// | 'except' '*' expression ',' expressions ['as' NAME] ':'
+// | 'except' '*' expression ',' expressions 'as' NAME ':'
// | 'except' '*' expression ['as' NAME] NEWLINE
// | 'except' '*' (NEWLINE | ':')
// | 'except' '*' expression 'as' expression
@@ -23503,22 +23600,22 @@ invalid_except_star_stmt_rule(Parser *p)
}
void * _res = NULL;
int _mark = p->mark;
- { // 'except' '*' expression ',' expressions ['as' NAME] ':'
+ { // 'except' '*' expression ',' expressions 'as' NAME ':'
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> invalid_except_star_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' '*' expression ',' expressions ['as' NAME] ':'"));
+ D(fprintf(stderr, "%*c> invalid_except_star_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' '*' expression ',' expressions 'as' NAME ':'"));
Token * _keyword;
+ Token * _keyword_1;
Token * _literal;
Token * _literal_1;
Token * _literal_2;
- void *_opt_var;
- UNUSED(_opt_var); // Silence compiler warnings
expr_ty a;
expr_ty expressions_var;
+ expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 670)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 672)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
@@ -23528,13 +23625,15 @@ invalid_except_star_stmt_rule(Parser *p)
&&
(expressions_var = expressions_rule(p)) // expressions
&&
- (_opt_var = _tmp_22_rule(p), !p->error_indicator) // ['as' NAME]
+ (_keyword_1 = _PyPegen_expect_token(p, 675)) // token='as'
+ &&
+ (name_var = _PyPegen_name_token(p)) // NAME
&&
(_literal_2 = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ invalid_except_star_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' '*' expression ',' expressions ['as' NAME] ':'"));
- _res = RAISE_SYNTAX_ERROR_STARTING_FROM ( a , "multiple exception types must be parenthesized" );
+ D(fprintf(stderr, "%*c+ invalid_except_star_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' '*' expression ',' expressions 'as' NAME ':'"));
+ _res = RAISE_SYNTAX_ERROR_STARTING_FROM ( a , "multiple exception types must be parenthesized when using 'as'" );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
p->level--;
@@ -23544,7 +23643,7 @@ invalid_except_star_stmt_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s invalid_except_star_stmt[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' '*' expression ',' expressions ['as' NAME] ':'"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' '*' expression ',' expressions 'as' NAME ':'"));
}
{ // 'except' '*' expression ['as' NAME] NEWLINE
if (p->error_indicator) {
@@ -23559,7 +23658,7 @@ invalid_except_star_stmt_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 670)) // token='except'
+ (a = _PyPegen_expect_token(p, 672)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
@@ -23593,7 +23692,7 @@ invalid_except_star_stmt_rule(Parser *p)
void *_tmp_143_var;
Token * a;
if (
- (a = _PyPegen_expect_token(p, 670)) // token='except'
+ (a = _PyPegen_expect_token(p, 672)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
@@ -23625,13 +23724,13 @@ invalid_except_star_stmt_rule(Parser *p)
expr_ty a;
expr_ty expression_var;
if (
- (_keyword = _PyPegen_expect_token(p, 670)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 672)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
(expression_var = expression_rule(p)) // expression
&&
- (_keyword_1 = _PyPegen_expect_token(p, 673)) // token='as'
+ (_keyword_1 = _PyPegen_expect_token(p, 675)) // token='as'
&&
(a = expression_rule(p)) // expression
)
@@ -23678,7 +23777,7 @@ invalid_finally_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 666)) // token='finally'
+ (a = _PyPegen_expect_token(p, 668)) // token='finally'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23734,7 +23833,7 @@ invalid_except_stmt_indent_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 670)) // token='except'
+ (a = _PyPegen_expect_token(p, 672)) // token='except'
&&
(expression_var = expression_rule(p)) // expression
&&
@@ -23770,7 +23869,7 @@ invalid_except_stmt_indent_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 670)) // token='except'
+ (a = _PyPegen_expect_token(p, 672)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -23826,7 +23925,7 @@ invalid_except_star_stmt_indent_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 670)) // token='except'
+ (a = _PyPegen_expect_token(p, 672)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
@@ -24065,7 +24164,7 @@ invalid_as_pattern_rule(Parser *p)
if (
(or_pattern_var = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 673)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 675)) // token='as'
&&
(a = _PyPegen_expect_soft_keyword(p, "_")) // soft_keyword='"_"'
)
@@ -24095,7 +24194,7 @@ invalid_as_pattern_rule(Parser *p)
if (
(or_pattern_var = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 673)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 675)) // token='as'
&&
(a = expression_rule(p)) // expression
)
@@ -24247,7 +24346,7 @@ invalid_if_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 675)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 677)) // token='if'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -24278,7 +24377,7 @@ invalid_if_stmt_rule(Parser *p)
expr_ty a_1;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 675)) // token='if'
+ (a = _PyPegen_expect_token(p, 677)) // token='if'
&&
(a_1 = named_expression_rule(p)) // named_expression
&&
@@ -24333,7 +24432,7 @@ invalid_elif_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 677)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 679)) // token='elif'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -24364,7 +24463,7 @@ invalid_elif_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 677)) // token='elif'
+ (a = _PyPegen_expect_token(p, 679)) // token='elif'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -24417,7 +24516,7 @@ invalid_else_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 678)) // token='else'
+ (a = _PyPegen_expect_token(p, 680)) // token='else'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -24470,7 +24569,7 @@ invalid_while_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 680)) // token='while'
+ (_keyword = _PyPegen_expect_token(p, 682)) // token='while'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -24501,7 +24600,7 @@ invalid_while_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 680)) // token='while'
+ (a = _PyPegen_expect_token(p, 682)) // token='while'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -24560,13 +24659,13 @@ invalid_for_stmt_rule(Parser *p)
expr_ty star_expressions_var;
expr_ty star_targets_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 689), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 691), !p->error_indicator) // 'async'?
&&
- (_keyword = _PyPegen_expect_token(p, 685)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 687)) // token='for'
&&
(star_targets_var = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 686)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 688)) // token='in'
&&
(star_expressions_var = star_expressions_rule(p)) // star_expressions
&&
@@ -24601,13 +24700,13 @@ invalid_for_stmt_rule(Parser *p)
expr_ty star_expressions_var;
expr_ty star_targets_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 689), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 691), !p->error_indicator) // 'async'?
&&
- (a = _PyPegen_expect_token(p, 685)) // token='for'
+ (a = _PyPegen_expect_token(p, 687)) // token='for'
&&
(star_targets_var = star_targets_rule(p)) // star_targets
&&
- (_keyword = _PyPegen_expect_token(p, 686)) // token='in'
+ (_keyword = _PyPegen_expect_token(p, 688)) // token='in'
&&
(star_expressions_var = star_expressions_rule(p)) // star_expressions
&&
@@ -24673,9 +24772,9 @@ invalid_def_raw_rule(Parser *p)
expr_ty name_var;
Token * newline_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 689), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 691), !p->error_indicator) // 'async'?
&&
- (a = _PyPegen_expect_token(p, 690)) // token='def'
+ (a = _PyPegen_expect_token(p, 692)) // token='def'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
@@ -24732,9 +24831,9 @@ invalid_def_raw_rule(Parser *p)
asdl_stmt_seq* block_var;
expr_ty name_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 689), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 691), !p->error_indicator) // 'async'?
&&
- (_keyword = _PyPegen_expect_token(p, 690)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 692)) // token='def'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
@@ -24798,7 +24897,7 @@ invalid_class_def_raw_rule(Parser *p)
expr_ty name_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 692)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 694)) // token='class'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
@@ -24837,7 +24936,7 @@ invalid_class_def_raw_rule(Parser *p)
expr_ty name_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 692)) // token='class'
+ (a = _PyPegen_expect_token(p, 694)) // token='class'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
@@ -25655,7 +25754,7 @@ invalid_arithmetic_rule(Parser *p)
&&
(_tmp_150_var = _tmp_150_rule(p)) // '+' | '-' | '*' | '/' | '%' | '//' | '@'
&&
- (a = _PyPegen_expect_token(p, 694)) // token='not'
+ (a = _PyPegen_expect_token(p, 696)) // token='not'
&&
(b = inversion_rule(p)) // inversion
)
@@ -25704,7 +25803,7 @@ invalid_factor_rule(Parser *p)
if (
(_tmp_151_var = _tmp_151_rule(p)) // '+' | '-' | '~'
&&
- (a = _PyPegen_expect_token(p, 694)) // token='not'
+ (a = _PyPegen_expect_token(p, 696)) // token='not'
&&
(b = factor_rule(p)) // factor
)
@@ -26051,7 +26150,7 @@ _tmp_5_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 629)) // token='import'
+ (_keyword = _PyPegen_expect_token(p, 631)) // token='import'
)
{
D(fprintf(stderr, "%*c+ _tmp_5[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'"));
@@ -26070,7 +26169,7 @@ _tmp_5_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 628)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 630)) // token='from'
)
{
D(fprintf(stderr, "%*c+ _tmp_5[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'"));
@@ -26108,7 +26207,7 @@ _tmp_6_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 690)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 692)) // token='def'
)
{
D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'"));
@@ -26146,7 +26245,7 @@ _tmp_6_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 689)) // token='async'
+ (_keyword = _PyPegen_expect_token(p, 691)) // token='async'
)
{
D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'"));
@@ -26184,7 +26283,7 @@ _tmp_7_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 692)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 694)) // token='class'
)
{
D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'"));
@@ -26241,7 +26340,7 @@ _tmp_8_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 640)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 642)) // token='with'
)
{
D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'"));
@@ -26260,7 +26359,7 @@ _tmp_8_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 689)) // token='async'
+ (_keyword = _PyPegen_expect_token(p, 691)) // token='async'
)
{
D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'"));
@@ -26298,7 +26397,7 @@ _tmp_9_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 685)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 687)) // token='for'
)
{
D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'"));
@@ -26317,7 +26416,7 @@ _tmp_9_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 689)) // token='async'
+ (_keyword = _PyPegen_expect_token(p, 691)) // token='async'
)
{
D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'"));
@@ -26542,7 +26641,7 @@ _tmp_13_rule(Parser *p)
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 628)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 630)) // token='from'
&&
(z = expression_rule(p)) // expression
)
@@ -27064,7 +27163,7 @@ _tmp_22_rule(Parser *p)
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 673)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 675)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
@@ -32426,7 +32525,7 @@ _tmp_109_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 617)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 619)) // token='True'
)
{
D(fprintf(stderr, "%*c+ _tmp_109[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -32445,7 +32544,7 @@ _tmp_109_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 619)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 621)) // token='False'
)
{
D(fprintf(stderr, "%*c+ _tmp_109[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -32464,7 +32563,7 @@ _tmp_109_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 618)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 620)) // token='None'
)
{
D(fprintf(stderr, "%*c+ _tmp_109[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -32675,7 +32774,7 @@ _tmp_113_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_113[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 678)) // token='else'
+ (_keyword = _PyPegen_expect_token(p, 680)) // token='else'
)
{
D(fprintf(stderr, "%*c+ _tmp_113[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'"));
@@ -32922,7 +33021,7 @@ _tmp_116_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 617)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 619)) // token='True'
)
{
D(fprintf(stderr, "%*c+ _tmp_116[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -32941,7 +33040,7 @@ _tmp_116_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 618)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 620)) // token='None'
)
{
D(fprintf(stderr, "%*c+ _tmp_116[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -32960,7 +33059,7 @@ _tmp_116_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 619)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 621)) // token='False'
)
{
D(fprintf(stderr, "%*c+ _tmp_116[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -34373,7 +34472,7 @@ _tmp_140_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 670)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 672)) // token='except'
)
{
D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'"));
@@ -34392,7 +34491,7 @@ _tmp_140_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 666)) // token='finally'
+ (_keyword = _PyPegen_expect_token(p, 668)) // token='finally'
)
{
D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'"));
@@ -35390,7 +35489,7 @@ _tmp_156_rule(Parser *p)
Token * _keyword;
expr_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 584)) // token='or'
+ (_keyword = _PyPegen_expect_token(p, 586)) // token='or'
&&
(c = conjunction_rule(p)) // conjunction
)
@@ -35436,7 +35535,7 @@ _tmp_157_rule(Parser *p)
Token * _keyword;
expr_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 585)) // token='and'
+ (_keyword = _PyPegen_expect_token(p, 587)) // token='and'
&&
(c = inversion_rule(p)) // inversion
)
@@ -35596,7 +35695,7 @@ _tmp_160_rule(Parser *p)
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 675)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 677)) // token='if'
&&
(z = disjunction_rule(p)) // disjunction
)
@@ -36178,7 +36277,7 @@ _tmp_172_rule(Parser *p)
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 673)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 675)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)