Diffstat (limited to 'Grammar/python.gram')
-rw-r--r--  Grammar/python.gram  163
1 file changed, 120 insertions(+), 43 deletions(-)
diff --git a/Grammar/python.gram b/Grammar/python.gram
index c7563aba0ad..d1af7704e9b 100644
--- a/Grammar/python.gram
+++ b/Grammar/python.gram
@@ -79,7 +79,7 @@ _PyPegen_parse(Parser *p)
# ~
# Commit to the current alternative, even if it fails to parse.
# &&e
-# Eager parse e. The parser will not backtrack and will immediately
+# Eager parse e. The parser will not backtrack and will immediately
# fail with SyntaxError if e cannot be parsed.
#
@@ -96,12 +96,12 @@ func_type[mod_ty]: '(' a=[type_expressions] ')' '->' b=expression NEWLINE* ENDMA
statements[asdl_stmt_seq*]: a=statement+ { _PyPegen_register_stmts(p, (asdl_stmt_seq*)_PyPegen_seq_flatten(p, a)) }
-statement[asdl_stmt_seq*]:
- | a=compound_stmt { (asdl_stmt_seq*)_PyPegen_singleton_seq(p, a) }
+statement[asdl_stmt_seq*]:
+ | a=compound_stmt { (asdl_stmt_seq*)_PyPegen_singleton_seq(p, a) }
| a[asdl_stmt_seq*]=simple_stmts { a }
single_compound_stmt[asdl_stmt_seq*]:
- | a=compound_stmt {
+ | a=compound_stmt {
_PyPegen_register_stmts(p, (asdl_stmt_seq*)_PyPegen_singleton_seq(p, a)) }
statement_newline[asdl_stmt_seq*]:
@@ -184,7 +184,9 @@ return_stmt[stmt_ty]:
| 'return' a=[star_expressions] { _PyAST_Return(a, EXTRA) }
raise_stmt[stmt_ty]:
- | 'raise' a=expression b=['from' z=expression { z }] { _PyAST_Raise(a, b, EXTRA) }
+ | 'raise' a=expression 'from' b=expression { _PyAST_Raise(a, b, EXTRA) }
+ | invalid_raise_stmt
+ | 'raise' a=expression { _PyAST_Raise(a, NULL, EXTRA) }
| 'raise' { _PyAST_Raise(NULL, NULL, EXTRA) }
pass_stmt[stmt_ty]:
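The raise_stmt rule now tries the explicit 'raise ... from ...' form first, then the new invalid_raise_stmt diagnostics, then the plain forms. A minimal sketch of the three valid shapes (illustrative Python, not taken from the patch; only the first statement would ever execute):

    def raise_examples(cause: Exception) -> None:
        raise ValueError("bad value") from cause   # 'raise' expression 'from' expression
        raise ValueError("bad value")              # 'raise' expression
        raise                                      # bare 'raise' re-raises the active exception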
@@ -233,15 +235,17 @@ import_from_targets[asdl_alias_seq*]:
import_from_as_names[asdl_alias_seq*]:
| a[asdl_alias_seq*]=','.import_from_as_name+ { a }
import_from_as_name[alias_ty]:
- | a=NAME b=['as' z=NAME { z }] { _PyAST_alias(a->v.Name.id,
- (b) ? ((expr_ty) b)->v.Name.id : NULL,
- EXTRA) }
+ | invalid_import_from_as_name
+ | a=NAME b=['as' z=NAME { z }] { _PyAST_alias(
+ a->v.Name.id, (b) ? ((expr_ty) b)->v.Name.id : NULL, EXTRA) }
+
dotted_as_names[asdl_alias_seq*]:
| a[asdl_alias_seq*]=','.dotted_as_name+ { a }
dotted_as_name[alias_ty]:
- | a=dotted_name b=['as' z=NAME { z }] { _PyAST_alias(a->v.Name.id,
- (b) ? ((expr_ty) b)->v.Name.id : NULL,
- EXTRA) }
+ | invalid_dotted_as_name
+ | a=dotted_name b=['as' z=NAME { z }] { _PyAST_alias(
+ a->v.Name.id, (b) ? ((expr_ty) b)->v.Name.id : NULL, EXTRA) }
+
dotted_name[expr_ty]:
| a=dotted_name '.' b=NAME { _PyPegen_join_names_with_dot(p, a, b) }
| NAME
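The import_from_as_name and dotted_as_name actions are reflowed and now fall back to new invalid_* rules; the alias nodes they build are unchanged. A small illustrative sketch of the shapes they parse:

    import os.path as osp                  # dotted_as_name: dotted_name 'as' NAME
    from collections import deque as dq    # import_from_as_name: NAME 'as' NAME

    print(osp.join("a", "b"), dq([1, 2]))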
@@ -441,18 +445,30 @@ try_stmt[stmt_ty]:
except_block[excepthandler_ty]:
| invalid_except_stmt_indent
- | 'except' e=expressions ':' b=block {
- _PyAST_ExceptHandler(e, NULL, b, EXTRA) }
- | 'except' e=expression 'as' t=NAME ':' b=block {
+ | 'except' e=expression ':' b=block {
+ _PyAST_ExceptHandler(e, NULL, b, EXTRA) }
+ | 'except' e=expression 'as' t=NAME ':' b=block {
_PyAST_ExceptHandler(e, ((expr_ty) t)->v.Name.id, b, EXTRA) }
+ | 'except' e=expressions ':' b=block {
+ CHECK_VERSION(
+ excepthandler_ty,
+ 14,
+ "except expressions without parentheses are",
+ _PyAST_ExceptHandler(e, NULL, b, EXTRA)) }
| 'except' ':' b=block { _PyAST_ExceptHandler(NULL, NULL, b, EXTRA) }
| invalid_except_stmt
except_star_block[excepthandler_ty]:
| invalid_except_star_stmt_indent
- | 'except' '*' e=expressions ':' b=block {
+ | 'except' '*' e=expression ':' b=block {
_PyAST_ExceptHandler(e, NULL, b, EXTRA) }
| 'except' '*' e=expression 'as' t=NAME ':' b=block {
_PyAST_ExceptHandler(e, ((expr_ty) t)->v.Name.id, b, EXTRA) }
+ | 'except' '*' e=expressions ':' b=block {
+ CHECK_VERSION(
+ excepthandler_ty,
+ 14,
+ "except expressions without parentheses are",
+ _PyAST_ExceptHandler(e, NULL, b, EXTRA)) }
| invalid_except_star_stmt
finally_block[asdl_stmt_seq*]:
| invalid_finally_stmt
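The except_block and except_star_block rules now also accept an unparenthesized list of exception types (the 'expressions' alternative), gated by CHECK_VERSION to 3.14; the single-expression and 'as' forms are unchanged. A sketch of the new spelling, assuming a 3.14 interpreter (the grammar still allows no 'as' on this alternative):

    def risky() -> None:
        raise ValueError("boom")

    try:
        risky()
    except TypeError, ValueError:   # unparenthesized exception types, 3.14+ only
        pass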
@@ -519,7 +535,7 @@ literal_pattern[pattern_ty]:
literal_expr[expr_ty]:
| signed_number !('+' | '-')
| complex_number
- | strings
+ | &(STRING|FSTRING_START|TSTRING_START) strings
| 'None' { _PyAST_Constant(Py_None, NULL, EXTRA) }
| 'True' { _PyAST_Constant(Py_True, NULL, EXTRA) }
| 'False' { _PyAST_Constant(Py_False, NULL, EXTRA) }
@@ -659,7 +675,7 @@ type_alias[stmt_ty]:
# Type parameter declaration
# --------------------------
-type_params[asdl_type_param_seq*]:
+type_params[asdl_type_param_seq*]:
| invalid_type_params
| '[' t=type_param_seq ']' {
CHECK_VERSION(asdl_type_param_seq *, 12, "Type parameter lists are", t) }
@@ -859,7 +875,7 @@ atom[expr_ty]:
| 'True' { _PyAST_Constant(Py_True, NULL, EXTRA) }
| 'False' { _PyAST_Constant(Py_False, NULL, EXTRA) }
| 'None' { _PyAST_Constant(Py_None, NULL, EXTRA) }
- | &(STRING|FSTRING_START) strings
+ | &(STRING|FSTRING_START|TSTRING_START) strings
| NUMBER
| &'(' (tuple | group | genexp)
| &'[' (list | listcomp)
@@ -935,7 +951,7 @@ fstring_middle[expr_ty]:
fstring_replacement_field[expr_ty]:
| '{' a=annotated_rhs debug_expr='='? conversion=[fstring_conversion] format=[fstring_full_format_spec] rbrace='}' {
_PyPegen_formatted_value(p, a, debug_expr, conversion, format, rbrace, EXTRA) }
- | invalid_replacement_field
+ | invalid_fstring_replacement_field
fstring_conversion[ResultTokenWithMetadata*]:
| conv_token="!" conv=NAME { _PyPegen_check_fstring_conversion(p, conv_token, conv) }
fstring_full_format_spec[ResultTokenWithMetadata*]:
@@ -946,8 +962,32 @@ fstring_format_spec[expr_ty]:
fstring[expr_ty]:
| a=FSTRING_START b=fstring_middle* c=FSTRING_END { _PyPegen_joined_str(p, a, (asdl_expr_seq*)b, c) }
+tstring_format_spec_replacement_field[expr_ty]:
+ | '{' a=annotated_rhs debug_expr='='? conversion=[fstring_conversion] format=[tstring_full_format_spec] rbrace='}' {
+ _PyPegen_formatted_value(p, a, debug_expr, conversion, format, rbrace, EXTRA) }
+ | invalid_tstring_replacement_field
+tstring_format_spec[expr_ty]:
+ | t=TSTRING_MIDDLE { _PyPegen_decoded_constant_from_token(p, t) }
+ | tstring_format_spec_replacement_field
+tstring_full_format_spec[ResultTokenWithMetadata*]:
+ | colon=':' spec=tstring_format_spec* { _PyPegen_setup_full_format_spec(p, colon, (asdl_expr_seq *) spec, EXTRA) }
+tstring_replacement_field[expr_ty]:
+ | '{' a=annotated_rhs debug_expr='='? conversion=[fstring_conversion] format=[tstring_full_format_spec] rbrace='}' {
+ _PyPegen_interpolation(p, a, debug_expr, conversion, format, rbrace, EXTRA) }
+ | invalid_tstring_replacement_field
+tstring_middle[expr_ty]:
+ | tstring_replacement_field
+ | t=TSTRING_MIDDLE { _PyPegen_constant_from_token(p, t) }
+tstring[expr_ty] (memo):
+ | a=TSTRING_START b=tstring_middle* c=TSTRING_END {
+ CHECK_VERSION(
+ expr_ty,
+ 14,
+ "t-strings are",
+ _PyPegen_template_str(p, a, (asdl_expr_seq*)b, c)) }
+
string[expr_ty]: s[Token*]=STRING { _PyPegen_constant_from_string(p, s) }
-strings[expr_ty] (memo): a[asdl_expr_seq*]=(fstring|string)+ { _PyPegen_concatenate_strings(p, a, EXTRA) }
+strings[expr_ty] (memo): a[asdl_expr_seq*]=(fstring|string|tstring)+ { _PyPegen_concatenate_strings(p, a, EXTRA) }
list[expr_ty]:
| '[' a=[star_named_expressions] ']' { _PyAST_List(a, Load, EXTRA) }
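The tstring_* rules mirror their fstring_* counterparts but build interpolation nodes via _PyPegen_interpolation/_PyPegen_template_str and are version-gated to 3.14. A minimal usage sketch, assuming the string.templatelib module described by PEP 750:

    from string.templatelib import Template   # assumed 3.14 location per PEP 750

    name = "world"
    tmpl = t"Hello {name}!"        # parsed by the new tstring rule
    assert isinstance(tmpl, Template)
    print(tmpl.strings)            # static text parts
    print(tmpl.values)             # interpolated values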
@@ -1212,6 +1252,8 @@ invalid_expression:
RAISE_SYNTAX_ERROR_KNOWN_LOCATION (a, "expected expression before 'if', but statement is given") }
| a='lambda' [lambda_params] b=':' &FSTRING_MIDDLE {
RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "f-string: lambda expressions are not allowed without parentheses") }
+ | a='lambda' [lambda_params] b=':' &TSTRING_MIDDLE {
+ RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "t-string: lambda expressions are not allowed without parentheses") }
invalid_named_expression(memo):
| a=expression ':=' expression {
@@ -1247,6 +1289,11 @@ invalid_ann_assign_target[expr_ty]:
| list
| tuple
| '(' a=invalid_ann_assign_target ')' { a }
+invalid_raise_stmt:
+ | a='raise' b='from' {
+ RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "did you forget an expression between 'raise' and 'from'?") }
+ | 'raise' expression a='from' {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "did you forget an expression after 'from'?") }
invalid_del_stmt:
| 'del' a=star_expressions {
RAISE_SYNTAX_ERROR_INVALID_TARGET(DEL_TARGETS, a) }
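The new invalid_raise_stmt alternatives attach targeted messages to two easy mistakes around 'from'. A hypothetical spot-check on a 3.14 interpreter, using compile() so the SyntaxError can be inspected:

    for src in ("raise from exc", "raise ValueError('x') from"):
        try:
            compile(src, "<test>", "exec")
        except SyntaxError as err:
            print(err.msg)   # expected wording per the rules above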
@@ -1265,7 +1312,7 @@ invalid_dict_comprehension:
RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "dict unpacking cannot be used in dict comprehension") }
invalid_parameters:
| a="/" ',' {
- RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "at least one argument must precede /") }
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "at least one parameter must precede /") }
| (slash_no_default | slash_with_default) param_maybe_default* a='/' {
RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "/ may appear only once") }
| slash_no_default? param_no_default* invalid_parameters_helper a=param_no_default {
@@ -1279,21 +1326,21 @@ invalid_parameters:
invalid_default:
| a='=' &(')'|',') { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "expected default value expression") }
invalid_star_etc:
- | a='*' (')' | ',' (')' | '**')) { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "named arguments must follow bare *") }
+ | a='*' (')' | ',' (')' | '**')) { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "named parameters must follow bare *") }
| '*' ',' TYPE_COMMENT { RAISE_SYNTAX_ERROR("bare * has associated type comment") }
- | '*' param a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "var-positional argument cannot have default value") }
+ | '*' param a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "var-positional parameter cannot have default value") }
| '*' (param_no_default | ',') param_maybe_default* a='*' (param_no_default | ',') {
- RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "* argument may appear only once") }
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "* may appear only once") }
invalid_kwds:
- | '**' param a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "var-keyword argument cannot have default value") }
- | '**' param ',' a=param { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "arguments cannot follow var-keyword argument") }
- | '**' param ',' a[Token*]=('*'|'**'|'/') { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "arguments cannot follow var-keyword argument") }
+ | '**' param a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "var-keyword parameter cannot have default value") }
+ | '**' param ',' a=param { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "parameters cannot follow var-keyword parameter") }
+ | '**' param ',' a[Token*]=('*'|'**'|'/') { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "parameters cannot follow var-keyword parameter") }
invalid_parameters_helper: # This is only there to avoid type errors
| a=slash_with_default { _PyPegen_singleton_seq(p, a) }
| param_with_default+
invalid_lambda_parameters:
| a="/" ',' {
- RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "at least one argument must precede /") }
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "at least one parameter must precede /") }
| (lambda_slash_no_default | lambda_slash_with_default) lambda_param_maybe_default* a='/' {
RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "/ may appear only once") }
| lambda_slash_no_default? lambda_param_no_default* invalid_lambda_parameters_helper a=lambda_param_no_default {
@@ -1308,14 +1355,14 @@ invalid_lambda_parameters_helper:
| a=lambda_slash_with_default { _PyPegen_singleton_seq(p, a) }
| lambda_param_with_default+
invalid_lambda_star_etc:
- | '*' (':' | ',' (':' | '**')) { RAISE_SYNTAX_ERROR("named arguments must follow bare *") }
- | '*' lambda_param a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "var-positional argument cannot have default value") }
+ | '*' (':' | ',' (':' | '**')) { RAISE_SYNTAX_ERROR("named parameters must follow bare *") }
+ | '*' lambda_param a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "var-positional parameter cannot have default value") }
| '*' (lambda_param_no_default | ',') lambda_param_maybe_default* a='*' (lambda_param_no_default | ',') {
- RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "* argument may appear only once") }
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "* may appear only once") }
invalid_lambda_kwds:
- | '**' lambda_param a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "var-keyword argument cannot have default value") }
- | '**' lambda_param ',' a=lambda_param { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "arguments cannot follow var-keyword argument") }
- | '**' lambda_param ',' a[Token*]=('*'|'**'|'/') { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "arguments cannot follow var-keyword argument") }
+ | '**' lambda_param a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "var-keyword parameter cannot have default value") }
+ | '**' lambda_param ',' a=lambda_param { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "parameters cannot follow var-keyword parameter") }
+ | '**' lambda_param ',' a[Token*]=('*'|'**'|'/') { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "parameters cannot follow var-keyword parameter") }
invalid_double_type_comments:
| TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT {
RAISE_SYNTAX_ERROR("Cannot have two type comments on def") }
@@ -1339,13 +1386,21 @@ invalid_group:
invalid_import:
| a='import' ','.dotted_name+ 'from' dotted_name {
RAISE_SYNTAX_ERROR_STARTING_FROM(a, "Did you mean to use 'from ... import ...' instead?") }
- | 'import' token=NEWLINE {
+ | 'import' token=NEWLINE {
RAISE_SYNTAX_ERROR_STARTING_FROM(token, "Expected one or more names after 'import'") }
+invalid_dotted_as_name:
+ | dotted_name 'as' !(NAME (',' | ')' | NEWLINE)) a=expression {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a,
+ "cannot use %s as import target", _PyPegen_get_expr_name(a)) }
+invalid_import_from_as_name:
+ | NAME 'as' !(NAME (',' | ')' | NEWLINE)) a=expression {
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a,
+ "cannot use %s as import target", _PyPegen_get_expr_name(a)) }
invalid_import_from_targets:
| import_from_as_names ',' NEWLINE {
RAISE_SYNTAX_ERROR("trailing comma not allowed without surrounding parentheses") }
- | token=NEWLINE {
+ | token=NEWLINE {
RAISE_SYNTAX_ERROR_STARTING_FROM(token, "Expected one or more names after 'import'") }
invalid_with_stmt:
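invalid_dotted_as_name and invalid_import_from_as_name add a dedicated message when the target after 'as' is not a plain NAME. A hypothetical demonstration on an interpreter with this grammar:

    for src in ("import os as 'alias'", "from os import path as 1"):
        try:
            compile(src, "<test>", "exec")
        except SyntaxError as err:
            print(err.msg)   # expected: "cannot use ... as import target"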
@@ -1370,7 +1425,7 @@ invalid_except_stmt:
RAISE_SYNTAX_ERROR_STARTING_FROM(a, "multiple exception types must be parenthesized when using 'as'") }
| a='except' expression ['as' NAME ] NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
| a='except' NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
- | 'except' expression 'as' a=expression {
+ | 'except' expression 'as' a=expression ':' block {
RAISE_SYNTAX_ERROR_KNOWN_LOCATION(
a, "cannot use except statement with %s", _PyPegen_get_expr_name(a)) }
invalid_except_star_stmt:
@@ -1378,7 +1433,7 @@ invalid_except_star_stmt:
RAISE_SYNTAX_ERROR_STARTING_FROM(a, "multiple exception types must be parenthesized when using 'as'") }
| a='except' '*' expression ['as' NAME ] NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") }
| a='except' '*' (NEWLINE | ':') { RAISE_SYNTAX_ERROR("expected one or more exception types") }
- | 'except' '*' expression 'as' a=expression {
+ | 'except' '*' expression 'as' a=expression ':' block {
RAISE_SYNTAX_ERROR_KNOWN_LOCATION(
a, "cannot use except* statement with %s", _PyPegen_get_expr_name(a)) }
invalid_finally_stmt:
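Adding ':' block to these alternatives makes the "cannot use except ... with %s" diagnostics fire only once a complete handler has been seen. A hypothetical check (message wording per the rule above):

    src = "try:\n    pass\nexcept ValueError as obj.attr:\n    pass\n"
    try:
        compile(src, "<test>", "exec")
    except SyntaxError as err:
        print(err.msg)   # expected: "cannot use except statement with attribute"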
@@ -1454,17 +1509,17 @@ invalid_starred_expression_unpacking:
invalid_starred_expression:
| '*' { RAISE_SYNTAX_ERROR("Invalid star expression") }
-invalid_replacement_field:
+invalid_fstring_replacement_field:
| '{' a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "f-string: valid expression required before '='") }
| '{' a='!' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "f-string: valid expression required before '!'") }
| '{' a=':' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "f-string: valid expression required before ':'") }
| '{' a='}' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "f-string: valid expression required before '}'") }
- | '{' !annotated_rhs { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting a valid expression after '{'")}
+ | '{' !annotated_rhs { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting a valid expression after '{'") }
| '{' annotated_rhs !('=' | '!' | ':' | '}') {
PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting '=', or '!', or ':', or '}'") }
| '{' annotated_rhs '=' !('!' | ':' | '}') {
PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting '!', or ':', or '}'") }
- | '{' annotated_rhs '='? invalid_conversion_character
+ | '{' annotated_rhs '='? invalid_fstring_conversion_character
| '{' annotated_rhs '='? ['!' NAME] !(':' | '}') {
PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting ':' or '}'") }
| '{' annotated_rhs '='? ['!' NAME] ':' fstring_format_spec* !'}' {
@@ -1472,10 +1527,32 @@ invalid_replacement_field:
| '{' annotated_rhs '='? ['!' NAME] !'}' {
PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: expecting '}'") }
-invalid_conversion_character:
+invalid_fstring_conversion_character:
| '!' &(':' | '}') { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: missing conversion character") }
| '!' !NAME { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("f-string: invalid conversion character") }
+invalid_tstring_replacement_field:
+ | '{' a='=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "t-string: valid expression required before '='") }
+ | '{' a='!' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "t-string: valid expression required before '!'") }
+ | '{' a=':' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "t-string: valid expression required before ':'") }
+ | '{' a='}' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "t-string: valid expression required before '}'") }
+ | '{' !annotated_rhs { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("t-string: expecting a valid expression after '{'") }
+ | '{' annotated_rhs !('=' | '!' | ':' | '}') {
+ PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("t-string: expecting '=', or '!', or ':', or '}'") }
+ | '{' annotated_rhs '=' !('!' | ':' | '}') {
+ PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("t-string: expecting '!', or ':', or '}'") }
+ | '{' annotated_rhs '='? invalid_tstring_conversion_character
+ | '{' annotated_rhs '='? ['!' NAME] !(':' | '}') {
+ PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("t-string: expecting ':' or '}'") }
+ | '{' annotated_rhs '='? ['!' NAME] ':' fstring_format_spec* !'}' {
+ PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("t-string: expecting '}', or format specs") }
+ | '{' annotated_rhs '='? ['!' NAME] !'}' {
+ PyErr_Occurred() ? NULL : RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("t-string: expecting '}'") }
+
+invalid_tstring_conversion_character:
+ | '!' &(':' | '}') { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("t-string: missing conversion character") }
+ | '!' !NAME { RAISE_SYNTAX_ERROR_ON_NEXT_TOKEN("t-string: invalid conversion character") }
+
invalid_arithmetic:
| sum ('+'|'-'|'*'|'/'|'%'|'//'|'@') a='not' b=inversion { RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "'not' after an operator must be parenthesized") }
invalid_factor:
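The invalid_tstring_* rules copy the f-string diagnostics with t-string wording. A hypothetical check on a 3.14 interpreter (older interpreters reject the t prefix earlier, with a different error):

    try:
        compile('t"{x!}"', "<test>", "eval")
    except SyntaxError as err:
        print(err.msg)   # expected: "t-string: missing conversion character"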
@@ -1484,5 +1561,5 @@ invalid_factor:
invalid_type_params:
| '[' token=']' {
RAISE_SYNTAX_ERROR_STARTING_FROM(
- token,
+ token,
"Type parameter list cannot be empty")}