43 files changed, 1280 insertions, 939 deletions
diff --git a/.github/workflows/posix-deps-apt.sh b/.github/workflows/posix-deps-apt.sh index d5538cd9367..7773222af5d 100755 --- a/.github/workflows/posix-deps-apt.sh +++ b/.github/workflows/posix-deps-apt.sh @@ -17,6 +17,7 @@ apt-get -yq install \ libreadline6-dev \ libsqlite3-dev \ libssl-dev \ + libzstd-dev \ lzma \ lzma-dev \ strace \ diff --git a/Doc/c-api/arg.rst b/Doc/c-api/arg.rst index 3bbc990b632..49dbc8d71cc 100644 --- a/Doc/c-api/arg.rst +++ b/Doc/c-api/arg.rst @@ -685,6 +685,7 @@ Building values ``p`` (:class:`bool`) [int] Convert a C :c:expr:`int` to a Python :class:`bool` object. + .. versionadded:: 3.14 ``c`` (:class:`bytes` of length 1) [char] diff --git a/Doc/c-api/exceptions.rst b/Doc/c-api/exceptions.rst index c8e1b5c2461..885dbeb7530 100644 --- a/Doc/c-api/exceptions.rst +++ b/Doc/c-api/exceptions.rst @@ -982,6 +982,7 @@ the variables: .. index:: single: PyExc_BaseException (C var) + single: PyExc_BaseExceptionGroup (C var) single: PyExc_Exception (C var) single: PyExc_ArithmeticError (C var) single: PyExc_AssertionError (C var) @@ -1041,6 +1042,8 @@ the variables: +=========================================+=================================+==========+ | :c:data:`PyExc_BaseException` | :exc:`BaseException` | [1]_ | +-----------------------------------------+---------------------------------+----------+ +| :c:data:`PyExc_BaseExceptionGroup` | :exc:`BaseExceptionGroup` | [1]_ | ++-----------------------------------------+---------------------------------+----------+ | :c:data:`PyExc_Exception` | :exc:`Exception` | [1]_ | +-----------------------------------------+---------------------------------+----------+ | :c:data:`PyExc_ArithmeticError` | :exc:`ArithmeticError` | [1]_ | @@ -1164,6 +1167,9 @@ the variables: .. versionadded:: 3.6 :c:data:`PyExc_ModuleNotFoundError`. +.. versionadded:: 3.11 + :c:data:`PyExc_BaseExceptionGroup`. + These are compatibility aliases to :c:data:`PyExc_OSError`: .. index:: @@ -1207,6 +1213,7 @@ the variables: single: PyExc_Warning (C var) single: PyExc_BytesWarning (C var) single: PyExc_DeprecationWarning (C var) + single: PyExc_EncodingWarning (C var) single: PyExc_FutureWarning (C var) single: PyExc_ImportWarning (C var) single: PyExc_PendingDeprecationWarning (C var) @@ -1225,6 +1232,8 @@ the variables: +------------------------------------------+---------------------------------+----------+ | :c:data:`PyExc_DeprecationWarning` | :exc:`DeprecationWarning` | | +------------------------------------------+---------------------------------+----------+ +| :c:data:`PyExc_EncodingWarning` | :exc:`EncodingWarning` | | ++------------------------------------------+---------------------------------+----------+ | :c:data:`PyExc_FutureWarning` | :exc:`FutureWarning` | | +------------------------------------------+---------------------------------+----------+ | :c:data:`PyExc_ImportWarning` | :exc:`ImportWarning` | | @@ -1245,6 +1254,9 @@ the variables: .. versionadded:: 3.2 :c:data:`PyExc_ResourceWarning`. +.. versionadded:: 3.10 + :c:data:`PyExc_EncodingWarning`. + Notes: .. [3] diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst index 45f50ba5f97..07fdcfd9729 100644 --- a/Doc/c-api/unicode.rst +++ b/Doc/c-api/unicode.rst @@ -191,6 +191,22 @@ access to internal read-only data of Unicode objects: .. versionadded:: 3.2 +.. c:function:: Py_hash_t PyUnstable_Unicode_GET_CACHED_HASH(PyObject *str) + + If the hash of *str*, as returned by :c:func:`PyObject_Hash`, has been + cached and is immediately available, return it. 
+ Otherwise, return ``-1`` *without* setting an exception. + + If *str* is not a string (that is, if ``PyUnicode_Check(obj)`` + is false), the behavior is undefined. + + This function never fails with an exception. + + Note that there are no guarantees on when an object's hash is cached, + and the (non-)existence of a cached hash does not imply that the string has + any other properties. + + Unicode Character Properties """""""""""""""""""""""""""" diff --git a/Doc/conf.py b/Doc/conf.py index 7fadad66cb3..b08f5452901 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -234,6 +234,7 @@ nitpick_ignore += [ ('c:data', 'PyExc_AssertionError'), ('c:data', 'PyExc_AttributeError'), ('c:data', 'PyExc_BaseException'), + ('c:data', 'PyExc_BaseExceptionGroup'), ('c:data', 'PyExc_BlockingIOError'), ('c:data', 'PyExc_BrokenPipeError'), ('c:data', 'PyExc_BufferError'), @@ -287,6 +288,7 @@ nitpick_ignore += [ # C API: Standard Python warning classes ('c:data', 'PyExc_BytesWarning'), ('c:data', 'PyExc_DeprecationWarning'), + ('c:data', 'PyExc_EncodingWarning'), ('c:data', 'PyExc_FutureWarning'), ('c:data', 'PyExc_ImportWarning'), ('c:data', 'PyExc_PendingDeprecationWarning'), diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst index 8e74c6c9dee..2ee4450698a 100644 --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -714,10 +714,16 @@ item in the :attr:`~Structure._fields_` tuples:: ... ("second_16", c_int, 16)] ... >>> print(Int.first_16) - <Field type=c_long, ofs=0:0, bits=16> + <ctypes.CField 'first_16' type=c_int, ofs=0, bit_size=16, bit_offset=0> >>> print(Int.second_16) - <Field type=c_long, ofs=0:16, bits=16> - >>> + <ctypes.CField 'second_16' type=c_int, ofs=0, bit_size=16, bit_offset=16> + +It is important to note that bit field allocation and layout in memory are not +defined by the C standard; their implementation is compiler-specific. +By default, Python will attempt to match the behavior of a "native" compiler +for the current platform. +See the :attr:`~Structure._layout_` attribute for details on the default +behavior and how to change it. .. _ctypes-arrays: diff --git a/Doc/tutorial/modules.rst b/Doc/tutorial/modules.rst index de7aa0e2342..47bf7547b4a 100644 --- a/Doc/tutorial/modules.rst +++ b/Doc/tutorial/modules.rst @@ -27,14 +27,16 @@ called :file:`fibo.py` in the current directory with the following contents:: # Fibonacci numbers module - def fib(n): # write Fibonacci series up to n + def fib(n): + """Write Fibonacci series up to n.""" a, b = 0, 1 while a < n: print(a, end=' ') a, b = b, a+b print() - def fib2(n): # return Fibonacci series up to n + def fib2(n): + """Return Fibonacci series up to n.""" result = [] a, b = 0, 1 while a < n: diff --git a/Doc/whatsnew/3.15.rst b/Doc/whatsnew/3.15.rst index daf3e8fb6c2..2f8335a895c 100644 --- a/Doc/whatsnew/3.15.rst +++ b/Doc/whatsnew/3.15.rst @@ -134,6 +134,13 @@ shelve (Contributed by Andrea Oliveri in :gh:`134004`.) +sqlite3 +------- + +* Support SQL keyword completion in the :mod:`sqlite3` command-line interface. + (Contributed by Long Tan in :gh:`133393`.) + + ssl --- @@ -304,6 +311,10 @@ New features input string contains non-ASCII characters. (Contributed by Victor Stinner in :gh:`133968`.) +* Add :c:func:`PyUnstable_Unicode_GET_CACHED_HASH` to get the cached hash of + a string. See the documentation for caveats.
+ (Contributed by Petr Viktorin in :gh:`131510`.) + Porting to Python 3.15 ---------------------- diff --git a/Grammar/python.gram b/Grammar/python.gram index de435537095..a5ab769910b 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -184,6 +184,7 @@ return_stmt[stmt_ty]: | 'return' a=[star_expressions] { _PyAST_Return(a, EXTRA) } raise_stmt[stmt_ty]: + | invalid_raise_stmt | 'raise' a=expression b=['from' z=expression { z }] { _PyAST_Raise(a, b, EXTRA) } | 'raise' { _PyAST_Raise(NULL, NULL, EXTRA) } @@ -1287,6 +1288,11 @@ invalid_ann_assign_target[expr_ty]: | list | tuple | '(' a=invalid_ann_assign_target ')' { a } +invalid_raise_stmt: + | a='raise' b='from' { + RAISE_SYNTAX_ERROR_KNOWN_RANGE(a, b, "did you forget an expression between 'raise' and 'from'?") } + | 'raise' expression a='from' { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "did you forget an expression after 'from'?") } invalid_del_stmt: | 'del' a=star_expressions { RAISE_SYNTAX_ERROR_INVALID_TARGET(DEL_TARGETS, a) } diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 3d0414f5291..7c1aac9696d 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -300,6 +300,17 @@ static inline Py_ssize_t PyUnicode_GET_LENGTH(PyObject *op) { } #define PyUnicode_GET_LENGTH(op) PyUnicode_GET_LENGTH(_PyObject_CAST(op)) +/* Returns the cached hash, or -1 if not cached yet. */ +static inline Py_hash_t +PyUnstable_Unicode_GET_CACHED_HASH(PyObject *op) { + assert(PyUnicode_Check(op)); +#ifdef Py_GIL_DISABLED + return _Py_atomic_load_ssize_relaxed(&_PyASCIIObject_CAST(op)->hash); +#else + return _PyASCIIObject_CAST(op)->hash; +#endif +} + /* Write into the canonical representation, this function does not do any sanity checks and is intended for usage in loops. The caller should cache the kind and data pointers obtained from other function calls. diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index 092feeb40b0..239177deb4a 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -353,7 +353,8 @@ PyAPI_FUNC(_PyStackRef) _PyFloat_FromDouble_ConsumeInputs(_PyStackRef left, _PyS extern int _PyRunRemoteDebugger(PyThreadState *tstate); #endif -_PyStackRef _PyForIter_NextWithIndex(PyObject *seq, _PyStackRef index); +PyAPI_FUNC(_PyStackRef) +_PyForIter_VirtualIteratorNext(PyThreadState* tstate, struct _PyInterpreterFrame* frame, _PyStackRef iter, _PyStackRef *index_ptr); #ifdef __cplusplus } diff --git a/Include/internal/pycore_stackref.h b/Include/internal/pycore_stackref.h index f2ecc30b053..87914767252 100644 --- a/Include/internal/pycore_stackref.h +++ b/Include/internal/pycore_stackref.h @@ -62,14 +62,15 @@ PyAPI_FUNC(void) _Py_stackref_record_borrow(_PyStackRef ref, const char *filenam extern void _Py_stackref_associate(PyInterpreterState *interp, PyObject *obj, _PyStackRef ref); static const _PyStackRef PyStackRef_NULL = { .index = 0 }; +static const _PyStackRef PyStackRef_ERROR = { .index = 2 }; // Use the first 3 even numbers for None, True and False.
// Odd numbers are reserved for (tagged) integers -#define PyStackRef_None ((_PyStackRef){ .index = 2 } ) -#define PyStackRef_False ((_PyStackRef){ .index = 4 }) -#define PyStackRef_True ((_PyStackRef){ .index = 6 }) +#define PyStackRef_None ((_PyStackRef){ .index = 4 } ) +#define PyStackRef_False ((_PyStackRef){ .index = 6 }) +#define PyStackRef_True ((_PyStackRef){ .index = 8 }) -#define INITIAL_STACKREF_INDEX 8 +#define INITIAL_STACKREF_INDEX 10 static inline int PyStackRef_IsNull(_PyStackRef ref) @@ -77,6 +78,19 @@ PyStackRef_IsNull(_PyStackRef ref) return ref.index == 0; } +static inline bool +PyStackRef_IsError(_PyStackRef ref) +{ + return ref.index == 2; +} + +static inline bool +PyStackRef_IsValid(_PyStackRef ref) +{ + /* Invalid values are ERROR and NULL */ + return !PyStackRef_IsError(ref) && !PyStackRef_IsNull(ref); +} + static inline int PyStackRef_IsTrue(_PyStackRef ref) { @@ -104,6 +118,7 @@ PyStackRef_IsTaggedInt(_PyStackRef ref) static inline PyObject * _PyStackRef_AsPyObjectBorrow(_PyStackRef ref, const char *filename, int linenumber) { + assert(!PyStackRef_IsError(ref)); assert(!PyStackRef_IsTaggedInt(ref)); _Py_stackref_record_borrow(ref, filename, linenumber); return _Py_stackref_get_object(ref); @@ -155,6 +170,7 @@ _PyStackRef_CLOSE(_PyStackRef ref, const char *filename, int linenumber) static inline void _PyStackRef_XCLOSE(_PyStackRef ref, const char *filename, int linenumber) { + assert(!PyStackRef_IsError(ref)); if (PyStackRef_IsNull(ref)) { return; } @@ -165,6 +181,7 @@ _PyStackRef_XCLOSE(_PyStackRef ref, const char *filename, int linenumber) static inline _PyStackRef _PyStackRef_DUP(_PyStackRef ref, const char *filename, int linenumber) { + assert(!PyStackRef_IsError(ref)); if (PyStackRef_IsTaggedInt(ref)) { return ref; } @@ -241,9 +258,25 @@ PyStackRef_IsNullOrInt(_PyStackRef ref); #else #define Py_INT_TAG 3 +#define Py_TAG_INVALID 2 #define Py_TAG_REFCNT 1 #define Py_TAG_BITS 3 +static const _PyStackRef PyStackRef_ERROR = { .bits = Py_TAG_INVALID }; + +static inline bool +PyStackRef_IsError(_PyStackRef ref) +{ + return ref.bits == Py_TAG_INVALID; +} + +static inline bool +PyStackRef_IsValid(_PyStackRef ref) +{ + /* Invalid values are ERROR and NULL */ + return ref.bits >= Py_INT_TAG; +} + static inline bool PyStackRef_IsTaggedInt(_PyStackRef i) { @@ -284,6 +317,7 @@ PyStackRef_IncrementTaggedIntNoOverflow(_PyStackRef ref) static const _PyStackRef PyStackRef_NULL = { .bits = Py_TAG_DEFERRED}; + #define PyStackRef_IsNull(stackref) ((stackref).bits == PyStackRef_NULL.bits) #define PyStackRef_True ((_PyStackRef){.bits = ((uintptr_t)&_Py_TrueStruct) | Py_TAG_DEFERRED }) #define PyStackRef_False ((_PyStackRef){.bits = ((uintptr_t)&_Py_FalseStruct) | Py_TAG_DEFERRED }) diff --git a/Lib/os.py b/Lib/os.py index 266e40b56f6..643a7b2f581 100644 --- a/Lib/os.py +++ b/Lib/os.py @@ -118,6 +118,7 @@ if _exists("_have_functions"): _add("HAVE_FCHMODAT", "chmod") _add("HAVE_FCHOWNAT", "chown") _add("HAVE_FSTATAT", "stat") + _add("HAVE_LSTAT", "lstat") _add("HAVE_FUTIMESAT", "utime") _add("HAVE_LINKAT", "link") _add("HAVE_MKDIRAT", "mkdir") diff --git a/Lib/sqlite3/__main__.py b/Lib/sqlite3/__main__.py index c2fa23c46cf..9e74b49ee82 100644 --- a/Lib/sqlite3/__main__.py +++ b/Lib/sqlite3/__main__.py @@ -12,6 +12,8 @@ from code import InteractiveConsole from textwrap import dedent from _colorize import get_theme, theme_no_color +from ._completer import completer + def execute(c, sql, suppress_errors=True, theme=theme_no_color): """Helper that wraps execution of SQL code. 
@@ -136,12 +138,9 @@ def main(*args): execute(con, args.sql, suppress_errors=False, theme=theme) else: # No SQL provided; start the REPL. - console = SqliteInteractiveConsole(con, use_color=True) - try: - import readline # noqa: F401 - except ImportError: - pass - console.interact(banner, exitmsg="") + with completer(): + console = SqliteInteractiveConsole(con, use_color=True) + console.interact(banner, exitmsg="") finally: con.close() diff --git a/Lib/sqlite3/_completer.py b/Lib/sqlite3/_completer.py new file mode 100644 index 00000000000..f21ef69cad6 --- /dev/null +++ b/Lib/sqlite3/_completer.py @@ -0,0 +1,42 @@ +from contextlib import contextmanager + +try: + from _sqlite3 import SQLITE_KEYWORDS +except ImportError: + SQLITE_KEYWORDS = () + +_completion_matches = [] + + +def _complete(text, state): + global _completion_matches + + if state == 0: + text_upper = text.upper() + _completion_matches = [c for c in SQLITE_KEYWORDS if c.startswith(text_upper)] + try: + return _completion_matches[state] + " " + except IndexError: + return None + + +@contextmanager +def completer(): + try: + import readline + except ImportError: + yield + return + + old_completer = readline.get_completer() + try: + readline.set_completer(_complete) + if readline.backend == "editline": + # libedit uses "^I" instead of "tab" + command_string = "bind ^I rl_complete" + else: + command_string = "tab: complete" + readline.parse_and_bind(command_string) + yield + finally: + readline.set_completer(old_completer) diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index f9b66b88d3d..48e74adcce3 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -945,6 +945,31 @@ def check_sizeof(test, o, size): % (type(o), result, size) test.assertEqual(result, size, msg) +def subTests(arg_names, arg_values, /, *, _do_cleanups=False): + """Run multiple subtests with different parameters. + """ + single_param = False + if isinstance(arg_names, str): + arg_names = arg_names.replace(',',' ').split() + if len(arg_names) == 1: + single_param = True + arg_values = tuple(arg_values) + def decorator(func): + if isinstance(func, type): + raise TypeError('subTests() can only decorate methods, not classes') + @functools.wraps(func) + def wrapper(self, /, *args, **kwargs): + for values in arg_values: + if single_param: + values = (values,) + subtest_kwargs = dict(zip(arg_names, values)) + with self.subTest(**subtest_kwargs): + func(self, *args, **kwargs, **subtest_kwargs) + if _do_cleanups: + self.doCleanups() + return wrapper + return decorator + #======================================================================= # Decorator/context manager for running a code in a different locale, # correctly resetting it afterwards. 
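For reference, the new test.support.subTests decorator defined above can be used roughly like this (a minimal sketch; the TestSquares class and its parameter values are hypothetical and only illustrate the decorator, mirroring how the refactored tests later in this commit use it):

    import unittest
    from test import support

    class TestSquares(unittest.TestCase):
        # 'n,expected' is split on commas/whitespace, and each value tuple
        # is zipped against those names, one self.subTest() per tuple.
        @support.subTests('n,expected', [
            (2, 4),
            (3, 9),
        ])
        def test_square(self, n, expected):
            self.assertEqual(n * n, expected)

Each failing parameter set is reported as its own subTest instead of aborting the method on the first failure, which is what lets the refactored tests below drop their explicit for-loops.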
diff --git a/Lib/test/test_capi/test_opt.py b/Lib/test/test_capi/test_opt.py index cb6eae48414..a292ebcc7f4 100644 --- a/Lib/test/test_capi/test_opt.py +++ b/Lib/test/test_capi/test_opt.py @@ -1183,6 +1183,17 @@ class TestUopsOptimization(unittest.TestCase): self.assertIsNotNone(ex) self.assertIn("_RETURN_GENERATOR", get_opnames(ex)) + def test_for_iter(self): + def testfunc(n): + t = 0 + for i in set(range(n)): + t += i + return t + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, TIER2_THRESHOLD * (TIER2_THRESHOLD - 1) // 2) + self.assertIsNotNone(ex) + self.assertIn("_FOR_ITER_TIER_TWO", get_opnames(ex)) + @unittest.skip("Tracing into generators currently isn't supported.") def test_for_iter_gen(self): def gen(n): diff --git a/Lib/test/test_capi/test_unicode.py b/Lib/test/test_capi/test_unicode.py index c8be4f3faa9..6a9c60f3a6d 100644 --- a/Lib/test/test_capi/test_unicode.py +++ b/Lib/test/test_capi/test_unicode.py @@ -1739,6 +1739,20 @@ class CAPITest(unittest.TestCase): # Check that the second call returns the same result self.assertEqual(getargs_s_hash(s), chr(k).encode() * (i + 1)) + @support.cpython_only + @unittest.skipIf(_testcapi is None, 'need _testcapi module') + def test_GET_CACHED_HASH(self): + from _testcapi import unicode_GET_CACHED_HASH + content_bytes = b'some new string' + # avoid parser interning & constant folding + obj = str(content_bytes, 'ascii') + # impl detail: fresh strings do not have cached hash + self.assertEqual(unicode_GET_CACHED_HASH(obj), -1) + # impl detail: adding string to a dict caches its hash + {obj: obj} + # impl detail: ASCII string hashes are equal to bytes ones + self.assertEqual(unicode_GET_CACHED_HASH(obj), hash(content_bytes)) + class PyUnicodeWriterTest(unittest.TestCase): def create_writer(self, size): diff --git a/Lib/test/test_http_cookiejar.py b/Lib/test/test_http_cookiejar.py index 6bc33b15ec3..04cb440cd4c 100644 --- a/Lib/test/test_http_cookiejar.py +++ b/Lib/test/test_http_cookiejar.py @@ -4,6 +4,7 @@ import os import stat import sys import re +from test import support from test.support import os_helper from test.support import warnings_helper import time @@ -105,8 +106,7 @@ class DateTimeTests(unittest.TestCase): self.assertEqual(http2time(s.lower()), test_t, s.lower()) self.assertEqual(http2time(s.upper()), test_t, s.upper()) - def test_http2time_garbage(self): - for test in [ + @support.subTests('test', [ '', 'Garbage', 'Mandag 16. September 1996', @@ -121,10 +121,9 @@ class DateTimeTests(unittest.TestCase): '08-01-3697739', '09 Feb 19942632 22:23:32 GMT', 'Wed, 09 Feb 1994834 22:23:32 GMT', - ]: - self.assertIsNone(http2time(test), - "http2time(%s) is not None\n" - "http2time(test) %s" % (test, http2time(test))) + ]) + def test_http2time_garbage(self, test): + self.assertIsNone(http2time(test)) def test_http2time_redos_regression_actually_completes(self): # LOOSE_HTTP_DATE_RE was vulnerable to malicious input which caused catastrophic backtracking (REDoS). @@ -149,9 +148,7 @@ class DateTimeTests(unittest.TestCase): self.assertEqual(parse_date("1994-02-03 19:45:29 +0530"), (1994, 2, 3, 14, 15, 29)) - def test_iso2time_formats(self): - # test iso2time for supported dates. 
- tests = [ + @support.subTests('s', [ '1994-02-03 00:00:00 -0000', # ISO 8601 format '1994-02-03 00:00:00 +0000', # ISO 8601 format '1994-02-03 00:00:00', # zone is optional @@ -164,16 +161,15 @@ class DateTimeTests(unittest.TestCase): # A few tests with extra space at various places ' 1994-02-03 ', ' 1994-02-03T00:00:00 ', - ] - + ]) + def test_iso2time_formats(self, s): + # test iso2time for supported dates. test_t = 760233600 # assume broken POSIX counting of seconds - for s in tests: - self.assertEqual(iso2time(s), test_t, s) - self.assertEqual(iso2time(s.lower()), test_t, s.lower()) - self.assertEqual(iso2time(s.upper()), test_t, s.upper()) + self.assertEqual(iso2time(s), test_t, s) + self.assertEqual(iso2time(s.lower()), test_t, s.lower()) + self.assertEqual(iso2time(s.upper()), test_t, s.upper()) - def test_iso2time_garbage(self): - for test in [ + @support.subTests('test', [ '', 'Garbage', 'Thursday, 03-Feb-94 00:00:00 GMT', @@ -186,9 +182,9 @@ class DateTimeTests(unittest.TestCase): '01-01-1980 00:00:62', '01-01-1980T00:00:62', '19800101T250000Z', - ]: - self.assertIsNone(iso2time(test), - "iso2time(%r)" % test) + ]) + def test_iso2time_garbage(self, test): + self.assertIsNone(iso2time(test)) def test_iso2time_performance_regression(self): # If ISO_DATE_RE regresses to quadratic complexity, this test will take a very long time to succeed. @@ -199,24 +195,23 @@ class DateTimeTests(unittest.TestCase): class HeaderTests(unittest.TestCase): - def test_parse_ns_headers(self): - # quotes should be stripped - expected = [[('foo', 'bar'), ('expires', 2209069412), ('version', '0')]] - for hdr in [ + @support.subTests('hdr', [ 'foo=bar; expires=01 Jan 2040 22:23:32 GMT', 'foo=bar; expires="01 Jan 2040 22:23:32 GMT"', - ]: - self.assertEqual(parse_ns_headers([hdr]), expected) - - def test_parse_ns_headers_version(self): - + ]) + def test_parse_ns_headers(self, hdr): # quotes should be stripped - expected = [[('foo', 'bar'), ('version', '1')]] - for hdr in [ + expected = [[('foo', 'bar'), ('expires', 2209069412), ('version', '0')]] + self.assertEqual(parse_ns_headers([hdr]), expected) + + @support.subTests('hdr', [ 'foo=bar; version="1"', 'foo=bar; Version="1"', - ]: - self.assertEqual(parse_ns_headers([hdr]), expected) + ]) + def test_parse_ns_headers_version(self, hdr): + # quotes should be stripped + expected = [[('foo', 'bar'), ('version', '1')]] + self.assertEqual(parse_ns_headers([hdr]), expected) def test_parse_ns_headers_special_names(self): # names such as 'expires' are not special in first name=value pair @@ -226,8 +221,7 @@ class HeaderTests(unittest.TestCase): expected = [[("expires", "01 Jan 2040 22:23:32 GMT"), ("version", "0")]] self.assertEqual(parse_ns_headers([hdr]), expected) - def test_join_header_words(self): - for src, expected in [ + @support.subTests('src,expected', [ ([[("foo", None), ("bar", "baz")]], "foo; bar=baz"), (([]), ""), (([[]]), ""), @@ -237,12 +231,11 @@ class HeaderTests(unittest.TestCase): 'n; foo="foo;_", bar=foo_bar'), ([[("n", "m"), ("foo", None)], [("bar", "foo_bar")]], 'n=m; foo, bar=foo_bar'), - ]: - with self.subTest(src=src): - self.assertEqual(join_header_words(src), expected) + ]) + def test_join_header_words(self, src, expected): + self.assertEqual(join_header_words(src), expected) - def test_split_header_words(self): - tests = [ + @support.subTests('arg,expect', [ ("foo", [[("foo", None)]]), ("foo=bar", [[("foo", "bar")]]), (" foo ", [[("foo", None)]]), @@ -259,24 +252,22 @@ class HeaderTests(unittest.TestCase): (r'foo; bar=baz, spam=, 
foo="\,\;\"", bar= ', [[("foo", None), ("bar", "baz")], [("spam", "")], [("foo", ',;"')], [("bar", "")]]), - ] - - for arg, expect in tests: - try: - result = split_header_words([arg]) - except: - import traceback, io - f = io.StringIO() - traceback.print_exc(None, f) - result = "(error -- traceback follows)\n\n%s" % f.getvalue() - self.assertEqual(result, expect, """ + ]) + def test_split_header_words(self, arg, expect): + try: + result = split_header_words([arg]) + except: + import traceback, io + f = io.StringIO() + traceback.print_exc(None, f) + result = "(error -- traceback follows)\n\n%s" % f.getvalue() + self.assertEqual(result, expect, """ When parsing: '%s' Expected: '%s' Got: '%s' """ % (arg, expect, result)) - def test_roundtrip(self): - tests = [ + @support.subTests('arg,expect', [ ("foo", "foo"), ("foo=bar", "foo=bar"), (" foo ", "foo"), @@ -309,12 +300,11 @@ Got: '%s' ('n; foo="foo;_", bar="foo,_"', 'n; foo="foo;_", bar="foo,_"'), - ] - - for arg, expect in tests: - input = split_header_words([arg]) - res = join_header_words(input) - self.assertEqual(res, expect, """ + ]) + def test_roundtrip(self, arg, expect): + input = split_header_words([arg]) + res = join_header_words(input) + self.assertEqual(res, expect, """ When parsing: '%s' Expected: '%s' Got: '%s' @@ -516,14 +506,7 @@ class CookieTests(unittest.TestCase): ## just the 7 special TLD's listed in their spec. And folks rely on ## that... - def test_domain_return_ok(self): - # test optimization: .domain_return_ok() should filter out most - # domains in the CookieJar before we try to access them (because that - # may require disk access -- in particular, with MSIECookieJar) - # This is only a rough check for performance reasons, so it's not too - # critical as long as it's sufficiently liberal. - pol = DefaultCookiePolicy() - for url, domain, ok in [ + @support.subTests('url,domain,ok', [ ("http://foo.bar.com/", "blah.com", False), ("http://foo.bar.com/", "rhubarb.blah.com", False), ("http://foo.bar.com/", "rhubarb.foo.bar.com", False), @@ -543,11 +526,18 @@ class CookieTests(unittest.TestCase): ("http://foo/", ".local", True), ("http://barfoo.com", ".foo.com", False), ("http://barfoo.com", "foo.com", False), - ]: - request = urllib.request.Request(url) - r = pol.domain_return_ok(domain, request) - if ok: self.assertTrue(r) - else: self.assertFalse(r) + ]) + def test_domain_return_ok(self, url, domain, ok): + # test optimization: .domain_return_ok() should filter out most + # domains in the CookieJar before we try to access them (because that + # may require disk access -- in particular, with MSIECookieJar) + # This is only a rough check for performance reasons, so it's not too + # critical as long as it's sufficiently liberal. 
+ pol = DefaultCookiePolicy() + request = urllib.request.Request(url) + r = pol.domain_return_ok(domain, request) + if ok: self.assertTrue(r) + else: self.assertFalse(r) def test_missing_value(self): # missing = sign in Cookie: header is regarded by Mozilla as a missing @@ -581,10 +571,7 @@ class CookieTests(unittest.TestCase): self.assertEqual(interact_netscape(c, "http://www.acme.com/foo/"), '"spam"; eggs') - def test_rfc2109_handling(self): - # RFC 2109 cookies are handled as RFC 2965 or Netscape cookies, - # dependent on policy settings - for rfc2109_as_netscape, rfc2965, version in [ + @support.subTests('rfc2109_as_netscape,rfc2965,version', [ # default according to rfc2965 if not explicitly specified (None, False, 0), (None, True, 1), @@ -593,24 +580,27 @@ class CookieTests(unittest.TestCase): (False, True, 1), (True, False, 0), (True, True, 0), - ]: - policy = DefaultCookiePolicy( - rfc2109_as_netscape=rfc2109_as_netscape, - rfc2965=rfc2965) - c = CookieJar(policy) - interact_netscape(c, "http://www.example.com/", "ni=ni; Version=1") - try: - cookie = c._cookies["www.example.com"]["/"]["ni"] - except KeyError: - self.assertIsNone(version) # didn't expect a stored cookie - else: - self.assertEqual(cookie.version, version) - # 2965 cookies are unaffected - interact_2965(c, "http://www.example.com/", - "foo=bar; Version=1") - if rfc2965: - cookie2965 = c._cookies["www.example.com"]["/"]["foo"] - self.assertEqual(cookie2965.version, 1) + ]) + def test_rfc2109_handling(self, rfc2109_as_netscape, rfc2965, version): + # RFC 2109 cookies are handled as RFC 2965 or Netscape cookies, + # dependent on policy settings + policy = DefaultCookiePolicy( + rfc2109_as_netscape=rfc2109_as_netscape, + rfc2965=rfc2965) + c = CookieJar(policy) + interact_netscape(c, "http://www.example.com/", "ni=ni; Version=1") + try: + cookie = c._cookies["www.example.com"]["/"]["ni"] + except KeyError: + self.assertIsNone(version) # didn't expect a stored cookie + else: + self.assertEqual(cookie.version, version) + # 2965 cookies are unaffected + interact_2965(c, "http://www.example.com/", + "foo=bar; Version=1") + if rfc2965: + cookie2965 = c._cookies["www.example.com"]["/"]["foo"] + self.assertEqual(cookie2965.version, 1) def test_ns_parser(self): c = CookieJar() @@ -778,8 +768,7 @@ class CookieTests(unittest.TestCase): # Cookie is sent back to the same URI. 
self.assertEqual(interact_netscape(cj, uri), value) - def test_escape_path(self): - cases = [ + @support.subTests('arg,result', [ # quoted safe ("/foo%2f/bar", "/foo%2F/bar"), ("/foo%2F/bar", "/foo%2F/bar"), @@ -799,9 +788,9 @@ class CookieTests(unittest.TestCase): ("/foo/bar\u00fc", "/foo/bar%C3%BC"), # UTF-8 encoded # unicode ("/foo/bar\uabcd", "/foo/bar%EA%AF%8D"), # UTF-8 encoded - ] - for arg, result in cases: - self.assertEqual(escape_path(arg), result) + ]) + def test_escape_path(self, arg, result): + self.assertEqual(escape_path(arg), result) def test_request_path(self): # with parameters diff --git a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py index b891d0734ca..22f6403d482 100644 --- a/Lib/test/test_ntpath.py +++ b/Lib/test/test_ntpath.py @@ -7,6 +7,7 @@ import sys import unittest import warnings from ntpath import ALLOW_MISSING +from test import support from test.support import TestFailed, cpython_only, os_helper from test.support.os_helper import FakePath from test import test_genericpath @@ -78,24 +79,7 @@ def tester(fn, wantResult): def _parameterize(*parameters): - """Simplistic decorator to parametrize a test - - Runs the decorated test multiple times in subTest, with a value from - 'parameters' passed as an extra positional argument. - Calls doCleanups() after each run. - - Not for general use. Intended to avoid indenting for easier backports. - - See https://discuss.python.org/t/91827 for discussing generalizations. - """ - def _parametrize_decorator(func): - def _parameterized(self, *args, **kwargs): - for parameter in parameters: - with self.subTest(parameter): - func(self, *args, parameter, **kwargs) - self.doCleanups() - return _parameterized - return _parametrize_decorator + return support.subTests('kwargs', parameters, _do_cleanups=True) class NtpathTestCase(unittest.TestCase): diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py index c45ce6d3ef7..21f06712548 100644 --- a/Lib/test/test_posixpath.py +++ b/Lib/test/test_posixpath.py @@ -36,23 +36,7 @@ def skip_if_ABSTFN_contains_backslash(test): def _parameterize(*parameters): - """Simplistic decorator to parametrize a test - - Runs the decorated test multiple times in subTest, with a value from - 'parameters' passed as an extra positional argument. - Does *not* call doCleanups() after each run. - - Not for general use. Intended to avoid indenting for easier backports. - - See https://discuss.python.org/t/91827 for discussing generalizations. 
- """ - def _parametrize_decorator(func): - def _parameterized(self, *args, **kwargs): - for parameter in parameters: - with self.subTest(parameter): - func(self, *args, parameter, **kwargs) - return _parameterized - return _parametrize_decorator + return support.subTests('kwargs', parameters) class PosixPathTest(unittest.TestCase): diff --git a/Lib/test/test_sqlite3/test_cli.py b/Lib/test/test_sqlite3/test_cli.py index 37e0f74f688..7f0b0f36505 100644 --- a/Lib/test/test_sqlite3/test_cli.py +++ b/Lib/test/test_sqlite3/test_cli.py @@ -1,14 +1,19 @@ """sqlite3 CLI tests.""" import sqlite3 +import sys +import textwrap import unittest from sqlite3.__main__ import main as cli +from test.support.import_helper import import_module from test.support.os_helper import TESTFN, unlink +from test.support.pty_helper import run_pty from test.support import ( captured_stdout, captured_stderr, captured_stdin, force_not_colorized_test_class, + requires_subprocess, ) @@ -200,5 +205,98 @@ class InteractiveSession(unittest.TestCase): self.assertIn('\x1b[1;35mOperationalError (SQLITE_ERROR)\x1b[0m: ' '\x1b[35mnear "sel": syntax error\x1b[0m', err) + +@requires_subprocess() +@force_not_colorized_test_class +class Completion(unittest.TestCase): + PS1 = "sqlite> " + + @classmethod + def setUpClass(cls): + _sqlite3 = import_module("_sqlite3") + if not hasattr(_sqlite3, "SQLITE_KEYWORDS"): + raise unittest.SkipTest("unable to determine SQLite keywords") + + readline = import_module("readline") + if readline.backend == "editline": + raise unittest.SkipTest("libedit readline is not supported") + + def write_input(self, input_, env=None): + script = textwrap.dedent(""" + import readline + from sqlite3.__main__ import main + + readline.parse_and_bind("set colored-completion-prefix off") + main() + """) + return run_pty(script, input_, env) + + def test_complete_sql_keywords(self): + # List candidates starting with 'S', there should be multiple matches. + input_ = b"S\t\tEL\t 1;\n.quit\n" + output = self.write_input(input_) + self.assertIn(b"SELECT", output) + self.assertIn(b"SET", output) + self.assertIn(b"SAVEPOINT", output) + self.assertIn(b"(1,)", output) + + # Keywords are completed in upper case for even lower case user input. + input_ = b"sel\t\t 1;\n.quit\n" + output = self.write_input(input_) + self.assertIn(b"SELECT", output) + self.assertIn(b"(1,)", output) + + @unittest.skipIf(sys.platform.startswith("freebsd"), + "Two actual tabs are inserted when there are no matching" + " completions in the pseudo-terminal opened by run_pty()" + " on FreeBSD") + def test_complete_no_match(self): + input_ = b"xyzzy\t\t\b\b\b\b\b\b\b.quit\n" + # Set NO_COLOR to disable coloring for self.PS1. + output = self.write_input(input_, env={"NO_COLOR": "1"}) + lines = output.decode().splitlines() + indices = ( + i for i, line in enumerate(lines, 1) + if line.startswith(f"{self.PS1}xyzzy") + ) + line_num = next(indices, -1) + self.assertNotEqual(line_num, -1) + # Completions occupy lines, assert no extra lines when there is nothing + # to complete. + self.assertEqual(line_num, len(lines)) + + def test_complete_no_input(self): + from _sqlite3 import SQLITE_KEYWORDS + + script = textwrap.dedent(""" + import readline + from sqlite3.__main__ import main + + # Configure readline to ...: + # - hide control sequences surrounding each candidate + # - hide "Display all xxx possibilities? 
(y or n)" + # - hide "--More--" + # - show candidates one per line + readline.parse_and_bind("set colored-completion-prefix off") + readline.parse_and_bind("set colored-stats off") + readline.parse_and_bind("set completion-query-items 0") + readline.parse_and_bind("set page-completions off") + readline.parse_and_bind("set completion-display-width 0") + + main() + """) + input_ = b"\t\t.quit\n" + output = run_pty(script, input_, env={"NO_COLOR": "1"}) + lines = output.decode().splitlines() + indices = [ + i for i, line in enumerate(lines) + if line.startswith(self.PS1) + ] + self.assertEqual(len(indices), 2) + start, end = indices + candidates = [l.strip() for l in lines[start+1:end]] + self.assertEqual(candidates, sorted(SQLITE_KEYWORDS)) + + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index c7ac7914158..13aaba405e3 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -1436,17 +1436,17 @@ Regression tests for gh-133999: >>> try: pass ... except TypeError as name: raise from None Traceback (most recent call last): - SyntaxError: invalid syntax + SyntaxError: did you forget an expression between 'raise' and 'from'? >>> try: pass ... except* TypeError as name: raise from None Traceback (most recent call last): - SyntaxError: invalid syntax + SyntaxError: did you forget an expression between 'raise' and 'from'? >>> match 1: ... case 1 | 2 as abc: raise from None Traceback (most recent call last): - SyntaxError: invalid syntax + SyntaxError: did you forget an expression between 'raise' and 'from'? Ensure that early = are not matched by the parser as invalid comparisons >>> f(2, 4, x=34); 1 $ 2 @@ -1695,6 +1695,28 @@ Make sure that the old "raise X, Y[, Z]" form is gone: ... SyntaxError: invalid syntax +Better errors for `raise` statement: + + >>> raise ValueError from + Traceback (most recent call last): + SyntaxError: did you forget an expression after 'from'? + + >>> raise mod.ValueError() from + Traceback (most recent call last): + SyntaxError: did you forget an expression after 'from'? + + >>> raise from exc + Traceback (most recent call last): + SyntaxError: did you forget an expression between 'raise' and 'from'? + + >>> raise from None + Traceback (most recent call last): + SyntaxError: did you forget an expression between 'raise' and 'from'? + + >>> raise from + Traceback (most recent call last): + SyntaxError: did you forget an expression between 'raise' and 'from'? 
+ Check that an multiple exception types with missing parentheses raise a custom exception only when using 'as' diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py index aabc360289a..b2bde5a9b1d 100644 --- a/Lib/test/test_urlparse.py +++ b/Lib/test/test_urlparse.py @@ -2,6 +2,7 @@ import sys import unicodedata import unittest import urllib.parse +from test import support RFC1808_BASE = "http://a/b/c/d;p?q#f" RFC2396_BASE = "http://a/b/c/d;p?q" @@ -156,27 +157,25 @@ class UrlParseTestCase(unittest.TestCase): self.assertEqual(result3.hostname, result.hostname) self.assertEqual(result3.port, result.port) - def test_qsl(self): - for orig, expect in parse_qsl_test_cases: - result = urllib.parse.parse_qsl(orig, keep_blank_values=True) - self.assertEqual(result, expect, "Error parsing %r" % orig) - expect_without_blanks = [v for v in expect if len(v[1])] - result = urllib.parse.parse_qsl(orig, keep_blank_values=False) - self.assertEqual(result, expect_without_blanks, - "Error parsing %r" % orig) - - def test_qs(self): - for orig, expect in parse_qs_test_cases: - result = urllib.parse.parse_qs(orig, keep_blank_values=True) - self.assertEqual(result, expect, "Error parsing %r" % orig) - expect_without_blanks = {v: expect[v] - for v in expect if len(expect[v][0])} - result = urllib.parse.parse_qs(orig, keep_blank_values=False) - self.assertEqual(result, expect_without_blanks, - "Error parsing %r" % orig) - - def test_roundtrips(self): - str_cases = [ + @support.subTests('orig,expect', parse_qsl_test_cases) + def test_qsl(self, orig, expect): + result = urllib.parse.parse_qsl(orig, keep_blank_values=True) + self.assertEqual(result, expect) + expect_without_blanks = [v for v in expect if len(v[1])] + result = urllib.parse.parse_qsl(orig, keep_blank_values=False) + self.assertEqual(result, expect_without_blanks) + + @support.subTests('orig,expect', parse_qs_test_cases) + def test_qs(self, orig, expect): + result = urllib.parse.parse_qs(orig, keep_blank_values=True) + self.assertEqual(result, expect) + expect_without_blanks = {v: expect[v] + for v in expect if len(expect[v][0])} + result = urllib.parse.parse_qs(orig, keep_blank_values=False) + self.assertEqual(result, expect_without_blanks) + + @support.subTests('bytes', (False, True)) + @support.subTests('url,parsed,split', [ ('path/to/file', ('', '', 'path/to/file', '', '', ''), ('', '', 'path/to/file', '', '')), @@ -263,23 +262,21 @@ class UrlParseTestCase(unittest.TestCase): ('sch_me:path/to/file', ('', '', 'sch_me:path/to/file', '', '', ''), ('', '', 'sch_me:path/to/file', '', '')), - ] - def _encode(t): - return (t[0].encode('ascii'), - tuple(x.encode('ascii') for x in t[1]), - tuple(x.encode('ascii') for x in t[2])) - bytes_cases = [_encode(x) for x in str_cases] - str_cases += [ ('schème:path/to/file', ('', '', 'schème:path/to/file', '', '', ''), ('', '', 'schème:path/to/file', '', '')), - ] - for url, parsed, split in str_cases + bytes_cases: - with self.subTest(url): - self.checkRoundtrips(url, parsed, split) - - def test_roundtrips_normalization(self): - str_cases = [ + ]) + def test_roundtrips(self, bytes, url, parsed, split): + if bytes: + if not url.isascii(): + self.skipTest('non-ASCII bytes') + url = str_encode(url) + parsed = tuple_encode(parsed) + split = tuple_encode(split) + self.checkRoundtrips(url, parsed, split) + + @support.subTests('bytes', (False, True)) + @support.subTests('url,url2,parsed,split', [ ('///path/to/file', '/path/to/file', ('', '', '/path/to/file', '', '', ''), @@ -300,22 +297,18 @@ class 
UrlParseTestCase(unittest.TestCase): 'https:///tmp/junk.txt', ('https', '', '/tmp/junk.txt', '', '', ''), ('https', '', '/tmp/junk.txt', '', '')), - ] - def _encode(t): - return (t[0].encode('ascii'), - t[1].encode('ascii'), - tuple(x.encode('ascii') for x in t[2]), - tuple(x.encode('ascii') for x in t[3])) - bytes_cases = [_encode(x) for x in str_cases] - for url, url2, parsed, split in str_cases + bytes_cases: - with self.subTest(url): - self.checkRoundtrips(url, parsed, split, url2) - - def test_http_roundtrips(self): - # urllib.parse.urlsplit treats 'http:' as an optimized special case, - # so we test both 'http:' and 'https:' in all the following. - # Three cheers for white box knowledge! - str_cases = [ + ]) + def test_roundtrips_normalization(self, bytes, url, url2, parsed, split): + if bytes: + url = str_encode(url) + url2 = str_encode(url2) + parsed = tuple_encode(parsed) + split = tuple_encode(split) + self.checkRoundtrips(url, parsed, split, url2) + + @support.subTests('bytes', (False, True)) + @support.subTests('scheme', ('http', 'https')) + @support.subTests('url,parsed,split', [ ('://www.python.org', ('www.python.org', '', '', '', ''), ('www.python.org', '', '', '')), @@ -331,23 +324,20 @@ class UrlParseTestCase(unittest.TestCase): ('://a/b/c/d;p?q#f', ('a', '/b/c/d', 'p', 'q', 'f'), ('a', '/b/c/d;p', 'q', 'f')), - ] - def _encode(t): - return (t[0].encode('ascii'), - tuple(x.encode('ascii') for x in t[1]), - tuple(x.encode('ascii') for x in t[2])) - bytes_cases = [_encode(x) for x in str_cases] - str_schemes = ('http', 'https') - bytes_schemes = (b'http', b'https') - str_tests = str_schemes, str_cases - bytes_tests = bytes_schemes, bytes_cases - for schemes, test_cases in (str_tests, bytes_tests): - for scheme in schemes: - for url, parsed, split in test_cases: - url = scheme + url - parsed = (scheme,) + parsed - split = (scheme,) + split - self.checkRoundtrips(url, parsed, split) + ]) + def test_http_roundtrips(self, bytes, scheme, url, parsed, split): + # urllib.parse.urlsplit treats 'http:' as an optimized special case, + # so we test both 'http:' and 'https:' in all the following. + # Three cheers for white box knowledge! 
+ if bytes: + scheme = str_encode(scheme) + url = str_encode(url) + parsed = tuple_encode(parsed) + split = tuple_encode(split) + url = scheme + url + parsed = (scheme,) + parsed + split = (scheme,) + split + self.checkRoundtrips(url, parsed, split) def checkJoin(self, base, relurl, expected, *, relroundtrip=True): with self.subTest(base=base, relurl=relurl): @@ -363,12 +353,13 @@ class UrlParseTestCase(unittest.TestCase): relurlb = urllib.parse.urlunsplit(urllib.parse.urlsplit(relurlb)) self.assertEqual(urllib.parse.urljoin(baseb, relurlb), expectedb) - def test_unparse_parse(self): - str_cases = ['Python', './Python','x-newscheme://foo.com/stuff','x://y','x:/y','x:/','/',] - bytes_cases = [x.encode('ascii') for x in str_cases] - for u in str_cases + bytes_cases: - self.assertEqual(urllib.parse.urlunsplit(urllib.parse.urlsplit(u)), u) - self.assertEqual(urllib.parse.urlunparse(urllib.parse.urlparse(u)), u) + @support.subTests('bytes', (False, True)) + @support.subTests('u', ['Python', './Python','x-newscheme://foo.com/stuff','x://y','x:/y','x:/','/',]) + def test_unparse_parse(self, bytes, u): + if bytes: + u = str_encode(u) + self.assertEqual(urllib.parse.urlunsplit(urllib.parse.urlsplit(u)), u) + self.assertEqual(urllib.parse.urlunparse(urllib.parse.urlparse(u)), u) def test_RFC1808(self): # "normal" cases from RFC 1808: @@ -695,8 +686,8 @@ class UrlParseTestCase(unittest.TestCase): self.checkJoin('///b/c', '///w', '///w') self.checkJoin('///b/c', 'w', '///b/w') - def test_RFC2732(self): - str_cases = [ + @support.subTests('bytes', (False, True)) + @support.subTests('url,hostname,port', [ ('http://Test.python.org:5432/foo/', 'test.python.org', 5432), ('http://12.34.56.78:5432/foo/', '12.34.56.78', 5432), ('http://[::1]:5432/foo/', '::1', 5432), @@ -727,26 +718,28 @@ class UrlParseTestCase(unittest.TestCase): ('http://[::12.34.56.78]:/foo/', '::12.34.56.78', None), ('http://[::ffff:12.34.56.78]:/foo/', '::ffff:12.34.56.78', None), - ] - def _encode(t): - return t[0].encode('ascii'), t[1].encode('ascii'), t[2] - bytes_cases = [_encode(x) for x in str_cases] - for url, hostname, port in str_cases + bytes_cases: - urlparsed = urllib.parse.urlparse(url) - self.assertEqual((urlparsed.hostname, urlparsed.port) , (hostname, port)) - - str_cases = [ + ]) + def test_RFC2732(self, bytes, url, hostname, port): + if bytes: + url = str_encode(url) + hostname = str_encode(hostname) + urlparsed = urllib.parse.urlparse(url) + self.assertEqual((urlparsed.hostname, urlparsed.port), (hostname, port)) + + @support.subTests('bytes', (False, True)) + @support.subTests('invalid_url', [ 'http://::12.34.56.78]/', 'http://[::1/foo/', 'ftp://[::1/foo/bad]/bad', 'http://[::1/foo/bad]/bad', - 'http://[::ffff:12.34.56.78'] - bytes_cases = [x.encode('ascii') for x in str_cases] - for invalid_url in str_cases + bytes_cases: - self.assertRaises(ValueError, urllib.parse.urlparse, invalid_url) - - def test_urldefrag(self): - str_cases = [ + 'http://[::ffff:12.34.56.78']) + def test_RFC2732_invalid(self, bytes, invalid_url): + if bytes: + invalid_url = str_encode(invalid_url) + self.assertRaises(ValueError, urllib.parse.urlparse, invalid_url) + + @support.subTests('bytes', (False, True)) + @support.subTests('url,defrag,frag', [ ('http://python.org#frag', 'http://python.org', 'frag'), ('http://python.org', 'http://python.org', ''), ('http://python.org/#frag', 'http://python.org/', 'frag'), @@ -770,18 +763,18 @@ class UrlParseTestCase(unittest.TestCase): ('http:?q#f', 'http:?q', 'f'), ('//a/b/c;p?q#f', '//a/b/c;p?q', 'f'), 
('://a/b/c;p?q#f', '://a/b/c;p?q', 'f'), - ] - def _encode(t): - return type(t)(x.encode('ascii') for x in t) - bytes_cases = [_encode(x) for x in str_cases] - for url, defrag, frag in str_cases + bytes_cases: - with self.subTest(url): - result = urllib.parse.urldefrag(url) - hash = '#' if isinstance(url, str) else b'#' - self.assertEqual(result.geturl(), url.rstrip(hash)) - self.assertEqual(result, (defrag, frag)) - self.assertEqual(result.url, defrag) - self.assertEqual(result.fragment, frag) + ]) + def test_urldefrag(self, bytes, url, defrag, frag): + if bytes: + url = str_encode(url) + defrag = str_encode(defrag) + frag = str_encode(frag) + result = urllib.parse.urldefrag(url) + hash = '#' if isinstance(url, str) else b'#' + self.assertEqual(result.geturl(), url.rstrip(hash)) + self.assertEqual(result, (defrag, frag)) + self.assertEqual(result.url, defrag) + self.assertEqual(result.fragment, frag) def test_urlsplit_scoped_IPv6(self): p = urllib.parse.urlsplit('http://[FE80::822a:a8ff:fe49:470c%tESt]:1234') @@ -981,42 +974,35 @@ class UrlParseTestCase(unittest.TestCase): self.assertEqual(p.scheme, "https") self.assertEqual(p.geturl(), "https://www.python.org/") - def test_attributes_bad_port(self): + @support.subTests('bytes', (False, True)) + @support.subTests('parse', (urllib.parse.urlsplit, urllib.parse.urlparse)) + @support.subTests('port', ("foo", "1.5", "-1", "0x10", "-0", "1_1", " 1", "1 ", "६")) + def test_attributes_bad_port(self, bytes, parse, port): """Check handling of invalid ports.""" - for bytes in (False, True): - for parse in (urllib.parse.urlsplit, urllib.parse.urlparse): - for port in ("foo", "1.5", "-1", "0x10", "-0", "1_1", " 1", "1 ", "६"): - with self.subTest(bytes=bytes, parse=parse, port=port): - netloc = "www.example.net:" + port - url = "http://" + netloc + "/" - if bytes: - if netloc.isascii() and port.isascii(): - netloc = netloc.encode("ascii") - url = url.encode("ascii") - else: - continue - p = parse(url) - self.assertEqual(p.netloc, netloc) - with self.assertRaises(ValueError): - p.port + netloc = "www.example.net:" + port + url = "http://" + netloc + "/" + if bytes: + if not (netloc.isascii() and port.isascii()): + self.skipTest('non-ASCII bytes') + netloc = str_encode(netloc) + url = str_encode(url) + p = parse(url) + self.assertEqual(p.netloc, netloc) + with self.assertRaises(ValueError): + p.port - def test_attributes_bad_scheme(self): + @support.subTests('bytes', (False, True)) + @support.subTests('parse', (urllib.parse.urlsplit, urllib.parse.urlparse)) + @support.subTests('scheme', (".", "+", "-", "0", "http&", "६http")) + def test_attributes_bad_scheme(self, bytes, parse, scheme): """Check handling of invalid schemes.""" - for bytes in (False, True): - for parse in (urllib.parse.urlsplit, urllib.parse.urlparse): - for scheme in (".", "+", "-", "0", "http&", "६http"): - with self.subTest(bytes=bytes, parse=parse, scheme=scheme): - url = scheme + "://www.example.net" - if bytes: - if url.isascii(): - url = url.encode("ascii") - else: - continue - p = parse(url) - if bytes: - self.assertEqual(p.scheme, b"") - else: - self.assertEqual(p.scheme, "") + url = scheme + "://www.example.net" + if bytes: + if not url.isascii(): + self.skipTest('non-ASCII bytes') + url = url.encode("ascii") + p = parse(url) + self.assertEqual(p.scheme, b"" if bytes else "") def test_attributes_without_netloc(self): # This example is straight from RFC 3261. 
It looks like it @@ -1128,24 +1114,21 @@ class UrlParseTestCase(unittest.TestCase): self.assertEqual(urllib.parse.urlparse(b"x-newscheme://foo.com/stuff?query"), (b'x-newscheme', b'foo.com', b'/stuff', b'', b'query', b'')) - def test_default_scheme(self): + @support.subTests('func', (urllib.parse.urlparse, urllib.parse.urlsplit)) + def test_default_scheme(self, func): # Exercise the scheme parameter of urlparse() and urlsplit() - for func in (urllib.parse.urlparse, urllib.parse.urlsplit): - with self.subTest(function=func): - result = func("http://example.net/", "ftp") - self.assertEqual(result.scheme, "http") - result = func(b"http://example.net/", b"ftp") - self.assertEqual(result.scheme, b"http") - self.assertEqual(func("path", "ftp").scheme, "ftp") - self.assertEqual(func("path", scheme="ftp").scheme, "ftp") - self.assertEqual(func(b"path", scheme=b"ftp").scheme, b"ftp") - self.assertEqual(func("path").scheme, "") - self.assertEqual(func(b"path").scheme, b"") - self.assertEqual(func(b"path", "").scheme, b"") - - def test_parse_fragments(self): - # Exercise the allow_fragments parameter of urlparse() and urlsplit() - tests = ( + result = func("http://example.net/", "ftp") + self.assertEqual(result.scheme, "http") + result = func(b"http://example.net/", b"ftp") + self.assertEqual(result.scheme, b"http") + self.assertEqual(func("path", "ftp").scheme, "ftp") + self.assertEqual(func("path", scheme="ftp").scheme, "ftp") + self.assertEqual(func(b"path", scheme=b"ftp").scheme, b"ftp") + self.assertEqual(func("path").scheme, "") + self.assertEqual(func(b"path").scheme, b"") + self.assertEqual(func(b"path", "").scheme, b"") + + @support.subTests('url,attr,expected_frag', ( ("http:#frag", "path", "frag"), ("//example.net#frag", "path", "frag"), ("index.html#frag", "path", "frag"), @@ -1156,24 +1139,24 @@ class UrlParseTestCase(unittest.TestCase): ("//abc#@frag", "path", "@frag"), ("//abc:80#@frag", "path", "@frag"), ("//abc#@frag:80", "path", "@frag:80"), - ) - for url, attr, expected_frag in tests: - for func in (urllib.parse.urlparse, urllib.parse.urlsplit): - if attr == "params" and func is urllib.parse.urlsplit: - attr = "path" - with self.subTest(url=url, function=func): - result = func(url, allow_fragments=False) - self.assertEqual(result.fragment, "") - self.assertEndsWith(getattr(result, attr), - "#" + expected_frag) - self.assertEqual(func(url, "", False).fragment, "") - - result = func(url, allow_fragments=True) - self.assertEqual(result.fragment, expected_frag) - self.assertNotEndsWith(getattr(result, attr), expected_frag) - self.assertEqual(func(url, "", True).fragment, - expected_frag) - self.assertEqual(func(url).fragment, expected_frag) + )) + @support.subTests('func', (urllib.parse.urlparse, urllib.parse.urlsplit)) + def test_parse_fragments(self, url, attr, expected_frag, func): + # Exercise the allow_fragments parameter of urlparse() and urlsplit() + if attr == "params" and func is urllib.parse.urlsplit: + attr = "path" + result = func(url, allow_fragments=False) + self.assertEqual(result.fragment, "") + self.assertEndsWith(getattr(result, attr), + "#" + expected_frag) + self.assertEqual(func(url, "", False).fragment, "") + + result = func(url, allow_fragments=True) + self.assertEqual(result.fragment, expected_frag) + self.assertNotEndsWith(getattr(result, attr), expected_frag) + self.assertEqual(func(url, "", True).fragment, + expected_frag) + self.assertEqual(func(url).fragment, expected_frag) def test_mixed_types_rejected(self): # Several functions that process either strings 
or ASCII encoded bytes @@ -1199,7 +1182,14 @@ class UrlParseTestCase(unittest.TestCase): with self.assertRaisesRegex(TypeError, "Cannot mix str"): urllib.parse.urljoin(b"http://python.org", "http://python.org") - def _check_result_type(self, str_type): + @support.subTests('result_type', [ + urllib.parse.DefragResult, + urllib.parse.SplitResult, + urllib.parse.ParseResult, + ]) + def test_result_pairs(self, result_type): + # Check encoding and decoding between result pairs + str_type = result_type num_args = len(str_type._fields) bytes_type = str_type._encoded_counterpart self.assertIs(bytes_type._decoded_counterpart, str_type) @@ -1224,16 +1214,6 @@ class UrlParseTestCase(unittest.TestCase): self.assertEqual(str_result.encode(encoding, errors), bytes_args) self.assertEqual(str_result.encode(encoding, errors), bytes_result) - def test_result_pairs(self): - # Check encoding and decoding between result pairs - result_types = [ - urllib.parse.DefragResult, - urllib.parse.SplitResult, - urllib.parse.ParseResult, - ] - for result_type in result_types: - self._check_result_type(result_type) - def test_parse_qs_encoding(self): result = urllib.parse.parse_qs("key=\u0141%E9", encoding="latin-1") self.assertEqual(result, {'key': ['\u0141\xE9']}) @@ -1265,8 +1245,7 @@ class UrlParseTestCase(unittest.TestCase): urllib.parse.parse_qsl('&'.join(['a=a']*11), max_num_fields=10) urllib.parse.parse_qsl('&'.join(['a=a']*10), max_num_fields=10) - def test_parse_qs_separator(self): - parse_qs_semicolon_cases = [ + @support.subTests('orig,expect', [ (";", {}), (";;", {}), (";a=b", {'a': ['b']}), @@ -1277,17 +1256,14 @@ class UrlParseTestCase(unittest.TestCase): (b";a=b", {b'a': [b'b']}), (b"a=a+b;b=b+c", {b'a': [b'a b'], b'b': [b'b c']}), (b"a=1;a=2", {b'a': [b'1', b'2']}), - ] - for orig, expect in parse_qs_semicolon_cases: - with self.subTest(f"Original: {orig!r}, Expected: {expect!r}"): - result = urllib.parse.parse_qs(orig, separator=';') - self.assertEqual(result, expect, "Error parsing %r" % orig) - result_bytes = urllib.parse.parse_qs(orig, separator=b';') - self.assertEqual(result_bytes, expect, "Error parsing %r" % orig) - - - def test_parse_qsl_separator(self): - parse_qsl_semicolon_cases = [ + ]) + def test_parse_qs_separator(self, orig, expect): + result = urllib.parse.parse_qs(orig, separator=';') + self.assertEqual(result, expect) + result_bytes = urllib.parse.parse_qs(orig, separator=b';') + self.assertEqual(result_bytes, expect) + + @support.subTests('orig,expect', [ (";", []), (";;", []), (";a=b", [('a', 'b')]), @@ -1298,13 +1274,12 @@ class UrlParseTestCase(unittest.TestCase): (b";a=b", [(b'a', b'b')]), (b"a=a+b;b=b+c", [(b'a', b'a b'), (b'b', b'b c')]), (b"a=1;a=2", [(b'a', b'1'), (b'a', b'2')]), - ] - for orig, expect in parse_qsl_semicolon_cases: - with self.subTest(f"Original: {orig!r}, Expected: {expect!r}"): - result = urllib.parse.parse_qsl(orig, separator=';') - self.assertEqual(result, expect, "Error parsing %r" % orig) - result_bytes = urllib.parse.parse_qsl(orig, separator=b';') - self.assertEqual(result_bytes, expect, "Error parsing %r" % orig) + ]) + def test_parse_qsl_separator(self, orig, expect): + result = urllib.parse.parse_qsl(orig, separator=';') + self.assertEqual(result, expect) + result_bytes = urllib.parse.parse_qsl(orig, separator=b';') + self.assertEqual(result_bytes, expect) def test_parse_qsl_bytes(self): self.assertEqual(urllib.parse.parse_qsl(b'a=b'), [(b'a', b'b')]) @@ -1695,11 +1670,12 @@ class Utility_Tests(unittest.TestCase): self.assertRaises(UnicodeError, 
urllib.parse._to_bytes, 'http://www.python.org/medi\u00e6val') - def test_unwrap(self): - for wrapped_url in ('<URL:scheme://host/path>', '<scheme://host/path>', - 'URL:scheme://host/path', 'scheme://host/path'): - url = urllib.parse.unwrap(wrapped_url) - self.assertEqual(url, 'scheme://host/path') + @support.subTests('wrapped_url', + ('<URL:scheme://host/path>', '<scheme://host/path>', + 'URL:scheme://host/path', 'scheme://host/path')) + def test_unwrap(self, wrapped_url): + url = urllib.parse.unwrap(wrapped_url) + self.assertEqual(url, 'scheme://host/path') class DeprecationTest(unittest.TestCase): @@ -1780,5 +1756,11 @@ class DeprecationTest(unittest.TestCase): 'urllib.parse.to_bytes() is deprecated as of 3.8') +def str_encode(s): + return s.encode('ascii') + +def tuple_encode(t): + return tuple(str_encode(x) for x in t) + if __name__ == "__main__": unittest.main() diff --git a/Misc/ACKS b/Misc/ACKS index 2435943f1bb..739af8d9e11 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1868,6 +1868,7 @@ Neil Tallim Geoff Talvola Anish Tambe Musashi Tamura +Long Tan William Tanksley Christian Tanzer Steven Taschuk diff --git a/Misc/NEWS.d/next/C_API/2025-05-29-16-56-23.gh-issue-134891.7eKO8U.rst b/Misc/NEWS.d/next/C_API/2025-05-29-16-56-23.gh-issue-134891.7eKO8U.rst new file mode 100644 index 00000000000..db30d5e9a94 --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-05-29-16-56-23.gh-issue-134891.7eKO8U.rst @@ -0,0 +1,2 @@ +Add :c:func:`PyUnstable_Unicode_GET_CACHED_HASH` to get the cached hash of a +string. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-05-16-09-06-38.gh-issue-134036.st2e-B.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-16-09-06-38.gh-issue-134036.st2e-B.rst new file mode 100644 index 00000000000..176aab1c93a --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-16-09-06-38.gh-issue-134036.st2e-B.rst @@ -0,0 +1,2 @@ +Improve :exc:`SyntaxError` message when using invalid :keyword:`raise` +statements. diff --git a/Misc/NEWS.d/next/Library/2025-05-05-03-14-08.gh-issue-133390.AuTggn.rst b/Misc/NEWS.d/next/Library/2025-05-05-03-14-08.gh-issue-133390.AuTggn.rst new file mode 100644 index 00000000000..38d5c311b1d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-05-05-03-14-08.gh-issue-133390.AuTggn.rst @@ -0,0 +1 @@ +Support keyword completion in the :mod:`sqlite3` command-line interface. diff --git a/Misc/NEWS.d/next/Tests/2025-06-04-13-07-44.gh-issue-135120.NapnZT.rst b/Misc/NEWS.d/next/Tests/2025-06-04-13-07-44.gh-issue-135120.NapnZT.rst new file mode 100644 index 00000000000..772173774b1 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2025-06-04-13-07-44.gh-issue-135120.NapnZT.rst @@ -0,0 +1 @@ +Add :func:`!test.support.subTests`.
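The cached-hash behavior described in the C_API news entry above can be observed from Python through the _testcapi hook this commit adds (a sketch of the intended semantics only; exactly when a hash gets cached is an implementation detail, as the new test_GET_CACHED_HASH notes):

    from _testcapi import unicode_GET_CACHED_HASH

    s = str(b'some new string', 'ascii')  # avoid interning/constant folding
    # A fresh string has no cached hash yet: -1, with no exception set.
    assert unicode_GET_CACHED_HASH(s) == -1
    hash(s)  # computing the hash caches it on the object
    assert unicode_GET_CACHED_HASH(s) == hash(s)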
diff --git a/Modules/_sqlite/module.c b/Modules/_sqlite/module.c
index 909ddd1f990..5464fd1227a 100644
--- a/Modules/_sqlite/module.c
+++ b/Modules/_sqlite/module.c
@@ -32,6 +32,7 @@
 #include "microprotocols.h"
 #include "row.h"
 #include "blob.h"
+#include "util.h"
 
 #if SQLITE_VERSION_NUMBER < 3015002
 #error "SQLite 3.15.2 or higher required"
@@ -405,6 +406,40 @@ pysqlite_error_name(int rc)
 }
 
 static int
+add_keyword_tuple(PyObject *module)
+{
+#if SQLITE_VERSION_NUMBER >= 3024000
+    int count = sqlite3_keyword_count();
+    PyObject *keywords = PyTuple_New(count);
+    if (keywords == NULL) {
+        return -1;
+    }
+    for (int i = 0; i < count; i++) {
+        const char *keyword;
+        int size;
+        int result = sqlite3_keyword_name(i, &keyword, &size);
+        if (result != SQLITE_OK) {
+            pysqlite_state *state = pysqlite_get_state(module);
+            set_error_from_code(state, result);
+            goto error;
+        }
+        PyObject *kwd = PyUnicode_FromStringAndSize(keyword, size);
+        if (!kwd) {
+            goto error;
+        }
+        PyTuple_SET_ITEM(keywords, i, kwd);
+    }
+    return PyModule_Add(module, "SQLITE_KEYWORDS", keywords);
+
+error:
+    Py_DECREF(keywords);
+    return -1;
+#else
+    return 0;
+#endif
+}
+
+static int
 add_integer_constants(PyObject *module)
 {
 #define ADD_INT(ival) \
     do { \
@@ -702,6 +737,10 @@ module_exec(PyObject *module)
         goto error;
     }
 
+    if (add_keyword_tuple(module) < 0) {
+        goto error;
+    }
+
     if (PyModule_AddStringConstant(module, "sqlite_version",
                                    sqlite3_libversion())) {
         goto error;
     }
diff --git a/Modules/_testcapi/unicode.c b/Modules/_testcapi/unicode.c
index e70f5c68bc3..203282dd53d 100644
--- a/Modules/_testcapi/unicode.c
+++ b/Modules/_testcapi/unicode.c
@@ -220,6 +220,12 @@ unicode_copycharacters(PyObject *self, PyObject *args)
     return Py_BuildValue("(Nn)", to_copy, copied);
 }
 
+static PyObject*
+unicode_GET_CACHED_HASH(PyObject *self, PyObject *arg)
+{
+    return PyLong_FromSsize_t(PyUnstable_Unicode_GET_CACHED_HASH(arg));
+}
+
 
 // --- PyUnicodeWriter type -------------------------------------------------
 
@@ -570,6 +576,7 @@ static PyMethodDef TestMethods[] = {
    {"unicode_asucs4copy", unicode_asucs4copy, METH_VARARGS},
    {"unicode_asutf8", unicode_asutf8, METH_VARARGS},
    {"unicode_copycharacters", unicode_copycharacters, METH_VARARGS},
+   {"unicode_GET_CACHED_HASH", unicode_GET_CACHED_HASH, METH_O},
    {NULL},
 };
diff --git a/Modules/xxlimited.c b/Modules/xxlimited.c
index 26ac35734fb..0480fb08498 100644
--- a/Modules/xxlimited.c
+++ b/Modules/xxlimited.c
@@ -424,6 +424,13 @@ xx_clear(PyObject *module)
     return 0;
 }
 
+static void
+xx_free(void *module)
+{
+    // allow xx_modexec to omit calling xx_clear on error
+    (void)xx_clear((PyObject *)module);
+}
+
 static struct PyModuleDef xxmodule = {
     PyModuleDef_HEAD_INIT,
     .m_name = "xxlimited",
@@ -433,9 +440,7 @@ static struct PyModuleDef xxmodule = {
     .m_slots = xx_slots,
     .m_traverse = xx_traverse,
     .m_clear = xx_clear,
-    /* m_free is not necessary here: xx_clear clears all references,
-     * and the module state is deallocated along with the module.
- */ + .m_free = xx_free, }; diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 5611f839627..5c2308a0121 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -167,11 +167,7 @@ static inline void PyUnicode_SET_UTF8_LENGTH(PyObject *op, Py_ssize_t length) #define _PyUnicode_HASH(op) \ (_PyASCIIObject_CAST(op)->hash) -static inline Py_hash_t PyUnicode_HASH(PyObject *op) -{ - assert(_PyUnicode_CHECK(op)); - return FT_ATOMIC_LOAD_SSIZE_RELAXED(_PyASCIIObject_CAST(op)->hash); -} +#define PyUnicode_HASH PyUnstable_Unicode_GET_CACHED_HASH static inline void PyUnicode_SET_HASH(PyObject *op, Py_hash_t hash) { diff --git a/Parser/parser.c b/Parser/parser.c index d5aafef826e..82311b4f40e 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -21,54 +21,54 @@ static KeywordToken *reserved_keywords[] = { (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) {{NULL, -1}}, (KeywordToken[]) { - {"if", 682}, - {"as", 680}, - {"in", 695}, + {"if", 686}, + {"as", 684}, + {"in", 699}, {"or", 588}, {"is", 596}, {NULL, -1}, }, (KeywordToken[]) { - {"del", 625}, - {"def", 699}, - {"for", 694}, - {"try", 656}, + {"del", 629}, + {"def", 703}, + {"for", 698}, + {"try", 660}, {"and", 589}, - {"not", 703}, + {"not", 707}, {NULL, -1}, }, (KeywordToken[]) { - {"from", 633}, + {"from", 637}, {"pass", 526}, - {"with", 647}, - {"elif", 687}, - {"else", 686}, + {"with", 651}, + {"elif", 691}, + {"else", 690}, {"None", 623}, {"True", 622}, {NULL, -1}, }, (KeywordToken[]) { - {"raise", 525}, + {"raise", 627}, {"yield", 587}, {"break", 527}, - {"async", 698}, - {"class", 701}, - {"while", 689}, + {"async", 702}, + {"class", 705}, + {"while", 693}, {"False", 624}, {"await", 597}, {NULL, -1}, }, (KeywordToken[]) { {"return", 522}, - {"import", 634}, + {"import", 638}, {"assert", 532}, {"global", 529}, - {"except", 677}, + {"except", 681}, {"lambda", 621}, {NULL, -1}, }, (KeywordToken[]) { - {"finally", 673}, + {"finally", 677}, {NULL, -1}, }, (KeywordToken[]) { @@ -298,235 +298,236 @@ static char *soft_keywords[] = { #define invalid_named_expression_type 1211 #define invalid_assignment_type 1212 #define invalid_ann_assign_target_type 1213 -#define invalid_del_stmt_type 1214 -#define invalid_block_type 1215 -#define invalid_comprehension_type 1216 -#define invalid_dict_comprehension_type 1217 -#define invalid_parameters_type 1218 -#define invalid_default_type 1219 -#define invalid_star_etc_type 1220 -#define invalid_kwds_type 1221 -#define invalid_parameters_helper_type 1222 -#define invalid_lambda_parameters_type 1223 -#define invalid_lambda_parameters_helper_type 1224 -#define invalid_lambda_star_etc_type 1225 -#define invalid_lambda_kwds_type 1226 -#define invalid_double_type_comments_type 1227 -#define invalid_with_item_type 1228 -#define invalid_for_if_clause_type 1229 -#define invalid_for_target_type 1230 -#define invalid_group_type 1231 -#define invalid_import_type 1232 -#define invalid_dotted_as_name_type 1233 -#define invalid_import_from_as_name_type 1234 -#define invalid_import_from_targets_type 1235 -#define invalid_with_stmt_type 1236 -#define invalid_with_stmt_indent_type 1237 -#define invalid_try_stmt_type 1238 -#define invalid_except_stmt_type 1239 -#define invalid_except_star_stmt_type 1240 -#define invalid_finally_stmt_type 1241 -#define invalid_except_stmt_indent_type 1242 -#define invalid_except_star_stmt_indent_type 1243 -#define invalid_match_stmt_type 1244 -#define invalid_case_block_type 1245 -#define invalid_as_pattern_type 1246 -#define invalid_class_pattern_type 1247 -#define 
invalid_class_argument_pattern_type 1248 -#define invalid_if_stmt_type 1249 -#define invalid_elif_stmt_type 1250 -#define invalid_else_stmt_type 1251 -#define invalid_while_stmt_type 1252 -#define invalid_for_stmt_type 1253 -#define invalid_def_raw_type 1254 -#define invalid_class_def_raw_type 1255 -#define invalid_double_starred_kvpairs_type 1256 -#define invalid_kvpair_type 1257 -#define invalid_starred_expression_unpacking_type 1258 -#define invalid_starred_expression_type 1259 -#define invalid_fstring_replacement_field_type 1260 -#define invalid_fstring_conversion_character_type 1261 -#define invalid_tstring_replacement_field_type 1262 -#define invalid_tstring_conversion_character_type 1263 -#define invalid_arithmetic_type 1264 -#define invalid_factor_type 1265 -#define invalid_type_params_type 1266 -#define _loop0_1_type 1267 -#define _loop1_2_type 1268 -#define _loop0_3_type 1269 -#define _gather_4_type 1270 -#define _tmp_5_type 1271 -#define _tmp_6_type 1272 -#define _tmp_7_type 1273 -#define _tmp_8_type 1274 -#define _tmp_9_type 1275 -#define _tmp_10_type 1276 -#define _tmp_11_type 1277 -#define _loop1_12_type 1278 -#define _tmp_13_type 1279 -#define _loop0_14_type 1280 -#define _gather_15_type 1281 -#define _tmp_16_type 1282 -#define _tmp_17_type 1283 -#define _loop0_18_type 1284 -#define _loop1_19_type 1285 -#define _loop0_20_type 1286 -#define _gather_21_type 1287 -#define _tmp_22_type 1288 -#define _loop0_23_type 1289 -#define _gather_24_type 1290 -#define _loop1_25_type 1291 -#define _tmp_26_type 1292 -#define _tmp_27_type 1293 -#define _loop0_28_type 1294 -#define _loop0_29_type 1295 -#define _loop1_30_type 1296 -#define _loop1_31_type 1297 -#define _loop0_32_type 1298 -#define _loop1_33_type 1299 -#define _loop0_34_type 1300 -#define _gather_35_type 1301 -#define _tmp_36_type 1302 -#define _loop1_37_type 1303 -#define _loop1_38_type 1304 -#define _loop1_39_type 1305 -#define _loop0_40_type 1306 -#define _gather_41_type 1307 -#define _tmp_42_type 1308 -#define _tmp_43_type 1309 -#define _tmp_44_type 1310 -#define _loop0_45_type 1311 -#define _gather_46_type 1312 -#define _loop0_47_type 1313 -#define _gather_48_type 1314 -#define _tmp_49_type 1315 -#define _loop0_50_type 1316 -#define _gather_51_type 1317 -#define _loop0_52_type 1318 -#define _gather_53_type 1319 -#define _loop0_54_type 1320 -#define _gather_55_type 1321 -#define _loop1_56_type 1322 -#define _loop1_57_type 1323 -#define _loop0_58_type 1324 -#define _gather_59_type 1325 -#define _loop1_60_type 1326 -#define _loop1_61_type 1327 -#define _loop1_62_type 1328 -#define _tmp_63_type 1329 -#define _loop0_64_type 1330 -#define _gather_65_type 1331 -#define _tmp_66_type 1332 -#define _tmp_67_type 1333 -#define _tmp_68_type 1334 -#define _tmp_69_type 1335 -#define _tmp_70_type 1336 -#define _loop0_71_type 1337 -#define _loop0_72_type 1338 -#define _loop1_73_type 1339 -#define _loop1_74_type 1340 -#define _loop0_75_type 1341 -#define _loop1_76_type 1342 -#define _loop0_77_type 1343 -#define _loop0_78_type 1344 -#define _loop0_79_type 1345 -#define _loop0_80_type 1346 -#define _loop1_81_type 1347 -#define _tmp_82_type 1348 -#define _loop0_83_type 1349 -#define _gather_84_type 1350 -#define _loop1_85_type 1351 -#define _loop0_86_type 1352 -#define _tmp_87_type 1353 -#define _loop0_88_type 1354 -#define _gather_89_type 1355 -#define _tmp_90_type 1356 -#define _loop0_91_type 1357 -#define _gather_92_type 1358 -#define _loop0_93_type 1359 -#define _gather_94_type 1360 -#define _loop0_95_type 1361 -#define _loop0_96_type 1362 
-#define _gather_97_type 1363 -#define _loop1_98_type 1364 -#define _tmp_99_type 1365 -#define _loop0_100_type 1366 -#define _gather_101_type 1367 -#define _loop0_102_type 1368 -#define _gather_103_type 1369 -#define _tmp_104_type 1370 -#define _tmp_105_type 1371 -#define _loop0_106_type 1372 -#define _gather_107_type 1373 -#define _tmp_108_type 1374 -#define _tmp_109_type 1375 -#define _tmp_110_type 1376 -#define _tmp_111_type 1377 -#define _tmp_112_type 1378 -#define _loop1_113_type 1379 -#define _tmp_114_type 1380 -#define _tmp_115_type 1381 -#define _tmp_116_type 1382 -#define _tmp_117_type 1383 -#define _tmp_118_type 1384 -#define _loop0_119_type 1385 -#define _loop0_120_type 1386 -#define _tmp_121_type 1387 -#define _tmp_122_type 1388 -#define _tmp_123_type 1389 -#define _tmp_124_type 1390 -#define _tmp_125_type 1391 -#define _tmp_126_type 1392 -#define _tmp_127_type 1393 -#define _tmp_128_type 1394 -#define _tmp_129_type 1395 -#define _loop0_130_type 1396 -#define _gather_131_type 1397 -#define _tmp_132_type 1398 -#define _tmp_133_type 1399 -#define _tmp_134_type 1400 -#define _tmp_135_type 1401 -#define _loop0_136_type 1402 -#define _gather_137_type 1403 -#define _tmp_138_type 1404 -#define _loop0_139_type 1405 -#define _gather_140_type 1406 -#define _loop0_141_type 1407 -#define _gather_142_type 1408 -#define _tmp_143_type 1409 -#define _loop0_144_type 1410 -#define _tmp_145_type 1411 -#define _tmp_146_type 1412 -#define _tmp_147_type 1413 -#define _tmp_148_type 1414 -#define _tmp_149_type 1415 -#define _tmp_150_type 1416 -#define _tmp_151_type 1417 -#define _tmp_152_type 1418 -#define _tmp_153_type 1419 -#define _tmp_154_type 1420 -#define _tmp_155_type 1421 -#define _tmp_156_type 1422 -#define _tmp_157_type 1423 -#define _tmp_158_type 1424 -#define _tmp_159_type 1425 -#define _tmp_160_type 1426 -#define _tmp_161_type 1427 -#define _tmp_162_type 1428 -#define _tmp_163_type 1429 -#define _tmp_164_type 1430 -#define _tmp_165_type 1431 -#define _tmp_166_type 1432 -#define _tmp_167_type 1433 -#define _tmp_168_type 1434 -#define _tmp_169_type 1435 -#define _tmp_170_type 1436 -#define _loop0_171_type 1437 -#define _tmp_172_type 1438 -#define _tmp_173_type 1439 -#define _tmp_174_type 1440 -#define _tmp_175_type 1441 -#define _tmp_176_type 1442 +#define invalid_raise_stmt_type 1214 +#define invalid_del_stmt_type 1215 +#define invalid_block_type 1216 +#define invalid_comprehension_type 1217 +#define invalid_dict_comprehension_type 1218 +#define invalid_parameters_type 1219 +#define invalid_default_type 1220 +#define invalid_star_etc_type 1221 +#define invalid_kwds_type 1222 +#define invalid_parameters_helper_type 1223 +#define invalid_lambda_parameters_type 1224 +#define invalid_lambda_parameters_helper_type 1225 +#define invalid_lambda_star_etc_type 1226 +#define invalid_lambda_kwds_type 1227 +#define invalid_double_type_comments_type 1228 +#define invalid_with_item_type 1229 +#define invalid_for_if_clause_type 1230 +#define invalid_for_target_type 1231 +#define invalid_group_type 1232 +#define invalid_import_type 1233 +#define invalid_dotted_as_name_type 1234 +#define invalid_import_from_as_name_type 1235 +#define invalid_import_from_targets_type 1236 +#define invalid_with_stmt_type 1237 +#define invalid_with_stmt_indent_type 1238 +#define invalid_try_stmt_type 1239 +#define invalid_except_stmt_type 1240 +#define invalid_except_star_stmt_type 1241 +#define invalid_finally_stmt_type 1242 +#define invalid_except_stmt_indent_type 1243 +#define invalid_except_star_stmt_indent_type 1244 
+#define invalid_match_stmt_type 1245 +#define invalid_case_block_type 1246 +#define invalid_as_pattern_type 1247 +#define invalid_class_pattern_type 1248 +#define invalid_class_argument_pattern_type 1249 +#define invalid_if_stmt_type 1250 +#define invalid_elif_stmt_type 1251 +#define invalid_else_stmt_type 1252 +#define invalid_while_stmt_type 1253 +#define invalid_for_stmt_type 1254 +#define invalid_def_raw_type 1255 +#define invalid_class_def_raw_type 1256 +#define invalid_double_starred_kvpairs_type 1257 +#define invalid_kvpair_type 1258 +#define invalid_starred_expression_unpacking_type 1259 +#define invalid_starred_expression_type 1260 +#define invalid_fstring_replacement_field_type 1261 +#define invalid_fstring_conversion_character_type 1262 +#define invalid_tstring_replacement_field_type 1263 +#define invalid_tstring_conversion_character_type 1264 +#define invalid_arithmetic_type 1265 +#define invalid_factor_type 1266 +#define invalid_type_params_type 1267 +#define _loop0_1_type 1268 +#define _loop1_2_type 1269 +#define _loop0_3_type 1270 +#define _gather_4_type 1271 +#define _tmp_5_type 1272 +#define _tmp_6_type 1273 +#define _tmp_7_type 1274 +#define _tmp_8_type 1275 +#define _tmp_9_type 1276 +#define _tmp_10_type 1277 +#define _tmp_11_type 1278 +#define _loop1_12_type 1279 +#define _tmp_13_type 1280 +#define _loop0_14_type 1281 +#define _gather_15_type 1282 +#define _tmp_16_type 1283 +#define _tmp_17_type 1284 +#define _loop0_18_type 1285 +#define _loop1_19_type 1286 +#define _loop0_20_type 1287 +#define _gather_21_type 1288 +#define _tmp_22_type 1289 +#define _loop0_23_type 1290 +#define _gather_24_type 1291 +#define _loop1_25_type 1292 +#define _tmp_26_type 1293 +#define _tmp_27_type 1294 +#define _loop0_28_type 1295 +#define _loop0_29_type 1296 +#define _loop1_30_type 1297 +#define _loop1_31_type 1298 +#define _loop0_32_type 1299 +#define _loop1_33_type 1300 +#define _loop0_34_type 1301 +#define _gather_35_type 1302 +#define _tmp_36_type 1303 +#define _loop1_37_type 1304 +#define _loop1_38_type 1305 +#define _loop1_39_type 1306 +#define _loop0_40_type 1307 +#define _gather_41_type 1308 +#define _tmp_42_type 1309 +#define _tmp_43_type 1310 +#define _tmp_44_type 1311 +#define _loop0_45_type 1312 +#define _gather_46_type 1313 +#define _loop0_47_type 1314 +#define _gather_48_type 1315 +#define _tmp_49_type 1316 +#define _loop0_50_type 1317 +#define _gather_51_type 1318 +#define _loop0_52_type 1319 +#define _gather_53_type 1320 +#define _loop0_54_type 1321 +#define _gather_55_type 1322 +#define _loop1_56_type 1323 +#define _loop1_57_type 1324 +#define _loop0_58_type 1325 +#define _gather_59_type 1326 +#define _loop1_60_type 1327 +#define _loop1_61_type 1328 +#define _loop1_62_type 1329 +#define _tmp_63_type 1330 +#define _loop0_64_type 1331 +#define _gather_65_type 1332 +#define _tmp_66_type 1333 +#define _tmp_67_type 1334 +#define _tmp_68_type 1335 +#define _tmp_69_type 1336 +#define _tmp_70_type 1337 +#define _loop0_71_type 1338 +#define _loop0_72_type 1339 +#define _loop1_73_type 1340 +#define _loop1_74_type 1341 +#define _loop0_75_type 1342 +#define _loop1_76_type 1343 +#define _loop0_77_type 1344 +#define _loop0_78_type 1345 +#define _loop0_79_type 1346 +#define _loop0_80_type 1347 +#define _loop1_81_type 1348 +#define _tmp_82_type 1349 +#define _loop0_83_type 1350 +#define _gather_84_type 1351 +#define _loop1_85_type 1352 +#define _loop0_86_type 1353 +#define _tmp_87_type 1354 +#define _loop0_88_type 1355 +#define _gather_89_type 1356 +#define _tmp_90_type 1357 +#define 
_loop0_91_type 1358 +#define _gather_92_type 1359 +#define _loop0_93_type 1360 +#define _gather_94_type 1361 +#define _loop0_95_type 1362 +#define _loop0_96_type 1363 +#define _gather_97_type 1364 +#define _loop1_98_type 1365 +#define _tmp_99_type 1366 +#define _loop0_100_type 1367 +#define _gather_101_type 1368 +#define _loop0_102_type 1369 +#define _gather_103_type 1370 +#define _tmp_104_type 1371 +#define _tmp_105_type 1372 +#define _loop0_106_type 1373 +#define _gather_107_type 1374 +#define _tmp_108_type 1375 +#define _tmp_109_type 1376 +#define _tmp_110_type 1377 +#define _tmp_111_type 1378 +#define _tmp_112_type 1379 +#define _loop1_113_type 1380 +#define _tmp_114_type 1381 +#define _tmp_115_type 1382 +#define _tmp_116_type 1383 +#define _tmp_117_type 1384 +#define _tmp_118_type 1385 +#define _loop0_119_type 1386 +#define _loop0_120_type 1387 +#define _tmp_121_type 1388 +#define _tmp_122_type 1389 +#define _tmp_123_type 1390 +#define _tmp_124_type 1391 +#define _tmp_125_type 1392 +#define _tmp_126_type 1393 +#define _tmp_127_type 1394 +#define _tmp_128_type 1395 +#define _tmp_129_type 1396 +#define _loop0_130_type 1397 +#define _gather_131_type 1398 +#define _tmp_132_type 1399 +#define _tmp_133_type 1400 +#define _tmp_134_type 1401 +#define _tmp_135_type 1402 +#define _loop0_136_type 1403 +#define _gather_137_type 1404 +#define _tmp_138_type 1405 +#define _loop0_139_type 1406 +#define _gather_140_type 1407 +#define _loop0_141_type 1408 +#define _gather_142_type 1409 +#define _tmp_143_type 1410 +#define _loop0_144_type 1411 +#define _tmp_145_type 1412 +#define _tmp_146_type 1413 +#define _tmp_147_type 1414 +#define _tmp_148_type 1415 +#define _tmp_149_type 1416 +#define _tmp_150_type 1417 +#define _tmp_151_type 1418 +#define _tmp_152_type 1419 +#define _tmp_153_type 1420 +#define _tmp_154_type 1421 +#define _tmp_155_type 1422 +#define _tmp_156_type 1423 +#define _tmp_157_type 1424 +#define _tmp_158_type 1425 +#define _tmp_159_type 1426 +#define _tmp_160_type 1427 +#define _tmp_161_type 1428 +#define _tmp_162_type 1429 +#define _tmp_163_type 1430 +#define _tmp_164_type 1431 +#define _tmp_165_type 1432 +#define _tmp_166_type 1433 +#define _tmp_167_type 1434 +#define _tmp_168_type 1435 +#define _tmp_169_type 1436 +#define _tmp_170_type 1437 +#define _loop0_171_type 1438 +#define _tmp_172_type 1439 +#define _tmp_173_type 1440 +#define _tmp_174_type 1441 +#define _tmp_175_type 1442 +#define _tmp_176_type 1443 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -742,6 +743,7 @@ static void *invalid_expression_rule(Parser *p); static void *invalid_named_expression_rule(Parser *p); static void *invalid_assignment_rule(Parser *p); static expr_ty invalid_ann_assign_target_rule(Parser *p); +static void *invalid_raise_stmt_rule(Parser *p); static void *invalid_del_stmt_rule(Parser *p); static void *invalid_block_rule(Parser *p); static void *invalid_comprehension_rule(Parser *p); @@ -1698,7 +1700,7 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'raise' raise_stmt")); stmt_ty raise_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 525) // token='raise' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 627) // token='raise' && (raise_stmt_var = raise_stmt_rule(p)) // raise_stmt ) @@ -1740,7 +1742,7 @@ simple_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt")); stmt_ty del_stmt_var; if ( - 
_PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 625) // token='del' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 629) // token='del' && (del_stmt_var = del_stmt_rule(p)) // del_stmt ) @@ -1936,7 +1938,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt")); stmt_ty if_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 682) // token='if' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 686) // token='if' && (if_stmt_var = if_stmt_rule(p)) // if_stmt ) @@ -2020,7 +2022,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt")); stmt_ty try_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 656) // token='try' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 660) // token='try' && (try_stmt_var = try_stmt_rule(p)) // try_stmt ) @@ -2041,7 +2043,7 @@ compound_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt")); stmt_ty while_stmt_var; if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 689) // token='while' + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 693) // token='while' && (while_stmt_var = while_stmt_rule(p)) // while_stmt ) @@ -2767,7 +2769,7 @@ return_stmt_rule(Parser *p) return _res; } -// raise_stmt: 'raise' expression ['from' expression] | 'raise' +// raise_stmt: invalid_raise_stmt | 'raise' expression ['from' expression] | 'raise' static stmt_ty raise_stmt_rule(Parser *p) { @@ -2789,6 +2791,25 @@ raise_stmt_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro + if (p->call_invalid_rules) { // invalid_raise_stmt + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_raise_stmt")); + void *invalid_raise_stmt_var; + if ( + (invalid_raise_stmt_var = invalid_raise_stmt_rule(p)) // invalid_raise_stmt + ) + { + D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_raise_stmt")); + _res = invalid_raise_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s raise_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_raise_stmt")); + } { // 'raise' expression ['from' expression] if (p->error_indicator) { p->level--; @@ -2799,7 +2820,7 @@ raise_stmt_rule(Parser *p) expr_ty a; void *b; if ( - (_keyword = _PyPegen_expect_token(p, 525)) // token='raise' + (_keyword = _PyPegen_expect_token(p, 627)) // token='raise' && (a = expression_rule(p)) // expression && @@ -2836,7 +2857,7 @@ raise_stmt_rule(Parser *p) D(fprintf(stderr, "%*c> raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 525)) // token='raise' + (_keyword = _PyPegen_expect_token(p, 627)) // token='raise' ) { D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise'")); @@ -3209,7 +3230,7 @@ del_stmt_rule(Parser *p) Token * _keyword; asdl_expr_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 625)) // token='del' + (_keyword = _PyPegen_expect_token(p, 629)) // token='del' && (a = del_targets_rule(p)) // del_targets && @@ -3498,7 +3519,7 @@ import_name_rule(Parser *p) Token * _keyword; asdl_alias_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='import' + (_keyword = _PyPegen_expect_token(p, 638)) // token='import' && (a = dotted_as_names_rule(p)) // dotted_as_names ) @@ -3567,13 +3588,13 @@ import_from_rule(Parser *p) expr_ty b; asdl_alias_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 633)) // token='from' + (_keyword = _PyPegen_expect_token(p, 637)) // token='from' && (a = _loop0_18_rule(p)) // (('.' | '...'))* && (b = dotted_name_rule(p)) // dotted_name && - (_keyword_1 = _PyPegen_expect_token(p, 634)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 638)) // token='import' && (c = import_from_targets_rule(p)) // import_from_targets ) @@ -3611,11 +3632,11 @@ import_from_rule(Parser *p) asdl_seq * a; asdl_alias_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 633)) // token='from' + (_keyword = _PyPegen_expect_token(p, 637)) // token='from' && (a = _loop1_19_rule(p)) // (('.' 
| '...'))+ && - (_keyword_1 = _PyPegen_expect_token(p, 634)) // token='import' + (_keyword_1 = _PyPegen_expect_token(p, 638)) // token='import' && (b = import_from_targets_rule(p)) // import_from_targets ) @@ -4402,7 +4423,7 @@ class_def_raw_rule(Parser *p) asdl_stmt_seq* c; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 701)) // token='class' + (_keyword = _PyPegen_expect_token(p, 705)) // token='class' && (a = _PyPegen_name_token(p)) // NAME && @@ -4569,7 +4590,7 @@ function_def_raw_rule(Parser *p) void *t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 699)) // token='def' + (_keyword = _PyPegen_expect_token(p, 703)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -4630,9 +4651,9 @@ function_def_raw_rule(Parser *p) void *t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 698)) // token='async' + (_keyword = _PyPegen_expect_token(p, 702)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 699)) // token='def' + (_keyword_1 = _PyPegen_expect_token(p, 703)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -5970,7 +5991,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 682)) // token='if' + (_keyword = _PyPegen_expect_token(p, 686)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -6015,7 +6036,7 @@ if_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 682)) // token='if' + (_keyword = _PyPegen_expect_token(p, 686)) // token='if' && (a = named_expression_rule(p)) // named_expression && @@ -6110,7 +6131,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; stmt_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 687)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 691)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6155,7 +6176,7 @@ elif_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 687)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 691)) // token='elif' && (a = named_expression_rule(p)) // named_expression && @@ -6236,7 +6257,7 @@ else_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 686)) // token='else' + (_keyword = _PyPegen_expect_token(p, 690)) // token='else' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -6315,7 +6336,7 @@ while_stmt_rule(Parser *p) asdl_stmt_seq* b; void *c; if ( - (_keyword = _PyPegen_expect_token(p, 689)) // token='while' + (_keyword = _PyPegen_expect_token(p, 693)) // token='while' && (a = named_expression_rule(p)) // named_expression && @@ -6415,11 +6436,11 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 694)) // token='for' + (_keyword = _PyPegen_expect_token(p, 698)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 695)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 699)) // token='in' && (_cut_var = 1) && @@ -6477,13 +6498,13 @@ for_stmt_rule(Parser *p) expr_ty t; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 698)) // token='async' + (_keyword = _PyPegen_expect_token(p, 702)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 694)) // token='for' + (_keyword_1 = _PyPegen_expect_token(p, 698)) // token='for' && (t = star_targets_rule(p)) // star_targets && - (_keyword_2 = _PyPegen_expect_token(p, 695)) // token='in' + (_keyword_2 = _PyPegen_expect_token(p, 699)) // token='in' && (_cut_var = 
1) && @@ -6612,7 +6633,7 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 647)) // token='with' + (_keyword = _PyPegen_expect_token(p, 651)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -6663,7 +6684,7 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 647)) // token='with' + (_keyword = _PyPegen_expect_token(p, 651)) // token='with' && (a = (asdl_withitem_seq*)_gather_35_rule(p)) // ','.with_item+ && @@ -6712,9 +6733,9 @@ with_stmt_rule(Parser *p) asdl_withitem_seq* a; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 698)) // token='async' + (_keyword = _PyPegen_expect_token(p, 702)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 647)) // token='with' + (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -6764,9 +6785,9 @@ with_stmt_rule(Parser *p) asdl_stmt_seq* b; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 698)) // token='async' + (_keyword = _PyPegen_expect_token(p, 702)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 647)) // token='with' + (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='with' && (a = (asdl_withitem_seq*)_gather_35_rule(p)) // ','.with_item+ && @@ -6852,7 +6873,7 @@ with_item_rule(Parser *p) if ( (e = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 680)) // token='as' + (_keyword = _PyPegen_expect_token(p, 684)) // token='as' && (t = star_target_rule(p)) // star_target && @@ -6977,7 +6998,7 @@ try_stmt_rule(Parser *p) asdl_stmt_seq* b; asdl_stmt_seq* f; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='try' + (_keyword = _PyPegen_expect_token(p, 660)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7021,7 +7042,7 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='try' + (_keyword = _PyPegen_expect_token(p, 660)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7069,7 +7090,7 @@ try_stmt_rule(Parser *p) asdl_excepthandler_seq* ex; void *f; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='try' + (_keyword = _PyPegen_expect_token(p, 660)) // token='try' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7168,7 +7189,7 @@ except_block_rule(Parser *p) asdl_stmt_seq* b; expr_ty e; if ( - (_keyword = _PyPegen_expect_token(p, 677)) // token='except' + (_keyword = _PyPegen_expect_token(p, 681)) // token='except' && (e = expression_rule(p)) // expression && @@ -7212,11 +7233,11 @@ except_block_rule(Parser *p) expr_ty e; expr_ty t; if ( - (_keyword = _PyPegen_expect_token(p, 677)) // token='except' + (_keyword = _PyPegen_expect_token(p, 681)) // token='except' && (e = expression_rule(p)) // expression && - (_keyword_1 = _PyPegen_expect_token(p, 680)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 684)) // token='as' && (t = _PyPegen_name_token(p)) // NAME && @@ -7258,7 +7279,7 @@ except_block_rule(Parser *p) asdl_stmt_seq* b; expr_ty e; if ( - (_keyword = _PyPegen_expect_token(p, 677)) // token='except' + (_keyword = _PyPegen_expect_token(p, 681)) // token='except' && (e = expressions_rule(p)) // expressions && @@ -7299,7 +7320,7 @@ except_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 677)) // 
token='except' + (_keyword = _PyPegen_expect_token(p, 681)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -7411,7 +7432,7 @@ except_star_block_rule(Parser *p) asdl_stmt_seq* b; expr_ty e; if ( - (_keyword = _PyPegen_expect_token(p, 677)) // token='except' + (_keyword = _PyPegen_expect_token(p, 681)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -7458,13 +7479,13 @@ except_star_block_rule(Parser *p) expr_ty e; expr_ty t; if ( - (_keyword = _PyPegen_expect_token(p, 677)) // token='except' + (_keyword = _PyPegen_expect_token(p, 681)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (e = expression_rule(p)) // expression && - (_keyword_1 = _PyPegen_expect_token(p, 680)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 684)) // token='as' && (t = _PyPegen_name_token(p)) // NAME && @@ -7507,7 +7528,7 @@ except_star_block_rule(Parser *p) asdl_stmt_seq* b; expr_ty e; if ( - (_keyword = _PyPegen_expect_token(p, 677)) // token='except' + (_keyword = _PyPegen_expect_token(p, 681)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -7607,7 +7628,7 @@ finally_block_rule(Parser *p) Token * _literal; asdl_stmt_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 673)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 677)) // token='finally' && (_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':' && @@ -7915,7 +7936,7 @@ guard_rule(Parser *p) Token * _keyword; expr_ty guard; if ( - (_keyword = _PyPegen_expect_token(p, 682)) // token='if' + (_keyword = _PyPegen_expect_token(p, 686)) // token='if' && (guard = named_expression_rule(p)) // named_expression ) @@ -8110,7 +8131,7 @@ as_pattern_rule(Parser *p) if ( (pattern = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 680)) // token='as' + (_keyword = _PyPegen_expect_token(p, 684)) // token='as' && (target = pattern_capture_target_rule(p)) // pattern_capture_target ) @@ -11407,11 +11428,11 @@ expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 682)) // token='if' + (_keyword = _PyPegen_expect_token(p, 686)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 686)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 690)) // token='else' && (c = expression_rule(p)) // expression ) @@ -11517,7 +11538,7 @@ yield_expr_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 587)) // token='yield' && - (_keyword_1 = _PyPegen_expect_token(p, 633)) // token='from' + (_keyword_1 = _PyPegen_expect_token(p, 637)) // token='from' && (a = expression_rule(p)) // expression ) @@ -12293,7 +12314,7 @@ inversion_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 703)) // token='not' + (_keyword = _PyPegen_expect_token(p, 707)) // token='not' && (a = inversion_rule(p)) // inversion ) @@ -12947,9 +12968,9 @@ notin_bitwise_or_rule(Parser *p) Token * _keyword_1; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 703)) // token='not' + (_keyword = _PyPegen_expect_token(p, 707)) // token='not' && - (_keyword_1 = _PyPegen_expect_token(p, 695)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 699)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -12995,7 +13016,7 @@ in_bitwise_or_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 695)) // token='in' + (_keyword = 
_PyPegen_expect_token(p, 699)) // token='in' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -13044,7 +13065,7 @@ isnot_bitwise_or_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 596)) // token='is' && - (_keyword_1 = _PyPegen_expect_token(p, 703)) // token='not' + (_keyword_1 = _PyPegen_expect_token(p, 707)) // token='not' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -17651,13 +17672,13 @@ for_if_clause_rule(Parser *p) expr_ty b; asdl_expr_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 698)) // token='async' + (_keyword = _PyPegen_expect_token(p, 702)) // token='async' && - (_keyword_1 = _PyPegen_expect_token(p, 694)) // token='for' + (_keyword_1 = _PyPegen_expect_token(p, 698)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_2 = _PyPegen_expect_token(p, 695)) // token='in' + (_keyword_2 = _PyPegen_expect_token(p, 699)) // token='in' && (_cut_var = 1) && @@ -17696,11 +17717,11 @@ for_if_clause_rule(Parser *p) expr_ty b; asdl_expr_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 694)) // token='for' + (_keyword = _PyPegen_expect_token(p, 698)) // token='for' && (a = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 695)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 699)) // token='in' && (_cut_var = 1) && @@ -21001,11 +21022,11 @@ expression_without_invalid_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 682)) // token='if' + (_keyword = _PyPegen_expect_token(p, 686)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 686)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 690)) // token='else' && (c = expression_rule(p)) // expression ) @@ -21305,7 +21326,7 @@ invalid_expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 682)) // token='if' + (_keyword = _PyPegen_expect_token(p, 686)) // token='if' && (b = disjunction_rule(p)) // disjunction && @@ -21338,11 +21359,11 @@ invalid_expression_rule(Parser *p) if ( (a = disjunction_rule(p)) // disjunction && - (_keyword = _PyPegen_expect_token(p, 682)) // token='if' + (_keyword = _PyPegen_expect_token(p, 686)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 686)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 690)) // token='else' && _PyPegen_lookahead(0, (void *(*)(Parser *)) expression_rule, p) ) @@ -21374,11 +21395,11 @@ invalid_expression_rule(Parser *p) if ( (a = (stmt_ty)_tmp_116_rule(p)) // pass_stmt | break_stmt | continue_stmt && - (_keyword = _PyPegen_expect_token(p, 682)) // token='if' + (_keyword = _PyPegen_expect_token(p, 686)) // token='if' && (b = disjunction_rule(p)) // disjunction && - (_keyword_1 = _PyPegen_expect_token(p, 686)) // token='else' + (_keyword_1 = _PyPegen_expect_token(p, 690)) // token='else' && (c = simple_stmt_rule(p)) // simple_stmt ) @@ -21889,6 +21910,82 @@ invalid_ann_assign_target_rule(Parser *p) return _res; } +// invalid_raise_stmt: 'raise' 'from' | 'raise' expression 'from' +static void * +invalid_raise_stmt_rule(Parser *p) +{ + if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) { + _Pypegen_stack_overflow(p); + } + if (p->error_indicator) { + p->level--; + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'raise' 'from' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> 
invalid_raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise' 'from'")); + Token * a; + Token * b; + if ( + (a = _PyPegen_expect_token(p, 627)) // token='raise' + && + (b = _PyPegen_expect_token(p, 637)) // token='from' + ) + { + D(fprintf(stderr, "%*c+ invalid_raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise' 'from'")); + _res = RAISE_SYNTAX_ERROR_KNOWN_RANGE ( a , b , "did you forget an expression between 'raise' and 'from'?" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_raise_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'raise' 'from'")); + } + { // 'raise' expression 'from' + if (p->error_indicator) { + p->level--; + return NULL; + } + D(fprintf(stderr, "%*c> invalid_raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise' expression 'from'")); + Token * _keyword; + Token * a; + expr_ty expression_var; + if ( + (_keyword = _PyPegen_expect_token(p, 627)) // token='raise' + && + (expression_var = expression_rule(p)) // expression + && + (a = _PyPegen_expect_token(p, 637)) // token='from' + ) + { + D(fprintf(stderr, "%*c+ invalid_raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise' expression 'from'")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "did you forget an expression after 'from'?" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + p->level--; + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_raise_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'raise' expression 'from'")); + } + _res = NULL; + done: + p->level--; + return _res; +} + // invalid_del_stmt: 'del' star_expressions static void * invalid_del_stmt_rule(Parser *p) @@ -21911,7 +22008,7 @@ invalid_del_stmt_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 625)) // token='del' + (_keyword = _PyPegen_expect_token(p, 629)) // token='del' && (a = star_expressions_rule(p)) // star_expressions ) @@ -23347,7 +23444,7 @@ invalid_with_item_rule(Parser *p) if ( (expression_var = expression_rule(p)) // expression && - (_keyword = _PyPegen_expect_token(p, 680)) // token='as' + (_keyword = _PyPegen_expect_token(p, 684)) // token='as' && (a = expression_rule(p)) // expression && @@ -23397,13 +23494,13 @@ invalid_for_if_clause_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings void *_tmp_135_var; if ( - (_opt_var = _PyPegen_expect_token(p, 698), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 694)) // token='for' + (_keyword = _PyPegen_expect_token(p, 698)) // token='for' && (_tmp_135_var = _tmp_135_rule(p)) // bitwise_or ((',' bitwise_or))* ','? && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 695) // token='in' + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 699) // token='in' ) { D(fprintf(stderr, "%*c+ invalid_for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'for' (bitwise_or ((',' bitwise_or))* ','?) !'in'")); @@ -23449,9 +23546,9 @@ invalid_for_target_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings expr_ty a; if ( - (_opt_var = _PyPegen_expect_token(p, 698), !p->error_indicator) // 'async'? 
+ (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 694)) // token='for' + (_keyword = _PyPegen_expect_token(p, 698)) // token='for' && (a = star_expressions_rule(p)) // star_expressions ) @@ -23581,11 +23678,11 @@ invalid_import_rule(Parser *p) Token * a; expr_ty dotted_name_var; if ( - (a = _PyPegen_expect_token(p, 634)) // token='import' + (a = _PyPegen_expect_token(p, 638)) // token='import' && (_gather_137_var = _gather_137_rule(p)) // ','.dotted_name+ && - (_keyword = _PyPegen_expect_token(p, 633)) // token='from' + (_keyword = _PyPegen_expect_token(p, 637)) // token='from' && (dotted_name_var = dotted_name_rule(p)) // dotted_name ) @@ -23612,7 +23709,7 @@ invalid_import_rule(Parser *p) Token * _keyword; Token * token; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='import' + (_keyword = _PyPegen_expect_token(p, 638)) // token='import' && (token = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -23661,7 +23758,7 @@ invalid_dotted_as_name_rule(Parser *p) if ( (dotted_name_var = dotted_name_rule(p)) // dotted_name && - (_keyword = _PyPegen_expect_token(p, 680)) // token='as' + (_keyword = _PyPegen_expect_token(p, 684)) // token='as' && _PyPegen_lookahead(0, (void *(*)(Parser *)) _tmp_138_rule, p) && @@ -23712,7 +23809,7 @@ invalid_import_from_as_name_rule(Parser *p) if ( (name_var = _PyPegen_name_token(p)) // NAME && - (_keyword = _PyPegen_expect_token(p, 680)) // token='as' + (_keyword = _PyPegen_expect_token(p, 684)) // token='as' && _PyPegen_lookahead(0, (void *(*)(Parser *)) _tmp_138_rule, p) && @@ -23838,9 +23935,9 @@ invalid_with_stmt_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 698), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 647)) // token='with' + (_keyword = _PyPegen_expect_token(p, 651)) // token='with' && (_gather_140_var = _gather_140_rule(p)) // ','.(expression ['as' star_target])+ && @@ -23876,9 +23973,9 @@ invalid_with_stmt_rule(Parser *p) UNUSED(_opt_var_1); // Silence compiler warnings Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 698), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 647)) // token='with' + (_keyword = _PyPegen_expect_token(p, 651)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -23938,9 +24035,9 @@ invalid_with_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 698), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 647)) // token='with' + (a = _PyPegen_expect_token(p, 651)) // token='with' && (_gather_140_var = _gather_140_rule(p)) // ','.(expression ['as' star_target])+ && @@ -23981,9 +24078,9 @@ invalid_with_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 698), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'? 
&& - (a = _PyPegen_expect_token(p, 647)) // token='with' + (a = _PyPegen_expect_token(p, 651)) // token='with' && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && @@ -24046,7 +24143,7 @@ invalid_try_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 656)) // token='try' + (a = _PyPegen_expect_token(p, 660)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24078,7 +24175,7 @@ invalid_try_stmt_rule(Parser *p) Token * _literal; asdl_stmt_seq* block_var; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='try' + (_keyword = _PyPegen_expect_token(p, 660)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24117,7 +24214,7 @@ invalid_try_stmt_rule(Parser *p) Token * b; expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='try' + (_keyword = _PyPegen_expect_token(p, 660)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24125,7 +24222,7 @@ invalid_try_stmt_rule(Parser *p) && (_loop1_37_var = _loop1_37_rule(p)) // except_block+ && - (a = _PyPegen_expect_token(p, 677)) // token='except' + (a = _PyPegen_expect_token(p, 681)) // token='except' && (b = _PyPegen_expect_token(p, 16)) // token='*' && @@ -24164,7 +24261,7 @@ invalid_try_stmt_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings Token * a; if ( - (_keyword = _PyPegen_expect_token(p, 656)) // token='try' + (_keyword = _PyPegen_expect_token(p, 660)) // token='try' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24172,7 +24269,7 @@ invalid_try_stmt_rule(Parser *p) && (_loop1_38_var = _loop1_38_rule(p)) // except_star_block+ && - (a = _PyPegen_expect_token(p, 677)) // token='except' + (a = _PyPegen_expect_token(p, 681)) // token='except' && (_opt_var = _tmp_145_rule(p), !p->error_indicator) // [expression ['as' NAME]] && @@ -24229,7 +24326,7 @@ invalid_except_stmt_rule(Parser *p) expr_ty expressions_var; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 677)) // token='except' + (_keyword = _PyPegen_expect_token(p, 681)) // token='except' && (a = expression_rule(p)) // expression && @@ -24237,7 +24334,7 @@ invalid_except_stmt_rule(Parser *p) && (expressions_var = expressions_rule(p)) // expressions && - (_keyword_1 = _PyPegen_expect_token(p, 680)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 684)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -24269,7 +24366,7 @@ invalid_except_stmt_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 677)) // token='except' + (a = _PyPegen_expect_token(p, 681)) // token='except' && (expression_var = expression_rule(p)) // expression && @@ -24300,7 +24397,7 @@ invalid_except_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 677)) // token='except' + (a = _PyPegen_expect_token(p, 681)) // token='except' && (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) @@ -24331,11 +24428,11 @@ invalid_except_stmt_rule(Parser *p) asdl_stmt_seq* block_var; expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 677)) // token='except' + (_keyword = _PyPegen_expect_token(p, 681)) // token='except' && (expression_var = expression_rule(p)) // expression && - (_keyword_1 = _PyPegen_expect_token(p, 680)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 684)) // token='as' && (a = expression_rule(p)) // expression && @@ -24395,7 +24492,7 @@ 
invalid_except_star_stmt_rule(Parser *p) expr_ty expressions_var; expr_ty name_var; if ( - (_keyword = _PyPegen_expect_token(p, 677)) // token='except' + (_keyword = _PyPegen_expect_token(p, 681)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -24405,7 +24502,7 @@ invalid_except_star_stmt_rule(Parser *p) && (expressions_var = expressions_rule(p)) // expressions && - (_keyword_1 = _PyPegen_expect_token(p, 680)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 684)) // token='as' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -24438,7 +24535,7 @@ invalid_except_star_stmt_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 677)) // token='except' + (a = _PyPegen_expect_token(p, 681)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -24472,7 +24569,7 @@ invalid_except_star_stmt_rule(Parser *p) void *_tmp_146_var; Token * a; if ( - (a = _PyPegen_expect_token(p, 677)) // token='except' + (a = _PyPegen_expect_token(p, 681)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -24506,13 +24603,13 @@ invalid_except_star_stmt_rule(Parser *p) asdl_stmt_seq* block_var; expr_ty expression_var; if ( - (_keyword = _PyPegen_expect_token(p, 677)) // token='except' + (_keyword = _PyPegen_expect_token(p, 681)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && (expression_var = expression_rule(p)) // expression && - (_keyword_1 = _PyPegen_expect_token(p, 680)) // token='as' + (_keyword_1 = _PyPegen_expect_token(p, 684)) // token='as' && (a = expression_rule(p)) // expression && @@ -24563,7 +24660,7 @@ invalid_finally_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 673)) // token='finally' + (a = _PyPegen_expect_token(p, 677)) // token='finally' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24619,7 +24716,7 @@ invalid_except_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 677)) // token='except' + (a = _PyPegen_expect_token(p, 681)) // token='except' && (expression_var = expression_rule(p)) // expression && @@ -24655,7 +24752,7 @@ invalid_except_stmt_indent_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 677)) // token='except' + (a = _PyPegen_expect_token(p, 681)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -24711,7 +24808,7 @@ invalid_except_star_stmt_indent_rule(Parser *p) expr_ty expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 677)) // token='except' + (a = _PyPegen_expect_token(p, 681)) // token='except' && (_literal = _PyPegen_expect_token(p, 16)) // token='*' && @@ -24950,7 +25047,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 680)) // token='as' + (_keyword = _PyPegen_expect_token(p, 684)) // token='as' && (a = _PyPegen_expect_soft_keyword(p, "_")) // soft_keyword='"_"' ) @@ -24980,7 +25077,7 @@ invalid_as_pattern_rule(Parser *p) if ( (or_pattern_var = or_pattern_rule(p)) // or_pattern && - (_keyword = _PyPegen_expect_token(p, 680)) // token='as' + (_keyword = _PyPegen_expect_token(p, 684)) // token='as' && (a = expression_rule(p)) // expression ) @@ -25132,7 +25229,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 682)) // 
token='if' + (_keyword = _PyPegen_expect_token(p, 686)) // token='if' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -25163,7 +25260,7 @@ invalid_if_stmt_rule(Parser *p) expr_ty a_1; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 682)) // token='if' + (a = _PyPegen_expect_token(p, 686)) // token='if' && (a_1 = named_expression_rule(p)) // named_expression && @@ -25218,7 +25315,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 687)) // token='elif' + (_keyword = _PyPegen_expect_token(p, 691)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -25249,7 +25346,7 @@ invalid_elif_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 687)) // token='elif' + (a = _PyPegen_expect_token(p, 691)) // token='elif' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -25302,7 +25399,7 @@ invalid_else_stmt_rule(Parser *p) Token * a; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 686)) // token='else' + (a = _PyPegen_expect_token(p, 690)) // token='else' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -25335,13 +25432,13 @@ invalid_else_stmt_rule(Parser *p) Token * _literal; asdl_stmt_seq* block_var; if ( - (_keyword = _PyPegen_expect_token(p, 686)) // token='else' + (_keyword = _PyPegen_expect_token(p, 690)) // token='else' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (block_var = block_rule(p)) // block && - (_keyword_1 = _PyPegen_expect_token(p, 687)) // token='elif' + (_keyword_1 = _PyPegen_expect_token(p, 691)) // token='elif' ) { D(fprintf(stderr, "%*c+ invalid_else_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else' ':' block 'elif'")); @@ -25388,7 +25485,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 689)) // token='while' + (_keyword = _PyPegen_expect_token(p, 693)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -25419,7 +25516,7 @@ invalid_while_stmt_rule(Parser *p) expr_ty named_expression_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 689)) // token='while' + (a = _PyPegen_expect_token(p, 693)) // token='while' && (named_expression_var = named_expression_rule(p)) // named_expression && @@ -25478,13 +25575,13 @@ invalid_for_stmt_rule(Parser *p) expr_ty star_expressions_var; expr_ty star_targets_var; if ( - (_opt_var = _PyPegen_expect_token(p, 698), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 694)) // token='for' + (_keyword = _PyPegen_expect_token(p, 698)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword_1 = _PyPegen_expect_token(p, 695)) // token='in' + (_keyword_1 = _PyPegen_expect_token(p, 699)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -25519,13 +25616,13 @@ invalid_for_stmt_rule(Parser *p) expr_ty star_expressions_var; expr_ty star_targets_var; if ( - (_opt_var = _PyPegen_expect_token(p, 698), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'? 
&& - (a = _PyPegen_expect_token(p, 694)) // token='for' + (a = _PyPegen_expect_token(p, 698)) // token='for' && (star_targets_var = star_targets_rule(p)) // star_targets && - (_keyword = _PyPegen_expect_token(p, 695)) // token='in' + (_keyword = _PyPegen_expect_token(p, 699)) // token='in' && (star_expressions_var = star_expressions_rule(p)) // star_expressions && @@ -25591,9 +25688,9 @@ invalid_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (_opt_var = _PyPegen_expect_token(p, 698), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'? && - (a = _PyPegen_expect_token(p, 699)) // token='def' + (a = _PyPegen_expect_token(p, 703)) // token='def' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -25650,9 +25747,9 @@ invalid_def_raw_rule(Parser *p) asdl_stmt_seq* block_var; expr_ty name_var; if ( - (_opt_var = _PyPegen_expect_token(p, 698), !p->error_indicator) // 'async'? + (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'? && - (_keyword = _PyPegen_expect_token(p, 699)) // token='def' + (_keyword = _PyPegen_expect_token(p, 703)) // token='def' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -25716,7 +25813,7 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (_keyword = _PyPegen_expect_token(p, 701)) // token='class' + (_keyword = _PyPegen_expect_token(p, 705)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -25755,7 +25852,7 @@ invalid_class_def_raw_rule(Parser *p) expr_ty name_var; Token * newline_var; if ( - (a = _PyPegen_expect_token(p, 701)) // token='class' + (a = _PyPegen_expect_token(p, 705)) // token='class' && (name_var = _PyPegen_name_token(p)) // NAME && @@ -27015,7 +27112,7 @@ invalid_arithmetic_rule(Parser *p) && (_tmp_153_var = _tmp_153_rule(p)) // '+' | '-' | '*' | '/' | '%' | '//' | '@' && - (a = _PyPegen_expect_token(p, 703)) // token='not' + (a = _PyPegen_expect_token(p, 707)) // token='not' && (b = inversion_rule(p)) // inversion ) @@ -27064,7 +27161,7 @@ invalid_factor_rule(Parser *p) if ( (_tmp_154_var = _tmp_154_rule(p)) // '+' | '-' | '~' && - (a = _PyPegen_expect_token(p, 703)) // token='not' + (a = _PyPegen_expect_token(p, 707)) // token='not' && (b = factor_rule(p)) // factor ) @@ -27411,7 +27508,7 @@ _tmp_5_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 634)) // token='import' + (_keyword = _PyPegen_expect_token(p, 638)) // token='import' ) { D(fprintf(stderr, "%*c+ _tmp_5[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'")); @@ -27430,7 +27527,7 @@ _tmp_5_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 633)) // token='from' + (_keyword = _PyPegen_expect_token(p, 637)) // token='from' ) { D(fprintf(stderr, "%*c+ _tmp_5[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'")); @@ -27468,7 +27565,7 @@ _tmp_6_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 699)) // token='def' + (_keyword = _PyPegen_expect_token(p, 703)) // token='def' ) { D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'")); @@ -27506,7 +27603,7 @@ _tmp_6_rule(Parser *p) D(fprintf(stderr, "%*c> 
_tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 698)) // token='async' + (_keyword = _PyPegen_expect_token(p, 702)) // token='async' ) { D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); @@ -27544,7 +27641,7 @@ _tmp_7_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 701)) // token='class' + (_keyword = _PyPegen_expect_token(p, 705)) // token='class' ) { D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'")); @@ -27601,7 +27698,7 @@ _tmp_8_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 647)) // token='with' + (_keyword = _PyPegen_expect_token(p, 651)) // token='with' ) { D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'")); @@ -27620,7 +27717,7 @@ _tmp_8_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 698)) // token='async' + (_keyword = _PyPegen_expect_token(p, 702)) // token='async' ) { D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); @@ -27658,7 +27755,7 @@ _tmp_9_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 694)) // token='for' + (_keyword = _PyPegen_expect_token(p, 698)) // token='for' ) { D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'")); @@ -27677,7 +27774,7 @@ _tmp_9_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 698)) // token='async' + (_keyword = _PyPegen_expect_token(p, 702)) // token='async' ) { D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'")); @@ -27902,7 +27999,7 @@ _tmp_13_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 633)) // token='from' + (_keyword = _PyPegen_expect_token(p, 637)) // token='from' && (z = expression_rule(p)) // expression ) @@ -28424,7 +28521,7 @@ _tmp_22_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 680)) // token='as' + (_keyword = _PyPegen_expect_token(p, 684)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -34188,7 +34285,7 @@ _tmp_115_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 686)) // token='else' + (_keyword = _PyPegen_expect_token(p, 690)) // token='else' ) { D(fprintf(stderr, "%*c+ _tmp_115[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'")); @@ -35927,7 +36024,7 @@ _tmp_143_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 677)) // token='except' + (_keyword = _PyPegen_expect_token(p, 681)) // token='except' ) { D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'")); @@ -35946,7 +36043,7 @@ 
_tmp_143_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 673)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 677)) // token='finally' ) { D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'")); @@ -37169,7 +37266,7 @@ _tmp_163_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 682)) // token='if' + (_keyword = _PyPegen_expect_token(p, 686)) // token='if' && (z = disjunction_rule(p)) // disjunction ) @@ -37827,7 +37924,7 @@ _tmp_176_rule(Parser *p) Token * _keyword; expr_ty star_target_var; if ( - (_keyword = _PyPegen_expect_token(p, 680)) // token='as' + (_keyword = _PyPegen_expect_token(p, 684)) // token='as' && (star_target_var = star_target_rule(p)) // star_target ) diff --git a/Python/bytecodes.c b/Python/bytecodes.c index f02e32fd1d3..c4b13da5db4 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -3125,100 +3125,49 @@ dummy_func( } replaced op(_FOR_ITER, (iter, null_or_index -- iter, null_or_index, next)) { - /* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */ - PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - if (PyStackRef_IsTaggedInt(null_or_index)) { - next = _PyForIter_NextWithIndex(iter_o, null_or_index); - if (PyStackRef_IsNull(next)) { - JUMPBY(oparg + 1); - DISPATCH(); - } - null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index); - } - else { - PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o); - if (next_o == NULL) { - if (_PyErr_Occurred(tstate)) { - int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration); - if (!matches) { - ERROR_NO_POP(); - } - _PyEval_MonitorRaise(tstate, frame, this_instr); - _PyErr_Clear(tstate); - } - /* iterator ended normally */ - assert(next_instr[oparg].op.code == END_FOR || - next_instr[oparg].op.code == INSTRUMENTED_END_FOR); - /* Jump forward oparg, then skip following END_FOR */ - JUMPBY(oparg + 1); - DISPATCH(); + _PyStackRef item = _PyForIter_VirtualIteratorNext(tstate, frame, iter, &null_or_index); + if (!PyStackRef_IsValid(item)) { + if (PyStackRef_IsError(item)) { + ERROR_NO_POP(); } - next = PyStackRef_FromPyObjectSteal(next_o); + // Jump forward by oparg and skip the following END_FOR + JUMPBY(oparg + 1); + DISPATCH(); } + next = item; } op(_FOR_ITER_TIER_TWO, (iter, null_or_index -- iter, null_or_index, next)) { - /* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) 
*/ - PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - EXIT_IF(!PyStackRef_IsNull(null_or_index)); - PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o); - if (next_o == NULL) { - if (_PyErr_Occurred(tstate)) { - int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration); - if (!matches) { - ERROR_NO_POP(); - } - _PyEval_MonitorRaise(tstate, frame, frame->instr_ptr); - _PyErr_Clear(tstate); + _PyStackRef item = _PyForIter_VirtualIteratorNext(tstate, frame, iter, &null_or_index); + if (!PyStackRef_IsValid(item)) { + if (PyStackRef_IsError(item)) { + ERROR_NO_POP(); } /* iterator ended normally */ /* The translator sets the deopt target just past the matching END_FOR */ EXIT_IF(true); } - next = PyStackRef_FromPyObjectSteal(next_o); - // Common case: no jump, leave it to the code generator + next = item; } + macro(FOR_ITER) = _SPECIALIZE_FOR_ITER + _FOR_ITER; inst(INSTRUMENTED_FOR_ITER, (unused/1, iter, null_or_index -- iter, null_or_index, next)) { - PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - if (PyStackRef_IsTaggedInt(null_or_index)) { - next = _PyForIter_NextWithIndex(iter_o, null_or_index); - if (PyStackRef_IsNull(next)) { - JUMPBY(oparg + 1); - DISPATCH(); - } - null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index); - INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT); - } - else { - PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o); - if (next_o != NULL) { - next = PyStackRef_FromPyObjectSteal(next_o); - INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT); - } - else { - if (_PyErr_Occurred(tstate)) { - int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration); - if (!matches) { - ERROR_NO_POP(); - } - _PyEval_MonitorRaise(tstate, frame, this_instr); - _PyErr_Clear(tstate); - } - /* iterator ended normally */ - assert(next_instr[oparg].op.code == END_FOR || - next_instr[oparg].op.code == INSTRUMENTED_END_FOR); - /* Skip END_FOR */ - JUMPBY(oparg + 1); - DISPATCH(); + _PyStackRef item = _PyForIter_VirtualIteratorNext(tstate, frame, iter, &null_or_index); + if (!PyStackRef_IsValid(item)) { + if (PyStackRef_IsError(item)) { + ERROR_NO_POP(); } + // Jump forward by oparg and skip the following END_FOR + JUMPBY(oparg + 1); + DISPATCH(); } + next = item; + INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT); } - op(_ITER_CHECK_LIST, (iter, null_or_index -- iter, null_or_index)) { PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); EXIT_IF(Py_TYPE(iter_o) != &PyList_Type); diff --git a/Python/ceval.c b/Python/ceval.c index 5ea837e1a6e..4cfe4bb88f4 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -3439,8 +3439,8 @@ _PyEval_LoadName(PyThreadState *tstate, _PyInterpreterFrame *frame, PyObject *na return value; } -_PyStackRef -_PyForIter_NextWithIndex(PyObject *seq, _PyStackRef index) +static _PyStackRef +foriter_next(PyObject *seq, _PyStackRef index) { assert(PyStackRef_IsTaggedInt(index)); assert(PyTuple_CheckExact(seq) || PyList_CheckExact(seq)); @@ -3459,6 +3459,30 @@ _PyForIter_NextWithIndex(PyObject *seq, _PyStackRef index) return PyStackRef_FromPyObjectSteal(item); } +_PyStackRef _PyForIter_VirtualIteratorNext(PyThreadState* tstate, _PyInterpreterFrame* frame, _PyStackRef iter, _PyStackRef* index_ptr) +{ + PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); + _PyStackRef index = *index_ptr; + if (PyStackRef_IsTaggedInt(index)) { + *index_ptr = PyStackRef_IncrementTaggedIntNoOverflow(index); + return foriter_next(iter_o, index); + } + PyObject 
*next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o); + if (next_o == NULL) { + if (_PyErr_Occurred(tstate)) { + if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) { + _PyEval_MonitorRaise(tstate, frame, frame->instr_ptr); + _PyErr_Clear(tstate); + } + else { + return PyStackRef_ERROR; + } + } + return PyStackRef_NULL; + } + return PyStackRef_FromPyObjectSteal(next_o); +} + /* Check if a 'cls' provides the given special method. */ static inline int type_has_special_method(PyTypeObject *cls, PyObject *name) diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 35b29940cb4..d19605169d5 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -4268,33 +4268,20 @@ _PyStackRef next; null_or_index = stack_pointer[-1]; iter = stack_pointer[-2]; - PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - if (!PyStackRef_IsNull(null_or_index)) { - UOP_STAT_INC(uopcode, miss); - JUMP_TO_JUMP_TARGET(); - } _PyFrame_SetStackPointer(frame, stack_pointer); - PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o); + _PyStackRef item = _PyForIter_VirtualIteratorNext(tstate, frame, iter, &null_or_index); stack_pointer = _PyFrame_GetStackPointer(frame); - if (next_o == NULL) { - if (_PyErr_Occurred(tstate)) { - _PyFrame_SetStackPointer(frame, stack_pointer); - int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration); - stack_pointer = _PyFrame_GetStackPointer(frame); - if (!matches) { - JUMP_TO_ERROR(); - } - _PyFrame_SetStackPointer(frame, stack_pointer); - _PyEval_MonitorRaise(tstate, frame, frame->instr_ptr); - _PyErr_Clear(tstate); - stack_pointer = _PyFrame_GetStackPointer(frame); + if (!PyStackRef_IsValid(item)) { + if (PyStackRef_IsError(item)) { + JUMP_TO_ERROR(); } if (true) { UOP_STAT_INC(uopcode, miss); JUMP_TO_JUMP_TARGET(); } } - next = PyStackRef_FromPyObjectSteal(next_o); + next = item; + stack_pointer[-1] = null_or_index; stack_pointer[0] = next; stack_pointer += 1; assert(WITHIN_STACK_BOUNDS()); diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index e3cd3b71a1d..c8825df3ade 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -5753,41 +5753,18 @@ } // _FOR_ITER { - PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - if (PyStackRef_IsTaggedInt(null_or_index)) { - _PyFrame_SetStackPointer(frame, stack_pointer); - next = _PyForIter_NextWithIndex(iter_o, null_or_index); - stack_pointer = _PyFrame_GetStackPointer(frame); - if (PyStackRef_IsNull(next)) { - JUMPBY(oparg + 1); - DISPATCH(); - } - null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index); - } - else { - _PyFrame_SetStackPointer(frame, stack_pointer); - PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o); - stack_pointer = _PyFrame_GetStackPointer(frame); - if (next_o == NULL) { - if (_PyErr_Occurred(tstate)) { - _PyFrame_SetStackPointer(frame, stack_pointer); - int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration); - stack_pointer = _PyFrame_GetStackPointer(frame); - if (!matches) { - JUMP_TO_LABEL(error); - } - _PyFrame_SetStackPointer(frame, stack_pointer); - _PyEval_MonitorRaise(tstate, frame, this_instr); - _PyErr_Clear(tstate); - stack_pointer = _PyFrame_GetStackPointer(frame); - } - assert(next_instr[oparg].op.code == END_FOR || - next_instr[oparg].op.code == INSTRUMENTED_END_FOR); - JUMPBY(oparg + 1); - DISPATCH(); + _PyFrame_SetStackPointer(frame, stack_pointer); + _PyStackRef item = _PyForIter_VirtualIteratorNext(tstate, frame, iter, &null_or_index); + stack_pointer = 
_PyFrame_GetStackPointer(frame); + if (!PyStackRef_IsValid(item)) { + if (PyStackRef_IsError(item)) { + JUMP_TO_LABEL(error); } - next = PyStackRef_FromPyObjectSteal(next_o); + JUMPBY(oparg + 1); + stack_pointer[-1] = null_or_index; + DISPATCH(); } + next = item; } stack_pointer[-1] = null_or_index; stack_pointer[0] = next; @@ -7059,45 +7036,19 @@ /* Skip 1 cache entry */ null_or_index = stack_pointer[-1]; iter = stack_pointer[-2]; - PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - if (PyStackRef_IsTaggedInt(null_or_index)) { - _PyFrame_SetStackPointer(frame, stack_pointer); - next = _PyForIter_NextWithIndex(iter_o, null_or_index); - stack_pointer = _PyFrame_GetStackPointer(frame); - if (PyStackRef_IsNull(next)) { - JUMPBY(oparg + 1); - DISPATCH(); - } - null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index); - INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT); - } - else { - _PyFrame_SetStackPointer(frame, stack_pointer); - PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o); - stack_pointer = _PyFrame_GetStackPointer(frame); - if (next_o != NULL) { - next = PyStackRef_FromPyObjectSteal(next_o); - INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT); - } - else { - if (_PyErr_Occurred(tstate)) { - _PyFrame_SetStackPointer(frame, stack_pointer); - int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration); - stack_pointer = _PyFrame_GetStackPointer(frame); - if (!matches) { - JUMP_TO_LABEL(error); - } - _PyFrame_SetStackPointer(frame, stack_pointer); - _PyEval_MonitorRaise(tstate, frame, this_instr); - _PyErr_Clear(tstate); - stack_pointer = _PyFrame_GetStackPointer(frame); - } - assert(next_instr[oparg].op.code == END_FOR || - next_instr[oparg].op.code == INSTRUMENTED_END_FOR); - JUMPBY(oparg + 1); - DISPATCH(); + _PyFrame_SetStackPointer(frame, stack_pointer); + _PyStackRef item = _PyForIter_VirtualIteratorNext(tstate, frame, iter, &null_or_index); + stack_pointer = _PyFrame_GetStackPointer(frame); + if (!PyStackRef_IsValid(item)) { + if (PyStackRef_IsError(item)) { + JUMP_TO_LABEL(error); } + JUMPBY(oparg + 1); + stack_pointer[-1] = null_or_index; + DISPATCH(); } + next = item; + INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT); stack_pointer[-1] = null_or_index; stack_pointer[0] = next; stack_pointer += 1; diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c index e1209209660..b4220e2c627 100644 --- a/Python/optimizer_bytecodes.c +++ b/Python/optimizer_bytecodes.c @@ -840,6 +840,17 @@ dummy_func(void) { value = sym_new_unknown(ctx); } + op(_GET_ITER, (iterable -- iter, index_or_null)) { + if (sym_matches_type(iterable, &PyTuple_Type) || sym_matches_type(iterable, &PyList_Type)) { + iter = iterable; + index_or_null = sym_new_not_null(ctx); + } + else { + iter = sym_new_not_null(ctx); + index_or_null = sym_new_unknown(ctx); + } + } + op(_FOR_ITER_GEN_FRAME, (unused, unused -- unused, unused, gen_frame: _Py_UOpsAbstractFrame*)) { gen_frame = NULL; /* We are about to hit the end of the trace */ diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h index db86edcc785..960c6838004 100644 --- a/Python/optimizer_cases.c.h +++ b/Python/optimizer_cases.c.h @@ -1562,10 +1562,18 @@ } case _GET_ITER: { + JitOptSymbol *iterable; JitOptSymbol *iter; JitOptSymbol *index_or_null; - iter = sym_new_not_null(ctx); - index_or_null = sym_new_not_null(ctx); + iterable = stack_pointer[-1]; + if (sym_matches_type(iterable, &PyTuple_Type) || sym_matches_type(iterable, 
&PyList_Type)) { + iter = iterable; + index_or_null = sym_new_not_null(ctx); + } + else { + iter = sym_new_not_null(ctx); + index_or_null = sym_new_unknown(ctx); + } stack_pointer[-1] = iter; stack_pointer[0] = index_or_null; stack_pointer += 1; diff --git a/Python/stackrefs.c b/Python/stackrefs.c index b2a1369031a..ecc0012ef17 100644 --- a/Python/stackrefs.c +++ b/Python/stackrefs.c @@ -40,6 +40,7 @@ make_table_entry(PyObject *obj, const char *filename, int linenumber) PyObject * _Py_stackref_get_object(_PyStackRef ref) { + assert(!PyStackRef_IsError(ref)); if (ref.index == 0) { return NULL; } @@ -64,6 +65,7 @@ PyStackRef_Is(_PyStackRef a, _PyStackRef b) PyObject * _Py_stackref_close(_PyStackRef ref, const char *filename, int linenumber) { + assert(!PyStackRef_IsError(ref)); PyInterpreterState *interp = PyInterpreterState_Get(); if (ref.index >= interp->next_stackref) { _Py_FatalErrorFormat(__func__, "Invalid StackRef with ID %" PRIu64 " at %s:%d\n", (void *)ref.index, filename, linenumber); @@ -128,6 +130,7 @@ _Py_stackref_create(PyObject *obj, const char *filename, int linenumber) void _Py_stackref_record_borrow(_PyStackRef ref, const char *filename, int linenumber) { + assert(!PyStackRef_IsError(ref)); if (ref.index < INITIAL_STACKREF_INDEX) { return; } @@ -152,6 +155,7 @@ _Py_stackref_record_borrow(_PyStackRef ref, const char *filename, int linenumber) void _Py_stackref_associate(PyInterpreterState *interp, PyObject *obj, _PyStackRef ref) { + assert(!PyStackRef_IsError(ref)); assert(ref.index < INITIAL_STACKREF_INDEX); TableEntry *entry = make_table_entry(obj, "builtin-object", 0); if (entry == NULL) { diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index 3070559db8a..1447f365336 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -681,6 +681,8 @@ NON_ESCAPING_FUNCTIONS = ( "PyStackRef_UntagInt", "PyStackRef_IncrementTaggedIntNoOverflow", "PyStackRef_IsNullOrInt", + "PyStackRef_IsError", + "PyStackRef_IsValid", ) diff --git a/Tools/cases_generator/stack.py b/Tools/cases_generator/stack.py index 6b681775f48..df168afa888 100644 --- a/Tools/cases_generator/stack.py +++ b/Tools/cases_generator/stack.py @@ -496,7 +496,7 @@ class Storage: f"Expected '{undefined}' to be defined before '{out.name}'" else: undefined = out.name - while len(self.outputs) > self.peeks and not self.needs_defining(self.outputs[0]): + while len(self.outputs) > self.peeks and not self.needs_defining(self.outputs[self.peeks]): out = self.outputs.pop(self.peeks) self.stack.push(out)
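
Note on the FOR_ITER hunks above: the near-identical copies of the next-item logic in _FOR_ITER, _FOR_ITER_TIER_TWO, and INSTRUMENTED_FOR_ITER (and their generated expansions in executor_cases.c.h and generated_cases.c.h) collapse into one helper, _PyForIter_VirtualIteratorNext in Python/ceval.c, which reports three outcomes through a single _PyStackRef return value: a valid item, normal exhaustion (StopIteration is cleared after notifying monitoring), or a propagating error (PyStackRef_ERROR). The sketch below is a minimal stand-alone model of that tri-state calling convention in plain C; Result, iterator_next, and the kind values are invented for illustration and are not the CPython API.

#include <stdio.h>

typedef struct {
    int kind;   /* 0 = valid item, 1 = exhausted, 2 = error */
    int value;  /* payload, meaningful only when kind == 0 */
} Result;

static int result_is_valid(Result r) { return r.kind == 0; }
static int result_is_error(Result r) { return r.kind == 2; }

/* Stand-in for _PyForIter_VirtualIteratorNext: yields 0..limit-1 and
 * then reports exhaustion; a negative limit simulates a raised error. */
static Result iterator_next(int *index, int limit)
{
    if (limit < 0) {
        return (Result){ .kind = 2, .value = 0 };
    }
    if (*index >= limit) {
        return (Result){ .kind = 1, .value = 0 };  /* ended normally */
    }
    return (Result){ .kind = 0, .value = (*index)++ };
}

int main(void)
{
    int index = 0;
    for (;;) {
        Result item = iterator_next(&index, 3);
        if (!result_is_valid(item)) {
            if (result_is_error(item)) {
                return 1;   /* ERROR_NO_POP() / JUMP_TO_ERROR() analogue */
            }
            break;          /* JUMPBY(oparg + 1); DISPATCH() analogue */
        }
        printf("next = %d\n", item.value);
    }
    return 0;
}

The payoff visible in the diff is that every caller shrinks to the same two-branch shape: PyStackRef_IsValid on the hot path, PyStackRef_IsError only on the cold exit.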
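
The Python/optimizer_bytecodes.c and optimizer_cases.c.h hunks teach the tier-two abstract interpreter about the new iterator layout: when the iterable is statically known to be a tuple or list, _GET_ITER pushes the sequence itself plus a known-present tagged index, so later uops can rely on that type information; for any other type the index slot stays unknown. A hedged plain-C model of just that transfer function (the Sym and GetIterEffect names are invented, not the JitOptSymbol API):

#include <stdio.h>

typedef enum { SYM_UNKNOWN, SYM_NOT_NULL, SYM_TUPLE, SYM_LIST } Sym;

typedef struct { Sym iter; Sym index_or_null; } GetIterEffect;

/* Abstract effect of GET_ITER on the virtual stack: proven tuple/list
 * iterables flow through unchanged with a non-null index; everything
 * else yields an opaque iterator and an unknown index slot. */
static GetIterEffect abstract_get_iter(Sym iterable)
{
    GetIterEffect out;
    if (iterable == SYM_TUPLE || iterable == SYM_LIST) {
        out.iter = iterable;            /* iter = iterable */
        out.index_or_null = SYM_NOT_NULL;
    }
    else {
        out.iter = SYM_NOT_NULL;
        out.index_or_null = SYM_UNKNOWN;
    }
    return out;
}

int main(void)
{
    GetIterEffect e = abstract_get_iter(SYM_LIST);
    printf("iter=%d index_or_null=%d\n", e.iter, e.index_or_null);
    return 0;
}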
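
Finally, the one-line Tools/cases_generator/stack.py change is a correctness fix rather than churn: the flush loop pops self.outputs[self.peeks], so the loop condition now inspects that same element; previously it tested self.outputs[0], which with a non-zero peek count is a peeked entry that the loop never pops, so the flush could stop or continue on the wrong value.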