63 files changed, 2334 insertions, 974 deletions
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 63a28490043..7edadff2754 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -35,6 +35,7 @@ Objects/type* @markshannon Objects/codeobject.c @markshannon Objects/frameobject.c @markshannon Objects/call.c @markshannon +Objects/object.c @ZeroIntensity Python/ceval*.c @markshannon Python/ceval*.h @markshannon Python/codegen.c @markshannon @iritkatriel @@ -66,8 +67,8 @@ Doc/_static/** @AA-Turner @hugovk Doc/tools/** @AA-Turner @hugovk # runtime state/lifecycle -**/*pylifecycle* @ericsnowcurrently -**/*pystate* @ericsnowcurrently +**/*pylifecycle* @ericsnowcurrently @ZeroIntensity +**/*pystate* @ericsnowcurrently @ZeroIntensity **/*preconfig* @ericsnowcurrently **/*initconfig* @ericsnowcurrently **/*pathconfig* @ericsnowcurrently diff --git a/Doc/conf.py b/Doc/conf.py index b08f5452901..161c2986441 100644 --- a/Doc/conf.py +++ b/Doc/conf.py @@ -79,6 +79,10 @@ version, release = import_module('patchlevel').get_version_info() rst_epilog = f""" .. |python_version_literal| replace:: ``Python {version}`` .. |python_x_dot_y_literal| replace:: ``python{version}`` +.. |python_x_dot_y_t_literal| replace:: ``python{version}t`` +.. |python_x_dot_y_t_literal_config| replace:: ``python{version}t-config`` +.. |x_dot_y_b2_literal| replace:: ``{version}.0b2`` +.. |applications_python_version_literal| replace:: ``/Applications/Python {version}/`` .. |usr_local_bin_python_x_dot_y_literal| replace:: ``/usr/local/bin/python{version}`` .. Apparently this how you hack together a formatted link: diff --git a/Doc/howto/free-threading-extensions.rst b/Doc/howto/free-threading-extensions.rst index 175bb5dc831..02b45879ccf 100644 --- a/Doc/howto/free-threading-extensions.rst +++ b/Doc/howto/free-threading-extensions.rst @@ -6,8 +6,8 @@ C API Extension Support for Free Threading ****************************************** -Starting with the 3.13 release, CPython has experimental support for running -with the :term:`global interpreter lock` (GIL) disabled in a configuration +Starting with the 3.13 release, CPython has support for running with +the :term:`global interpreter lock` (GIL) disabled in a configuration called :term:`free threading`. This document describes how to adapt C API extensions to support free threading. diff --git a/Doc/howto/free-threading-python.rst b/Doc/howto/free-threading-python.rst index c33cef2c8e9..24069617c47 100644 --- a/Doc/howto/free-threading-python.rst +++ b/Doc/howto/free-threading-python.rst @@ -1,18 +1,21 @@ .. _freethreading-python-howto: -********************************************** -Python experimental support for free threading -********************************************** +********************************* +Python support for free threading +********************************* -Starting with the 3.13 release, CPython has experimental support for a build of +Starting with the 3.13 release, CPython has support for a build of Python called :term:`free threading` where the :term:`global interpreter lock` (GIL) is disabled. Free-threaded execution allows for full utilization of the available processing power by running threads in parallel on available CPU cores. While not all software will benefit from this automatically, programs designed with threading in mind will run faster on multi-core hardware. -**The free-threaded mode is experimental** and work is ongoing to improve it: -expect some bugs and a substantial single-threaded performance hit. 
+The free-threaded mode is working and continues to be improved, but +there is some additional overhead in single-threaded workloads compared +to the regular build. Additionally, third-party packages, in particular ones +with an :term:`extension module`, may not be ready for use in a +free-threaded build, and will re-enable the :term:`GIL`. This document describes the implications of free threading for Python code. See :ref:`freethreading-extensions-howto` for information on @@ -43,7 +46,7 @@ Identifying free-threaded Python ================================ To check if the current interpreter supports free-threading, :option:`python -VV <-V>` -and :data:`sys.version` contain "experimental free-threading build". +and :data:`sys.version` contain "free-threading build". The new :func:`sys._is_gil_enabled` function can be used to check whether the GIL is actually disabled in the running process. diff --git a/Doc/howto/perf_profiling.rst b/Doc/howto/perf_profiling.rst index b579d776576..96d757ac452 100644 --- a/Doc/howto/perf_profiling.rst +++ b/Doc/howto/perf_profiling.rst @@ -92,7 +92,7 @@ Then we can use ``perf report`` to analyze the data: | | | | | | | |--51.67%--_PyEval_EvalFrameDefault | | | | | - | | | | |--11.52%--_PyLong_Add + | | | | |--11.52%--_PyCompactLong_Add | | | | | | | | | | | |--2.97%--_PyObject_Malloc ... @@ -142,7 +142,7 @@ Instead, if we run the same experiment with ``perf`` support enabled we get: | | | | | | | |--51.81%--_PyEval_EvalFrameDefault | | | | | - | | | | |--13.77%--_PyLong_Add + | | | | |--13.77%--_PyCompactLong_Add | | | | | | | | | | | |--3.26%--_PyObject_Malloc diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst index 2ee4450698a..e00fe9c8145 100644 --- a/Doc/library/ctypes.rst +++ b/Doc/library/ctypes.rst @@ -882,7 +882,7 @@ invalid non-\ ``NULL`` pointers would crash Python):: Thread safety without the GIL ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -In Python 3.13, the :term:`GIL` may be disabled on :term:`experimental free threaded <free threading>` builds. +From Python 3.13 onward, the :term:`GIL` can be disabled on :term:`free threaded <free threading>` builds. In ctypes, reads and writes to a single object concurrently is safe, but not across multiple objects: .. code-block:: pycon diff --git a/Doc/library/email.header.rst b/Doc/library/email.header.rst index 219fad0d2f6..c3392a62b8e 100644 --- a/Doc/library/email.header.rst +++ b/Doc/library/email.header.rst @@ -178,16 +178,36 @@ The :mod:`email.header` module also provides the following convenient functions. Decode a message header value without converting the character set. The header value is in *header*. - This function returns a list of ``(decoded_string, charset)`` pairs containing - each of the decoded parts of the header. *charset* is ``None`` for non-encoded - parts of the header, otherwise a lower case string containing the name of the - character set specified in the encoded string. + For historical reasons, this function may return either: - Here's an example:: + 1. A list of pairs containing each of the decoded parts of the header, + ``(decoded_bytes, charset)``, where *decoded_bytes* is always an instance of + :class:`bytes`, and *charset* is either: + + - A lower case string containing the name of the character set specified. + + - ``None`` for non-encoded parts of the header. + + 2. A list of length 1 containing a pair ``(string, None)``, where + *string* is always an instance of :class:`str`. 
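Because the two shapes above differ in type, code that still calls ``decode_header()`` typically has to normalise the result itself. The following is only an illustrative sketch (the ``header_to_str`` name and its ``fallback`` parameter are inventions for this example, not part of the change); new code should prefer ``email.headerregistry`` as the note further below recommends:

    # Sketch: flatten either return shape of decode_header() into one str.
    from email.header import decode_header

    def header_to_str(raw_header, fallback="ascii"):
        parts = []
        for value, charset in decode_header(raw_header):
            if isinstance(value, bytes):
                # Encoded parts arrive as bytes plus a (possibly None) charset name.
                parts.append(value.decode(charset or fallback, errors="replace"))
            else:
                # The historical single-element (str, None) shape.
                parts.append(value)
        return "".join(parts)

    print(header_to_str("bar =?utf-8?B?ZsOzbw==?="))  # -> "bar fóo"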
+ + An :exc:`email.errors.HeaderParseError` may be raised when certain decoding + errors occur (e.g. a base64 decoding exception). + + Here are examples: >>> from email.header import decode_header >>> decode_header('=?iso-8859-1?q?p=F6stal?=') [(b'p\xf6stal', 'iso-8859-1')] + >>> decode_header('unencoded_string') + [('unencoded_string', None)] + >>> decode_header('bar =?utf-8?B?ZsOzbw==?=') + [(b'bar ', None), (b'f\xc3\xb3o', 'utf-8')] + + .. note:: + + This function exists for backwards compatibility only. For + new code, we recommend using :class:`email.headerregistry.HeaderRegistry`. .. function:: make_header(decoded_seq, maxlinelen=None, header_name=None, continuation_ws=' ') @@ -203,3 +223,7 @@ The :mod:`email.header` module also provides the following convenient functions. :class:`Header` instance. Optional *maxlinelen*, *header_name*, and *continuation_ws* are as in the :class:`Header` constructor. + .. note:: + + This function exists for backwards compatibility only, and is + not recommended for use in new code. diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index 55e442b20ff..71f9999464a 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -1933,6 +1933,13 @@ always available. Unless explicitly noted otherwise, all variables are read-only interpreter is pre-release (alpha, beta, or release candidate) then the local and remote interpreters must be the same exact version. + .. audit-event:: remote_debugger_script script_path + + When the script is executed in the remote process, an + :ref:`auditing event <auditing>` + ``sys.remote_debugger_script`` is raised + with the path in the remote process. + .. availability:: Unix, Windows. .. versionadded:: 3.14 diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst index 7edcdcabdce..52fefd590da 100644 --- a/Doc/library/threading.rst +++ b/Doc/library/threading.rst @@ -102,7 +102,7 @@ CPU-bound tasks, as only one thread can execute Python bytecode at a time. Despite this, threads remain a useful tool for achieving concurrency in many scenarios. -As of Python 3.13, experimental :term:`free-threaded <free threading>` builds +As of Python 3.13, :term:`free-threaded <free threading>` builds can disable the GIL, enabling true parallel execution of threads, but this feature is not available by default (see :pep:`703`). diff --git a/Doc/reference/expressions.rst b/Doc/reference/expressions.rst index 2a550b504ca..3d3bf1d9840 100644 --- a/Doc/reference/expressions.rst +++ b/Doc/reference/expressions.rst @@ -406,8 +406,9 @@ brackets or curly braces. Variables used in the generator expression are evaluated lazily when the :meth:`~generator.__next__` method is called for the generator object (in the same fashion as normal generators). However, the iterable expression in the -leftmost :keyword:`!for` clause is immediately evaluated, so that an error -produced by it will be emitted at the point where the generator expression +leftmost :keyword:`!for` clause is immediately evaluated, and the +:term:`iterator` is immediately created for that iterable, so that an error +produced while creating the iterator will be emitted at the point where the generator expression is defined, rather than at the point where the first value is retrieved.
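As a quick illustration of the paragraph above (a sketch, not part of this change): the iterator for the leftmost iterable is created as soon as the generator expression is defined, while later clauses and the element expression are still evaluated lazily.

    >>> gen = (x for x in None)   # iter() of the leftmost iterable runs now
    Traceback (most recent call last):
      ...
    TypeError: 'NoneType' object is not iterable
    >>> gen = (x / y for x in [1, 2] for y in [0])   # later clauses not evaluated yet
    >>> next(gen)   # the division error only appears on first iteration
    Traceback (most recent call last):
      ...
    ZeroDivisionError: division by zero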
Subsequent :keyword:`!for` clauses and any filter condition in the leftmost :keyword:`!for` clause cannot be evaluated in the enclosing scope as they may diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst index b914d3397b6..df81a330549 100644 --- a/Doc/using/configure.rst +++ b/Doc/using/configure.rst @@ -290,8 +290,8 @@ General Options .. option:: --disable-gil - Enables **experimental** support for running Python without the - :term:`global interpreter lock` (GIL): free threading build. + Enables support for running Python without the :term:`global interpreter + lock` (GIL): free threading build. Defines the ``Py_GIL_DISABLED`` macro and adds ``"t"`` to :data:`sys.abiflags`. @@ -445,6 +445,14 @@ Options for third-party dependencies C compiler and linker flags for ``libuuid``, used by :mod:`uuid` module, overriding ``pkg-config``. +.. option:: LIBZSTD_CFLAGS +.. option:: LIBZSTD_LIBS + + C compiler and linker flags for ``libzstd``, used by :mod:`compression.zstd` module, + overriding ``pkg-config``. + + .. versionadded:: 3.14 + .. option:: PANEL_CFLAGS .. option:: PANEL_LIBS diff --git a/Doc/using/mac.rst b/Doc/using/mac.rst index 4b6c884f3d4..d44f55b5cc9 100644 --- a/Doc/using/mac.rst +++ b/Doc/using/mac.rst @@ -20,13 +20,6 @@ the Pythons provided by the CPython release team for download from the `python.org website <https://www.python.org/downloads/>`_. See :ref:`alternative_bundles` for some other options. -.. |usemac_x_dot_y| replace:: 3.13 -.. |usemac_python_x_dot_y_literal| replace:: ``python3.13`` -.. |usemac_python_x_dot_y_t_literal| replace:: ``python3.13t`` -.. |usemac_python_x_dot_y_t_literal_config| replace:: ``python3.13t-config`` -.. |usemac_applications_folder_name| replace:: ``Python 3.13`` -.. |usemac_applications_folder_version| replace:: ``/Applications/Python 3.13/`` - .. _getting-osx: .. _getting-and-installing-macpython: @@ -64,7 +57,7 @@ Clicking on the **Continue** button brings up the **Read Me** for this installer Besides other important information, the **Read Me** documents which Python version is going to be installed and on what versions of macOS it is supported. You may need to scroll through to read the whole file. By default, this **Read Me** will also be -installed in |usemac_applications_folder_version| and available to read anytime. +installed in |applications_python_version_literal| and available to read anytime. .. image:: mac_installer_02_readme.png @@ -97,7 +90,7 @@ When the installation is complete, the **Summary** window will appear. .. image:: mac_installer_06_summary.png Double-click on the :command:`Install Certificates.command` -icon or file in the |usemac_applications_folder_version| window to complete the +icon or file in the |applications_python_version_literal| window to complete the installation. .. image:: mac_installer_07_applications.png @@ -114,7 +107,7 @@ Close this terminal window and the installer window. A default install will include: -* A |usemac_applications_folder_name| folder in your :file:`Applications` folder. In here +* A |python_version_literal| folder in your :file:`Applications` folder. In here you find :program:`IDLE`, the development environment that is a standard part of official Python distributions; and :program:`Python Launcher`, which handles double-clicking Python scripts from the macOS `Finder <https://support.apple.com/en-us/HT201732>`_. @@ -141,7 +134,7 @@ How to run a Python script There are two ways to invoke the Python interpreter. 
If you are familiar with using a Unix shell in a terminal -window, you can invoke |usemac_python_x_dot_y_literal| or ``python3`` optionally +window, you can invoke |python_x_dot_y_literal| or ``python3`` optionally followed by one or more command line options (described in :ref:`using-on-general`). The Python tutorial also has a useful section on :ref:`using Python interactively from a shell <tut-interac>`. @@ -160,7 +153,7 @@ for more information. To run a Python script file from the terminal window, you can invoke the interpreter with the name of the script file: - |usemac_python_x_dot_y_literal| ``myscript.py`` + |python_x_dot_y_literal| ``myscript.py`` To run your script from the Finder, you can either: @@ -268,7 +261,7 @@ Installing Free-threaded Binaries The ``python.org`` :ref:`Python for macOS <getting-and-installing-macpython>` installer package can optionally install an additional build of -Python |usemac_x_dot_y| that supports :pep:`703`, the experimental free-threading feature +Python |version| that supports :pep:`703`, the experimental free-threading feature (running with the :term:`global interpreter lock` disabled). Check the release page on ``python.org`` for possible updated information. @@ -282,46 +275,54 @@ step of the installer as described above. If the box next to the **Free-threaded Python** package name is checked, a separate :file:`PythonT.framework` will also be installed alongside the normal :file:`Python.framework` in :file:`/Library/Frameworks`. -This configuration allows a free-threaded Python |usemac_x_dot_y| build to co-exist -on your system with a traditional (GIL only) Python |usemac_x_dot_y| build with +This configuration allows a free-threaded Python |version| build to co-exist +on your system with a traditional (GIL only) Python |version| build with minimal risk while installing or testing. This installation layout is itself experimental and is subject to change in future releases. Known cautions and limitations: - The **UNIX command-line tools** package, which is selected by default, - will install links in :file:`/usr/local/bin` for |usemac_python_x_dot_y_t_literal|, - the free-threaded interpreter, and |usemac_python_x_dot_y_t_literal_config|, + will install links in :file:`/usr/local/bin` for |python_x_dot_y_t_literal|, + the free-threaded interpreter, and |python_x_dot_y_t_literal_config|, a configuration utility which may be useful for package builders. Since :file:`/usr/local/bin` is typically included in your shell ``PATH``, in most cases no changes to your ``PATH`` environment variables should - be needed to use |usemac_python_x_dot_y_t_literal|. + be needed to use |python_x_dot_y_t_literal|. - For this release, the **Shell profile updater** package and the - :file:`Update Shell Profile.command` in |usemac_applications_folder_version| + :file:`Update Shell Profile.command` in |applications_python_version_literal| do not support the free-threaded package. - The free-threaded build and the traditional build have separate search paths and separate :file:`site-packages` directories so, by default, if you need a package available in both builds, it may need to be installed in both. The free-threaded package will install a separate instance of :program:`pip` for use - with |usemac_python_x_dot_y_t_literal|. + with |python_x_dot_y_t_literal|. - To install a package using :command:`pip` without a :command:`venv`: - |usemac_python_x_dot_y_t_literal| ``-m pip install <package_name>`` + .. 
parsed-literal:: + + python\ |version|\ t -m pip install <package_name> - When working with multiple Python environments, it is usually safest and easiest to :ref:`create and use virtual environments <tut-venv>`. This can avoid possible command name conflicts and confusion about which Python is in use: - |usemac_python_x_dot_y_t_literal| ``-m venv <venv_name>`` + .. parsed-literal:: + + python\ |version|\ t -m venv <venv_name> + then :command:`activate`. - To run a free-threaded version of IDLE: - |usemac_python_x_dot_y_t_literal| ``-m idlelib`` + .. parsed-literal:: + + python\ |version|\ t -m idlelib + - The interpreters in both builds respond to the same :ref:`PYTHON environment variables <using-on-envvars>` @@ -337,28 +338,28 @@ Known cautions and limitations: thus it only needs to be run once. - If you cannot depend on the link in ``/usr/local/bin`` pointing to the - ``python.org`` free-threaded |usemac_python_x_dot_y_t_literal| (for example, if you want + ``python.org`` free-threaded |python_x_dot_y_t_literal| (for example, if you want to install your own version there or some other distribution does), you can explicitly set your shell ``PATH`` environment variable to include the ``PythonT`` framework ``bin`` directory: - .. code-block:: sh + .. parsed-literal:: - export PATH="/Library/Frameworks/PythonT.framework/Versions/3.13/bin":"$PATH" + export PATH="/Library/Frameworks/PythonT.framework/Versions/\ |version|\ /bin":"$PATH" The traditional framework installation by default does something similar, except for :file:`Python.framework`. Be aware that having both framework ``bin`` directories in ``PATH`` can lead to confusion if there are duplicate names - like ``python3.13`` in both; which one is actually used depends on the order + like |python_x_dot_y_literal| in both; which one is actually used depends on the order they appear in ``PATH``. The ``which python3.x`` or ``which python3.xt`` commands can show which path is being used. Using virtual environments can help avoid such ambiguities. Another option might be to create a shell :command:`alias` to the desired interpreter, like: - .. code-block:: sh + .. parsed-literal:: - alias py3.13="/Library/Frameworks/Python.framework/Versions/3.13/bin/python3.13" - alias py3.13t="/Library/Frameworks/PythonT.framework/Versions/3.13/bin/python3.13t" + alias py\ |version|\ ="/Library/Frameworks/Python.framework/Versions/\ |version|\ /bin/python\ |version|\ " + alias py\ |version|\ t="/Library/Frameworks/PythonT.framework/Versions/\ |version|\ /bin/python\ |version|\ t" Installing using the command line --------------------------------- @@ -369,22 +370,22 @@ the macOS command line :command:`installer` utility lets you select non-default options, too. If you are not familiar with :command:`installer`, it can be somewhat cryptic (see :command:`man installer` for more information). As an example, the following shell snippet shows one way to do it, -using the ``3.13.0b2`` release and selecting the free-threaded interpreter +using the |x_dot_y_b2_literal| release and selecting the free-threaded interpreter option: -.. code-block:: sh +.. 
parsed-literal:: - RELEASE="python-3.13.0b2-macos11.pkg" + RELEASE="python-\ |version|\ 0b2-macos11.pkg" # download installer pkg - curl -O https://www.python.org/ftp/python/3.13.0/${RELEASE} + curl -O \https://www.python.org/ftp/python/\ |version|\ .0/${RELEASE} # create installer choicechanges to customize the install: - # enable the PythonTFramework-3.13 package + # enable the PythonTFramework-\ |version|\ package # while accepting the other defaults (install all other packages) cat > ./choicechanges.plist <<EOF <?xml version="1.0" encoding="UTF-8"?> - <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> + <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "\http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <array> <dict> @@ -393,7 +394,7 @@ option: <key>choiceAttribute</key> <string>selected</string> <key>choiceIdentifier</key> - <string>org.python.Python.PythonTFramework-3.13</string> + <string>org.python.Python.PythonTFramework-\ |version|\ </string> </dict> </array> </plist> @@ -404,19 +405,19 @@ option: You can then test that both installer builds are now available with something like: -.. code-block:: console +.. parsed-literal:: $ # test that the free-threaded interpreter was installed if the Unix Command Tools package was enabled - $ /usr/local/bin/python3.13t -VV - Python 3.13.0b2 experimental free-threading build (v3.13.0b2:3a83b172af, Jun 5 2024, 12:57:31) [Clang 15.0.0 (clang-1500.3.9.4)] + $ /usr/local/bin/python\ |version|\ t -VV + Python \ |version|\ .0b2 experimental free-threading build (v\ |version|\ .0b2:3a83b172af, Jun 5 2024, 12:57:31) [Clang 15.0.0 (clang-1500.3.9.4)] $ # and the traditional interpreter - $ /usr/local/bin/python3.13 -VV - Python 3.13.0b2 (v3.13.0b2:3a83b172af, Jun 5 2024, 12:50:24) [Clang 15.0.0 (clang-1500.3.9.4)] + $ /usr/local/bin/python\ |version|\ -VV + Python \ |version|\ .0b2 (v\ |version|\ .0b2:3a83b172af, Jun 5 2024, 12:50:24) [Clang 15.0.0 (clang-1500.3.9.4)] $ # test that they are also available without the prefix if /usr/local/bin is on $PATH - $ python3.13t -VV - Python 3.13.0b2 experimental free-threading build (v3.13.0b2:3a83b172af, Jun 5 2024, 12:57:31) [Clang 15.0.0 (clang-1500.3.9.4)] - $ python3.13 -VV - Python 3.13.0b2 (v3.13.0b2:3a83b172af, Jun 5 2024, 12:50:24) [Clang 15.0.0 (clang-1500.3.9.4)] + $ python\ |version|\ t -VV + Python \ |version|\ .0b2 experimental free-threading build (v\ |version|\ .0b2:3a83b172af, Jun 5 2024, 12:57:31) [Clang 15.0.0 (clang-1500.3.9.4)] + $ python\ |version|\ -VV + Python \ |version|\ .0b2 (v\ |version|\ .0b2:3a83b172af, Jun 5 2024, 12:50:24) [Clang 15.0.0 (clang-1500.3.9.4)] .. 
note:: diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst index ca330a32b33..895446e2721 100644 --- a/Doc/whatsnew/3.14.rst +++ b/Doc/whatsnew/3.14.rst @@ -816,43 +816,58 @@ Executing the new tool on the running process will yield a table like this: python -m asyncio ps 12345 - tid task id task name coroutine chain awaiter name awaiter id - --------------------------------------------------------------------------------------------------------------------------------------- - 8138752 0x564bd3d0210 Task-1 0x0 - 8138752 0x564bd3d0410 Sundowning _aexit -> __aexit__ -> main Task-1 0x564bd3d0210 - 8138752 0x564bd3d0610 TMBTE _aexit -> __aexit__ -> main Task-1 0x564bd3d0210 - 8138752 0x564bd3d0810 TNDNBTG _aexit -> __aexit__ -> album Sundowning 0x564bd3d0410 - 8138752 0x564bd3d0a10 Levitate _aexit -> __aexit__ -> album Sundowning 0x564bd3d0410 - 8138752 0x564bd3e0550 DYWTYLM _aexit -> __aexit__ -> album TMBTE 0x564bd3d0610 - 8138752 0x564bd3e0710 Aqua Regia _aexit -> __aexit__ -> album TMBTE 0x564bd3d0610 - - -or: + tid task id task name coroutine stack awaiter chain awaiter name awaiter id + ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + 1935500 0x7fc930c18050 Task-1 TaskGroup._aexit -> TaskGroup.__aexit__ -> main 0x0 + 1935500 0x7fc930c18230 Sundowning TaskGroup._aexit -> TaskGroup.__aexit__ -> album TaskGroup._aexit -> TaskGroup.__aexit__ -> main Task-1 0x7fc930c18050 + 1935500 0x7fc93173fa50 TMBTE TaskGroup._aexit -> TaskGroup.__aexit__ -> album TaskGroup._aexit -> TaskGroup.__aexit__ -> main Task-1 0x7fc930c18050 + 1935500 0x7fc93173fdf0 TNDNBTG sleep -> play TaskGroup._aexit -> TaskGroup.__aexit__ -> album Sundowning 0x7fc930c18230 + 1935500 0x7fc930d32510 Levitate sleep -> play TaskGroup._aexit -> TaskGroup.__aexit__ -> album Sundowning 0x7fc930c18230 + 1935500 0x7fc930d32890 DYWTYLM sleep -> play TaskGroup._aexit -> TaskGroup.__aexit__ -> album TMBTE 0x7fc93173fa50 + 1935500 0x7fc93161ec30 Aqua Regia sleep -> play TaskGroup._aexit -> TaskGroup.__aexit__ -> album TMBTE 0x7fc93173fa50 + +or a tree like this: .. code-block:: bash python -m asyncio pstree 12345 └── (T) Task-1 - └── main - └── __aexit__ - └── _aexit + └── main example.py:13 + └── TaskGroup.__aexit__ Lib/asyncio/taskgroups.py:72 + └── TaskGroup._aexit Lib/asyncio/taskgroups.py:121 ├── (T) Sundowning - │ └── album - │ └── __aexit__ - │ └── _aexit + │ └── album example.py:8 + │ └── TaskGroup.__aexit__ Lib/asyncio/taskgroups.py:72 + │ └── TaskGroup._aexit Lib/asyncio/taskgroups.py:121 │ ├── (T) TNDNBTG + │ │ └── play example.py:4 + │ │ └── sleep Lib/asyncio/tasks.py:702 │ └── (T) Levitate + │ └── play example.py:4 + │ └── sleep Lib/asyncio/tasks.py:702 └── (T) TMBTE - └── album - └── __aexit__ - └── _aexit + └── album example.py:8 + └── TaskGroup.__aexit__ Lib/asyncio/taskgroups.py:72 + └── TaskGroup._aexit Lib/asyncio/taskgroups.py:121 ├── (T) DYWTYLM + │ └── play example.py:4 + │ └── sleep Lib/asyncio/tasks.py:702 └── (T) Aqua Regia + └── play example.py:4 + └── sleep Lib/asyncio/tasks.py:702 If a cycle is detected in the async await graph (which could indicate a programming issue), the tool raises an error and lists the cycle paths that -prevent tree construction. +prevent tree construction: + +.. code-block:: bash + + python -m asyncio pstree 12345 + + ERROR: await-graph contains cycles - cannot print a tree! 
+ + cycle: Task-2 → Task-3 → Task-2 (Contributed by Pablo Galindo, Łukasz Langa, Yury Selivanov, and Marta Gomez Macias in :gh:`91048`.) @@ -1244,6 +1259,14 @@ concurrent.futures buffer. (Contributed by Enzo Bonnal and Josh Rosenberg in :gh:`74028`.) +configparser +------------ + +* Security fix: will no longer write config files it cannot read. Attempting + to :meth:`configparser.ConfigParser.write` keys containing delimiters or + beginning with the section header pattern will raise a + :class:`configparser.InvalidWriteError`. + (Contributed by Jacob Lincoln in :gh:`129270`) contextvars ----------- diff --git a/Include/internal/pycore_long.h b/Include/internal/pycore_long.h index 3196d1b8208..b8efba74bdc 100644 --- a/Include/internal/pycore_long.h +++ b/Include/internal/pycore_long.h @@ -112,9 +112,9 @@ PyAPI_DATA(PyObject*) _PyLong_Rshift(PyObject *, int64_t); // Export for 'math' shared extension PyAPI_DATA(PyObject*) _PyLong_Lshift(PyObject *, int64_t); -PyAPI_FUNC(PyObject*) _PyLong_Add(PyLongObject *left, PyLongObject *right); -PyAPI_FUNC(PyObject*) _PyLong_Multiply(PyLongObject *left, PyLongObject *right); -PyAPI_FUNC(PyObject*) _PyLong_Subtract(PyLongObject *left, PyLongObject *right); +PyAPI_FUNC(PyObject*) _PyCompactLong_Add(PyLongObject *left, PyLongObject *right); +PyAPI_FUNC(PyObject*) _PyCompactLong_Multiply(PyLongObject *left, PyLongObject *right); +PyAPI_FUNC(PyObject*) _PyCompactLong_Subtract(PyLongObject *left, PyLongObject *right); // Export for 'binascii' shared extension. PyAPI_DATA(unsigned char) _PyLong_DigitValue[256]; diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h index 00e918cb8f0..59c85ddeb3d 100644 --- a/Include/internal/pycore_opcode_metadata.h +++ b/Include/internal/pycore_opcode_metadata.h @@ -1072,12 +1072,12 @@ extern const struct opcode_metadata _PyOpcode_opcode_metadata[267]; const struct opcode_metadata _PyOpcode_opcode_metadata[267] = { [BINARY_OP] = { true, INSTR_FMT_IBC0000, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, [BINARY_OP_ADD_FLOAT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BINARY_OP_ADD_INT] = { true, INSTR_FMT_IXC0000, HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG }, [BINARY_OP_ADD_UNICODE] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, [BINARY_OP_EXTEND] = { true, INSTR_FMT_IXC0000, HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG }, [BINARY_OP_INPLACE_ADD_UNICODE] = { true, INSTR_FMT_IXC0000, HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BINARY_OP_MULTIPLY_FLOAT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_MULTIPLY_INT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BINARY_OP_MULTIPLY_INT] = { true, INSTR_FMT_IXC0000, HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG }, [BINARY_OP_SUBSCR_DICT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BINARY_OP_SUBSCR_GETITEM] = { true, INSTR_FMT_IXC0000, HAS_DEOPT_FLAG }, [BINARY_OP_SUBSCR_LIST_INT] = { true, INSTR_FMT_IXC0000, HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, @@ -1085,7 +1085,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[267] = { [BINARY_OP_SUBSCR_STR_INT] = { true, INSTR_FMT_IXC0000, HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, [BINARY_OP_SUBSCR_TUPLE_INT] = { true, INSTR_FMT_IXC0000, HAS_DEOPT_FLAG | 
HAS_EXIT_FLAG | HAS_ESCAPES_FLAG }, [BINARY_OP_SUBTRACT_FLOAT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG }, - [BINARY_OP_SUBTRACT_INT] = { true, INSTR_FMT_IXC0000, HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, + [BINARY_OP_SUBTRACT_INT] = { true, INSTR_FMT_IXC0000, HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG }, [BINARY_SLICE] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BUILD_INTERPOLATION] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG }, [BUILD_LIST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG }, diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h index b08909e72c4..cd36023c25c 100644 --- a/Include/internal/pycore_uop_metadata.h +++ b/Include/internal/pycore_uop_metadata.h @@ -85,9 +85,9 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = { [_UNARY_INVERT] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG, [_GUARD_NOS_INT] = HAS_EXIT_FLAG, [_GUARD_TOS_INT] = HAS_EXIT_FLAG, - [_BINARY_OP_MULTIPLY_INT] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG | HAS_PURE_FLAG, - [_BINARY_OP_ADD_INT] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG | HAS_PURE_FLAG, - [_BINARY_OP_SUBTRACT_INT] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG | HAS_PURE_FLAG, + [_BINARY_OP_MULTIPLY_INT] = HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_PURE_FLAG, + [_BINARY_OP_ADD_INT] = HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_PURE_FLAG, + [_BINARY_OP_SUBTRACT_INT] = HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_PURE_FLAG, [_GUARD_NOS_FLOAT] = HAS_EXIT_FLAG, [_GUARD_TOS_FLOAT] = HAS_EXIT_FLAG, [_BINARY_OP_MULTIPLY_FLOAT] = HAS_ERROR_FLAG | HAS_PURE_FLAG, diff --git a/Lib/asyncio/tools.py b/Lib/asyncio/tools.py index 3fc4524c008..2683f34cc71 100644 --- a/Lib/asyncio/tools.py +++ b/Lib/asyncio/tools.py @@ -1,11 +1,10 @@ """Tools to analyze tasks running in asyncio programs.""" -from collections import defaultdict +from collections import defaultdict, namedtuple from itertools import count from enum import Enum import sys -from _remote_debugging import RemoteUnwinder - +from _remote_debugging import RemoteUnwinder, FrameInfo class NodeType(Enum): COROUTINE = 1 @@ -26,51 +25,75 @@ class CycleFoundException(Exception): # ─── indexing helpers ─────────────────────────────────────────── -def _format_stack_entry(elem: tuple[str, str, int] | str) -> str: - if isinstance(elem, tuple): - fqname, path, line_no = elem - return f"{fqname} {path}:{line_no}" - +def _format_stack_entry(elem: str|FrameInfo) -> str: + if not isinstance(elem, str): + if elem.lineno == 0 and elem.filename == "": + return f"{elem.funcname}" + else: + return f"{elem.funcname} {elem.filename}:{elem.lineno}" return elem def _index(result): - id2name, awaits = {}, [] - for _thr_id, tasks in result: - for tid, tname, awaited in tasks: - id2name[tid] = tname - for stack, parent_id in awaited: - stack = [_format_stack_entry(elem) for elem in stack] - awaits.append((parent_id, stack, tid)) - return id2name, awaits - - -def _build_tree(id2name, awaits): + id2name, awaits, task_stacks = {}, [], {} + for awaited_info in result: + for task_info in awaited_info.awaited_by: + task_id = task_info.task_id + task_name = task_info.task_name + id2name[task_id] = task_name + + # Store the internal coroutine stack for this task + if task_info.coroutine_stack: + for coro_info in task_info.coroutine_stack: + call_stack = coro_info.call_stack + internal_stack = [_format_stack_entry(frame) for frame in call_stack] + task_stacks[task_id] = internal_stack + + # Add the awaited_by relationships 
(external dependencies) + if task_info.awaited_by: + for coro_info in task_info.awaited_by: + call_stack = coro_info.call_stack + parent_task_id = coro_info.task_name + stack = [_format_stack_entry(frame) for frame in call_stack] + awaits.append((parent_task_id, stack, task_id)) + return id2name, awaits, task_stacks + + +def _build_tree(id2name, awaits, task_stacks): id2label = {(NodeType.TASK, tid): name for tid, name in id2name.items()} children = defaultdict(list) - cor_names = defaultdict(dict) # (parent) -> {frame: node} - cor_id_seq = count(1) - - def _cor_node(parent_key, frame_name): - """Return an existing or new (NodeType.COROUTINE, …) node under *parent_key*.""" - bucket = cor_names[parent_key] - if frame_name in bucket: - return bucket[frame_name] - node_key = (NodeType.COROUTINE, f"c{next(cor_id_seq)}") - id2label[node_key] = frame_name - children[parent_key].append(node_key) - bucket[frame_name] = node_key + cor_nodes = defaultdict(dict) # Maps parent -> {frame_name: node_key} + next_cor_id = count(1) + + def get_or_create_cor_node(parent, frame): + """Get existing coroutine node or create new one under parent""" + if frame in cor_nodes[parent]: + return cor_nodes[parent][frame] + + node_key = (NodeType.COROUTINE, f"c{next(next_cor_id)}") + id2label[node_key] = frame + children[parent].append(node_key) + cor_nodes[parent][frame] = node_key return node_key - # lay down parent ➜ …frames… ➜ child paths + # Build task dependency tree with coroutine frames for parent_id, stack, child_id in awaits: cur = (NodeType.TASK, parent_id) - for frame in reversed(stack): # outer-most → inner-most - cur = _cor_node(cur, frame) + for frame in reversed(stack): + cur = get_or_create_cor_node(cur, frame) + child_key = (NodeType.TASK, child_id) if child_key not in children[cur]: children[cur].append(child_key) + # Add coroutine stacks for leaf tasks + awaiting_tasks = {parent_id for parent_id, _, _ in awaits} + for task_id in id2name: + if task_id not in awaiting_tasks and task_id in task_stacks: + cur = (NodeType.TASK, task_id) + for frame in reversed(task_stacks[task_id]): + cur = get_or_create_cor_node(cur, frame) + return id2label, children @@ -129,12 +152,12 @@ def build_async_tree(result, task_emoji="(T)", cor_emoji=""): The call tree is produced by `get_all_async_stacks()`, prefixing tasks with `task_emoji` and coroutine frames with `cor_emoji`. 
""" - id2name, awaits = _index(result) + id2name, awaits, task_stacks = _index(result) g = _task_graph(awaits) cycles = _find_cycles(g) if cycles: raise CycleFoundException(cycles, id2name) - labels, children = _build_tree(id2name, awaits) + labels, children = _build_tree(id2name, awaits, task_stacks) def pretty(node): flag = task_emoji if node[0] == NodeType.TASK else cor_emoji @@ -154,35 +177,40 @@ def build_async_tree(result, task_emoji="(T)", cor_emoji=""): def build_task_table(result): - id2name, awaits = _index(result) + id2name, _, _ = _index(result) table = [] - for tid, tasks in result: - for task_id, task_name, awaited in tasks: - if not awaited: - table.append( - [ - tid, - hex(task_id), - task_name, - "", - "", - "0x0" - ] - ) - for stack, awaiter_id in awaited: - stack = [elem[0] if isinstance(elem, tuple) else elem for elem in stack] - coroutine_chain = " -> ".join(stack) - awaiter_name = id2name.get(awaiter_id, "Unknown") - table.append( - [ - tid, - hex(task_id), - task_name, - coroutine_chain, - awaiter_name, - hex(awaiter_id), - ] - ) + + for awaited_info in result: + thread_id = awaited_info.thread_id + for task_info in awaited_info.awaited_by: + # Get task info + task_id = task_info.task_id + task_name = task_info.task_name + + # Build coroutine stack string + frames = [frame for coro in task_info.coroutine_stack + for frame in coro.call_stack] + coro_stack = " -> ".join(_format_stack_entry(x).split(" ")[0] + for x in frames) + + # Handle tasks with no awaiters + if not task_info.awaited_by: + table.append([thread_id, hex(task_id), task_name, coro_stack, + "", "", "0x0"]) + continue + + # Handle tasks with awaiters + for coro_info in task_info.awaited_by: + parent_id = coro_info.task_name + awaiter_frames = [_format_stack_entry(x).split(" ")[0] + for x in coro_info.call_stack] + awaiter_chain = " -> ".join(awaiter_frames) + awaiter_name = id2name.get(parent_id, "Unknown") + parent_id_str = (hex(parent_id) if isinstance(parent_id, int) + else str(parent_id)) + + table.append([thread_id, hex(task_id), task_name, coro_stack, + awaiter_chain, awaiter_name, parent_id_str]) return table @@ -211,11 +239,11 @@ def display_awaited_by_tasks_table(pid: int) -> None: table = build_task_table(tasks) # Print the table in a simple tabular format print( - f"{'tid':<10} {'task id':<20} {'task name':<20} {'coroutine chain':<50} {'awaiter name':<20} {'awaiter id':<15}" + f"{'tid':<10} {'task id':<20} {'task name':<20} {'coroutine stack':<50} {'awaiter chain':<50} {'awaiter name':<15} {'awaiter id':<15}" ) - print("-" * 135) + print("-" * 180) for row in table: - print(f"{row[0]:<10} {row[1]:<20} {row[2]:<20} {row[3]:<50} {row[4]:<20} {row[5]:<15}") + print(f"{row[0]:<10} {row[1]:<20} {row[2]:<20} {row[3]:<50} {row[4]:<50} {row[5]:<15} {row[6]:<15}") def display_awaited_by_tasks_tree(pid: int) -> None: diff --git a/Lib/configparser.py b/Lib/configparser.py index 239fda60a02..18af1eadaad 100644 --- a/Lib/configparser.py +++ b/Lib/configparser.py @@ -1218,11 +1218,14 @@ class RawConfigParser(MutableMapping): def _validate_key_contents(self, key): """Raises an InvalidWriteError for any keys containing - delimiters or that match the section header pattern""" + delimiters or that begins with the section header pattern""" if re.match(self.SECTCRE, key): - raise InvalidWriteError("Cannot write keys matching section pattern") - if any(delim in key for delim in self._delimiters): - raise InvalidWriteError("Cannot write key that contains delimiters") + raise InvalidWriteError( + f"Cannot write key 
{key}; begins with section pattern") + for delim in self._delimiters: + if delim in key: + raise InvalidWriteError( + f"Cannot write key {key}; contains delimiter {delim}") def _validate_value_types(self, *, section="", option="", value=""): """Raises a TypeError for illegal non-string values. diff --git a/Lib/email/header.py b/Lib/email/header.py index 113a81f4131..220a84a7454 100644 --- a/Lib/email/header.py +++ b/Lib/email/header.py @@ -59,16 +59,22 @@ _max_append = email.quoprimime._max_append def decode_header(header): """Decode a message header value without converting charset. - Returns a list of (string, charset) pairs containing each of the decoded - parts of the header. Charset is None for non-encoded parts of the header, - otherwise a lower-case string containing the name of the character set - specified in the encoded string. + For historical reasons, this function may return either: + + 1. A list of length 1 containing a pair (str, None). + 2. A list of (bytes, charset) pairs containing each of the decoded + parts of the header. Charset is None for non-encoded parts of the header, + otherwise a lower-case string containing the name of the character set + specified in the encoded string. header may be a string that may or may not contain RFC2047 encoded words, or it may be a Header object. An email.errors.HeaderParseError may be raised when certain decoding error occurs (e.g. a base64 decoding exception). + + This function exists for backwards compatibility only. For new code, we + recommend using email.headerregistry.HeaderRegistry instead. """ # If it is a Header object, we can just return the encoded chunks. if hasattr(header, '_chunks'): @@ -161,6 +167,9 @@ def make_header(decoded_seq, maxlinelen=None, header_name=None, This function takes one of those sequence of pairs and returns a Header instance. Optional maxlinelen, header_name, and continuation_ws are as in the Header constructor. + + This function exists for backwards compatibility only, and is not + recommended for use in new code. """ h = Header(maxlinelen=maxlinelen, header_name=header_name, continuation_ws=continuation_ws) diff --git a/Lib/http/server.py b/Lib/http/server.py index ef10d185932..a2ffbe2e44d 100644 --- a/Lib/http/server.py +++ b/Lib/http/server.py @@ -115,7 +115,7 @@ DEFAULT_ERROR_CONTENT_TYPE = "text/html;charset=utf-8" class HTTPServer(socketserver.TCPServer): allow_reuse_address = True # Seems to make sense in testing environment - allow_reuse_port = True + allow_reuse_port = False def server_bind(self): """Override server_bind to store the server name.""" diff --git a/Lib/logging/config.py b/Lib/logging/config.py index c994349fd6e..3d9aa00fa52 100644 --- a/Lib/logging/config.py +++ b/Lib/logging/config.py @@ -1018,7 +1018,7 @@ def listen(port=DEFAULT_LOGGING_CONFIG_PORT, verify=None): """ allow_reuse_address = True - allow_reuse_port = True + allow_reuse_port = False def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT, handler=None, ready=None, verify=None): diff --git a/Lib/platform.py b/Lib/platform.py index 077db81264a..e7f180fc5ac 100644 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -1144,7 +1144,7 @@ def _sys_version(sys_version=None): # CPython cpython_sys_version_parser = re.compile( r'([\w.+]+)\s*' # "version<space>" - r'(?:experimental free-threading build\s+)?' # "free-threading-build<space>" + r'(?:free-threading build\s+)?' 
# "free-threading-build<space>" r'\(#?([^,]+)' # "(#buildno" r'(?:,\s*([\w ]*)' # ", builddate" r'(?:,\s*([\w :]*))?)?\)\s*' # ", buildtime)<space>" diff --git a/Lib/test/_code_definitions.py b/Lib/test/_code_definitions.py index 274beb65a6d..70c44da2ec6 100644 --- a/Lib/test/_code_definitions.py +++ b/Lib/test/_code_definitions.py @@ -57,6 +57,13 @@ def spam_with_globals_and_builtins(): print(res) +def spam_with_global_and_attr_same_name(): + try: + spam_minimal.spam_minimal + except AttributeError: + pass + + def spam_full_args(a, b, /, c, d, *args, e, f, **kwargs): return (a, b, c, d, e, f, args, kwargs) @@ -190,6 +197,7 @@ TOP_FUNCTIONS = [ spam_minimal, spam_with_builtins, spam_with_globals_and_builtins, + spam_with_global_and_attr_same_name, spam_full_args, spam_full_args_with_defaults, spam_args_attrs_and_builtins, @@ -258,6 +266,7 @@ STATELESS_CODE = [ script_with_globals, spam_full_args_with_defaults, spam_with_globals_and_builtins, + spam_with_global_and_attr_same_name, spam_full, ] @@ -275,6 +284,7 @@ SCRIPT_FUNCTIONS = [ *PURE_SCRIPT_FUNCTIONS, script_with_globals, spam_with_globals_and_builtins, + spam_with_global_and_attr_same_name, ] diff --git a/Lib/test/test_asyncio/test_tools.py b/Lib/test/test_asyncio/test_tools.py index ba36e759ccd..34e94830204 100644 --- a/Lib/test/test_asyncio/test_tools.py +++ b/Lib/test/test_asyncio/test_tools.py @@ -2,6 +2,13 @@ import unittest from asyncio import tools +from collections import namedtuple + +FrameInfo = namedtuple('FrameInfo', ['funcname', 'filename', 'lineno']) +CoroInfo = namedtuple('CoroInfo', ['call_stack', 'task_name']) +TaskInfo = namedtuple('TaskInfo', ['task_id', 'task_name', 'coroutine_stack', 'awaited_by']) +AwaitedInfo = namedtuple('AwaitedInfo', ['thread_id', 'awaited_by']) + # mock output of get_all_awaited_by function. TEST_INPUTS_TREE = [ @@ -10,81 +17,151 @@ TEST_INPUTS_TREE = [ # different subtasks part of a TaskGroup (root1 and root2) which call # awaiter functions. 
( - ( - 1, - [ - (2, "Task-1", []), - ( - 3, - "timer", - [ - [[("awaiter3", "/path/to/app.py", 130), - ("awaiter2", "/path/to/app.py", 120), - ("awaiter", "/path/to/app.py", 110)], 4], - [[("awaiterB3", "/path/to/app.py", 190), - ("awaiterB2", "/path/to/app.py", 180), - ("awaiterB", "/path/to/app.py", 170)], 5], - [[("awaiterB3", "/path/to/app.py", 190), - ("awaiterB2", "/path/to/app.py", 180), - ("awaiterB", "/path/to/app.py", 170)], 6], - [[("awaiter3", "/path/to/app.py", 130), - ("awaiter2", "/path/to/app.py", 120), - ("awaiter", "/path/to/app.py", 110)], 7], - ], - ), - ( - 8, - "root1", - [[["_aexit", "__aexit__", "main"], 2]], - ), - ( - 9, - "root2", - [[["_aexit", "__aexit__", "main"], 2]], - ), - ( - 4, - "child1_1", - [ - [ - ["_aexit", "__aexit__", "blocho_caller", "bloch"], - 8, - ] - ], - ), - ( - 6, - "child2_1", - [ - [ - ["_aexit", "__aexit__", "blocho_caller", "bloch"], - 8, - ] - ], - ), - ( - 7, - "child1_2", - [ - [ - ["_aexit", "__aexit__", "blocho_caller", "bloch"], - 9, - ] - ], - ), - ( - 5, - "child2_2", - [ - [ - ["_aexit", "__aexit__", "blocho_caller", "bloch"], - 9, - ] - ], + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=2, + task_name="Task-1", + coroutine_stack=[], + awaited_by=[] ), - ], + TaskInfo( + task_id=3, + task_name="timer", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("awaiter3", "/path/to/app.py", 130), + FrameInfo("awaiter2", "/path/to/app.py", 120), + FrameInfo("awaiter", "/path/to/app.py", 110) + ], + task_name=4 + ), + CoroInfo( + call_stack=[ + FrameInfo("awaiterB3", "/path/to/app.py", 190), + FrameInfo("awaiterB2", "/path/to/app.py", 180), + FrameInfo("awaiterB", "/path/to/app.py", 170) + ], + task_name=5 + ), + CoroInfo( + call_stack=[ + FrameInfo("awaiterB3", "/path/to/app.py", 190), + FrameInfo("awaiterB2", "/path/to/app.py", 180), + FrameInfo("awaiterB", "/path/to/app.py", 170) + ], + task_name=6 + ), + CoroInfo( + call_stack=[ + FrameInfo("awaiter3", "/path/to/app.py", 130), + FrameInfo("awaiter2", "/path/to/app.py", 120), + FrameInfo("awaiter", "/path/to/app.py", 110) + ], + task_name=7 + ) + ] + ), + TaskInfo( + task_id=8, + task_name="root1", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("_aexit", "", 0), + FrameInfo("__aexit__", "", 0), + FrameInfo("main", "", 0) + ], + task_name=2 + ) + ] + ), + TaskInfo( + task_id=9, + task_name="root2", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("_aexit", "", 0), + FrameInfo("__aexit__", "", 0), + FrameInfo("main", "", 0) + ], + task_name=2 + ) + ] + ), + TaskInfo( + task_id=4, + task_name="child1_1", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("_aexit", "", 0), + FrameInfo("__aexit__", "", 0), + FrameInfo("blocho_caller", "", 0), + FrameInfo("bloch", "", 0) + ], + task_name=8 + ) + ] + ), + TaskInfo( + task_id=6, + task_name="child2_1", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("_aexit", "", 0), + FrameInfo("__aexit__", "", 0), + FrameInfo("blocho_caller", "", 0), + FrameInfo("bloch", "", 0) + ], + task_name=8 + ) + ] + ), + TaskInfo( + task_id=7, + task_name="child1_2", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("_aexit", "", 0), + FrameInfo("__aexit__", "", 0), + FrameInfo("blocho_caller", "", 0), + FrameInfo("bloch", "", 0) + ], + task_name=9 + ) + ] + ), + TaskInfo( + task_id=5, + task_name="child2_2", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + 
FrameInfo("_aexit", "", 0), + FrameInfo("__aexit__", "", 0), + FrameInfo("blocho_caller", "", 0), + FrameInfo("bloch", "", 0) + ], + task_name=9 + ) + ] + ) + ] ), - (0, []), + AwaitedInfo(thread_id=0, awaited_by=[]) ), ( [ @@ -130,26 +207,96 @@ TEST_INPUTS_TREE = [ [ # test case containing two roots ( - ( - 9, - [ - (5, "Task-5", []), - (6, "Task-6", [[["main2"], 5]]), - (7, "Task-7", [[["main2"], 5]]), - (8, "Task-8", [[["main2"], 5]]), - ], + AwaitedInfo( + thread_id=9, + awaited_by=[ + TaskInfo( + task_id=5, + task_name="Task-5", + coroutine_stack=[], + awaited_by=[] + ), + TaskInfo( + task_id=6, + task_name="Task-6", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main2", "", 0)], + task_name=5 + ) + ] + ), + TaskInfo( + task_id=7, + task_name="Task-7", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main2", "", 0)], + task_name=5 + ) + ] + ), + TaskInfo( + task_id=8, + task_name="Task-8", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main2", "", 0)], + task_name=5 + ) + ] + ) + ] ), - ( - 10, - [ - (1, "Task-1", []), - (2, "Task-2", [[["main"], 1]]), - (3, "Task-3", [[["main"], 1]]), - (4, "Task-4", [[["main"], 1]]), - ], + AwaitedInfo( + thread_id=10, + awaited_by=[ + TaskInfo( + task_id=1, + task_name="Task-1", + coroutine_stack=[], + awaited_by=[] + ), + TaskInfo( + task_id=2, + task_name="Task-2", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=1 + ) + ] + ), + TaskInfo( + task_id=3, + task_name="Task-3", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=1 + ) + ] + ), + TaskInfo( + task_id=4, + task_name="Task-4", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=1 + ) + ] + ) + ] ), - (11, []), - (0, []), + AwaitedInfo(thread_id=11, awaited_by=[]), + AwaitedInfo(thread_id=0, awaited_by=[]) ), ( [ @@ -174,18 +321,63 @@ TEST_INPUTS_TREE = [ # test case containing two roots, one of them without subtasks ( [ - (1, [(2, "Task-5", [])]), - ( - 3, - [ - (4, "Task-1", []), - (5, "Task-2", [[["main"], 4]]), - (6, "Task-3", [[["main"], 4]]), - (7, "Task-4", [[["main"], 4]]), - ], + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=2, + task_name="Task-5", + coroutine_stack=[], + awaited_by=[] + ) + ] ), - (8, []), - (0, []), + AwaitedInfo( + thread_id=3, + awaited_by=[ + TaskInfo( + task_id=4, + task_name="Task-1", + coroutine_stack=[], + awaited_by=[] + ), + TaskInfo( + task_id=5, + task_name="Task-2", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=4 + ) + ] + ), + TaskInfo( + task_id=6, + task_name="Task-3", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=4 + ) + ] + ), + TaskInfo( + task_id=7, + task_name="Task-4", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=4 + ) + ] + ) + ] + ), + AwaitedInfo(thread_id=8, awaited_by=[]), + AwaitedInfo(thread_id=0, awaited_by=[]) ] ), ( @@ -208,19 +400,44 @@ TEST_INPUTS_CYCLES_TREE = [ # this test case contains a cycle: two tasks awaiting each other. 
( [ - ( - 1, - [ - (2, "Task-1", []), - ( - 3, - "a", - [[["awaiter2"], 4], [["main"], 2]], + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=2, + task_name="Task-1", + coroutine_stack=[], + awaited_by=[] ), - (4, "b", [[["awaiter"], 3]]), - ], + TaskInfo( + task_id=3, + task_name="a", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("awaiter2", "", 0)], + task_name=4 + ), + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=2 + ) + ] + ), + TaskInfo( + task_id=4, + task_name="b", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("awaiter", "", 0)], + task_name=3 + ) + ] + ) + ] ), - (0, []), + AwaitedInfo(thread_id=0, awaited_by=[]) ] ), ([[4, 3, 4]]), @@ -229,32 +446,85 @@ TEST_INPUTS_CYCLES_TREE = [ # this test case contains two cycles ( [ - ( - 1, - [ - (2, "Task-1", []), - ( - 3, - "A", - [[["nested", "nested", "task_b"], 4]], + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=2, + task_name="Task-1", + coroutine_stack=[], + awaited_by=[] ), - ( - 4, - "B", - [ - [["nested", "nested", "task_c"], 5], - [["nested", "nested", "task_a"], 3], - ], + TaskInfo( + task_id=3, + task_name="A", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("nested", "", 0), + FrameInfo("nested", "", 0), + FrameInfo("task_b", "", 0) + ], + task_name=4 + ) + ] ), - (5, "C", [[["nested", "nested"], 6]]), - ( - 6, - "Task-2", - [[["nested", "nested", "task_b"], 4]], + TaskInfo( + task_id=4, + task_name="B", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("nested", "", 0), + FrameInfo("nested", "", 0), + FrameInfo("task_c", "", 0) + ], + task_name=5 + ), + CoroInfo( + call_stack=[ + FrameInfo("nested", "", 0), + FrameInfo("nested", "", 0), + FrameInfo("task_a", "", 0) + ], + task_name=3 + ) + ] ), - ], + TaskInfo( + task_id=5, + task_name="C", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("nested", "", 0), + FrameInfo("nested", "", 0) + ], + task_name=6 + ) + ] + ), + TaskInfo( + task_id=6, + task_name="Task-2", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("nested", "", 0), + FrameInfo("nested", "", 0), + FrameInfo("task_b", "", 0) + ], + task_name=4 + ) + ] + ) + ] ), - (0, []), + AwaitedInfo(thread_id=0, awaited_by=[]) ] ), ([[4, 3, 4], [4, 6, 5, 4]]), @@ -267,81 +537,160 @@ TEST_INPUTS_TABLE = [ # different subtasks part of a TaskGroup (root1 and root2) which call # awaiter functions. 
( - ( - 1, - [ - (2, "Task-1", []), - ( - 3, - "timer", - [ - [["awaiter3", "awaiter2", "awaiter"], 4], - [["awaiter1_3", "awaiter1_2", "awaiter1"], 5], - [["awaiter1_3", "awaiter1_2", "awaiter1"], 6], - [["awaiter3", "awaiter2", "awaiter"], 7], - ], - ), - ( - 8, - "root1", - [[["_aexit", "__aexit__", "main"], 2]], - ), - ( - 9, - "root2", - [[["_aexit", "__aexit__", "main"], 2]], - ), - ( - 4, - "child1_1", - [ - [ - ["_aexit", "__aexit__", "blocho_caller", "bloch"], - 8, - ] - ], - ), - ( - 6, - "child2_1", - [ - [ - ["_aexit", "__aexit__", "blocho_caller", "bloch"], - 8, - ] - ], - ), - ( - 7, - "child1_2", - [ - [ - ["_aexit", "__aexit__", "blocho_caller", "bloch"], - 9, - ] - ], - ), - ( - 5, - "child2_2", - [ - [ - ["_aexit", "__aexit__", "blocho_caller", "bloch"], - 9, - ] - ], + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=2, + task_name="Task-1", + coroutine_stack=[], + awaited_by=[] ), - ], + TaskInfo( + task_id=3, + task_name="timer", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("awaiter3", "", 0), + FrameInfo("awaiter2", "", 0), + FrameInfo("awaiter", "", 0) + ], + task_name=4 + ), + CoroInfo( + call_stack=[ + FrameInfo("awaiter1_3", "", 0), + FrameInfo("awaiter1_2", "", 0), + FrameInfo("awaiter1", "", 0) + ], + task_name=5 + ), + CoroInfo( + call_stack=[ + FrameInfo("awaiter1_3", "", 0), + FrameInfo("awaiter1_2", "", 0), + FrameInfo("awaiter1", "", 0) + ], + task_name=6 + ), + CoroInfo( + call_stack=[ + FrameInfo("awaiter3", "", 0), + FrameInfo("awaiter2", "", 0), + FrameInfo("awaiter", "", 0) + ], + task_name=7 + ) + ] + ), + TaskInfo( + task_id=8, + task_name="root1", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("_aexit", "", 0), + FrameInfo("__aexit__", "", 0), + FrameInfo("main", "", 0) + ], + task_name=2 + ) + ] + ), + TaskInfo( + task_id=9, + task_name="root2", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("_aexit", "", 0), + FrameInfo("__aexit__", "", 0), + FrameInfo("main", "", 0) + ], + task_name=2 + ) + ] + ), + TaskInfo( + task_id=4, + task_name="child1_1", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("_aexit", "", 0), + FrameInfo("__aexit__", "", 0), + FrameInfo("blocho_caller", "", 0), + FrameInfo("bloch", "", 0) + ], + task_name=8 + ) + ] + ), + TaskInfo( + task_id=6, + task_name="child2_1", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("_aexit", "", 0), + FrameInfo("__aexit__", "", 0), + FrameInfo("blocho_caller", "", 0), + FrameInfo("bloch", "", 0) + ], + task_name=8 + ) + ] + ), + TaskInfo( + task_id=7, + task_name="child1_2", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("_aexit", "", 0), + FrameInfo("__aexit__", "", 0), + FrameInfo("blocho_caller", "", 0), + FrameInfo("bloch", "", 0) + ], + task_name=9 + ) + ] + ), + TaskInfo( + task_id=5, + task_name="child2_2", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("_aexit", "", 0), + FrameInfo("__aexit__", "", 0), + FrameInfo("blocho_caller", "", 0), + FrameInfo("bloch", "", 0) + ], + task_name=9 + ) + ] + ) + ] ), - (0, []), + AwaitedInfo(thread_id=0, awaited_by=[]) ), ( [ - [1, "0x2", "Task-1", "", "", "0x0"], + [1, "0x2", "Task-1", "", "", "", "0x0"], [ 1, "0x3", "timer", + "", "awaiter3 -> awaiter2 -> awaiter", "child1_1", "0x4", @@ -350,6 +699,7 @@ TEST_INPUTS_TABLE = [ 1, "0x3", "timer", + "", "awaiter1_3 -> awaiter1_2 -> awaiter1", "child2_2", "0x5", @@ -358,6 +708,7 @@ 
TEST_INPUTS_TABLE = [ 1, "0x3", "timer", + "", "awaiter1_3 -> awaiter1_2 -> awaiter1", "child2_1", "0x6", @@ -366,6 +717,7 @@ TEST_INPUTS_TABLE = [ 1, "0x3", "timer", + "", "awaiter3 -> awaiter2 -> awaiter", "child1_2", "0x7", @@ -374,6 +726,7 @@ TEST_INPUTS_TABLE = [ 1, "0x8", "root1", + "", "_aexit -> __aexit__ -> main", "Task-1", "0x2", @@ -382,6 +735,7 @@ TEST_INPUTS_TABLE = [ 1, "0x9", "root2", + "", "_aexit -> __aexit__ -> main", "Task-1", "0x2", @@ -390,6 +744,7 @@ TEST_INPUTS_TABLE = [ 1, "0x4", "child1_1", + "", "_aexit -> __aexit__ -> blocho_caller -> bloch", "root1", "0x8", @@ -398,6 +753,7 @@ TEST_INPUTS_TABLE = [ 1, "0x6", "child2_1", + "", "_aexit -> __aexit__ -> blocho_caller -> bloch", "root1", "0x8", @@ -406,6 +762,7 @@ TEST_INPUTS_TABLE = [ 1, "0x7", "child1_2", + "", "_aexit -> __aexit__ -> blocho_caller -> bloch", "root2", "0x9", @@ -414,6 +771,7 @@ TEST_INPUTS_TABLE = [ 1, "0x5", "child2_2", + "", "_aexit -> __aexit__ -> blocho_caller -> bloch", "root2", "0x9", @@ -424,37 +782,107 @@ TEST_INPUTS_TABLE = [ [ # test case containing two roots ( - ( - 9, - [ - (5, "Task-5", []), - (6, "Task-6", [[["main2"], 5]]), - (7, "Task-7", [[["main2"], 5]]), - (8, "Task-8", [[["main2"], 5]]), - ], + AwaitedInfo( + thread_id=9, + awaited_by=[ + TaskInfo( + task_id=5, + task_name="Task-5", + coroutine_stack=[], + awaited_by=[] + ), + TaskInfo( + task_id=6, + task_name="Task-6", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main2", "", 0)], + task_name=5 + ) + ] + ), + TaskInfo( + task_id=7, + task_name="Task-7", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main2", "", 0)], + task_name=5 + ) + ] + ), + TaskInfo( + task_id=8, + task_name="Task-8", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main2", "", 0)], + task_name=5 + ) + ] + ) + ] ), - ( - 10, - [ - (1, "Task-1", []), - (2, "Task-2", [[["main"], 1]]), - (3, "Task-3", [[["main"], 1]]), - (4, "Task-4", [[["main"], 1]]), - ], + AwaitedInfo( + thread_id=10, + awaited_by=[ + TaskInfo( + task_id=1, + task_name="Task-1", + coroutine_stack=[], + awaited_by=[] + ), + TaskInfo( + task_id=2, + task_name="Task-2", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=1 + ) + ] + ), + TaskInfo( + task_id=3, + task_name="Task-3", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=1 + ) + ] + ), + TaskInfo( + task_id=4, + task_name="Task-4", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=1 + ) + ] + ) + ] ), - (11, []), - (0, []), + AwaitedInfo(thread_id=11, awaited_by=[]), + AwaitedInfo(thread_id=0, awaited_by=[]) ), ( [ - [9, "0x5", "Task-5", "", "", "0x0"], - [9, "0x6", "Task-6", "main2", "Task-5", "0x5"], - [9, "0x7", "Task-7", "main2", "Task-5", "0x5"], - [9, "0x8", "Task-8", "main2", "Task-5", "0x5"], - [10, "0x1", "Task-1", "", "", "0x0"], - [10, "0x2", "Task-2", "main", "Task-1", "0x1"], - [10, "0x3", "Task-3", "main", "Task-1", "0x1"], - [10, "0x4", "Task-4", "main", "Task-1", "0x1"], + [9, "0x5", "Task-5", "", "", "", "0x0"], + [9, "0x6", "Task-6", "", "main2", "Task-5", "0x5"], + [9, "0x7", "Task-7", "", "main2", "Task-5", "0x5"], + [9, "0x8", "Task-8", "", "main2", "Task-5", "0x5"], + [10, "0x1", "Task-1", "", "", "", "0x0"], + [10, "0x2", "Task-2", "", "main", "Task-1", "0x1"], + [10, "0x3", "Task-3", "", "main", "Task-1", "0x1"], + [10, "0x4", "Task-4", "", "main", "Task-1", "0x1"], ] ), ], @@ 
-462,27 +890,72 @@ TEST_INPUTS_TABLE = [ # test case containing two roots, one of them without subtasks ( [ - (1, [(2, "Task-5", [])]), - ( - 3, - [ - (4, "Task-1", []), - (5, "Task-2", [[["main"], 4]]), - (6, "Task-3", [[["main"], 4]]), - (7, "Task-4", [[["main"], 4]]), - ], + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=2, + task_name="Task-5", + coroutine_stack=[], + awaited_by=[] + ) + ] ), - (8, []), - (0, []), + AwaitedInfo( + thread_id=3, + awaited_by=[ + TaskInfo( + task_id=4, + task_name="Task-1", + coroutine_stack=[], + awaited_by=[] + ), + TaskInfo( + task_id=5, + task_name="Task-2", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=4 + ) + ] + ), + TaskInfo( + task_id=6, + task_name="Task-3", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=4 + ) + ] + ), + TaskInfo( + task_id=7, + task_name="Task-4", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=4 + ) + ] + ) + ] + ), + AwaitedInfo(thread_id=8, awaited_by=[]), + AwaitedInfo(thread_id=0, awaited_by=[]) ] ), ( [ - [1, "0x2", "Task-5", "", "", "0x0"], - [3, "0x4", "Task-1", "", "", "0x0"], - [3, "0x5", "Task-2", "main", "Task-1", "0x4"], - [3, "0x6", "Task-3", "main", "Task-1", "0x4"], - [3, "0x7", "Task-4", "main", "Task-1", "0x4"], + [1, "0x2", "Task-5", "", "", "", "0x0"], + [3, "0x4", "Task-1", "", "", "", "0x0"], + [3, "0x5", "Task-2", "", "main", "Task-1", "0x4"], + [3, "0x6", "Task-3", "", "main", "Task-1", "0x4"], + [3, "0x7", "Task-4", "", "main", "Task-1", "0x4"], ] ), ], @@ -491,27 +964,52 @@ TEST_INPUTS_TABLE = [ # this test case contains a cycle: two tasks awaiting each other. ( [ - ( - 1, - [ - (2, "Task-1", []), - ( - 3, - "a", - [[["awaiter2"], 4], [["main"], 2]], + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=2, + task_name="Task-1", + coroutine_stack=[], + awaited_by=[] ), - (4, "b", [[["awaiter"], 3]]), - ], + TaskInfo( + task_id=3, + task_name="a", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("awaiter2", "", 0)], + task_name=4 + ), + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=2 + ) + ] + ), + TaskInfo( + task_id=4, + task_name="b", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("awaiter", "", 0)], + task_name=3 + ) + ] + ) + ] ), - (0, []), + AwaitedInfo(thread_id=0, awaited_by=[]) ] ), ( [ - [1, "0x2", "Task-1", "", "", "0x0"], - [1, "0x3", "a", "awaiter2", "b", "0x4"], - [1, "0x3", "a", "main", "Task-1", "0x2"], - [1, "0x4", "b", "awaiter", "a", "0x3"], + [1, "0x2", "Task-1", "", "", "", "0x0"], + [1, "0x3", "a", "", "awaiter2", "b", "0x4"], + [1, "0x3", "a", "", "main", "Task-1", "0x2"], + [1, "0x4", "b", "", "awaiter", "a", "0x3"], ] ), ], @@ -519,41 +1017,95 @@ TEST_INPUTS_TABLE = [ # this test case contains two cycles ( [ - ( - 1, - [ - (2, "Task-1", []), - ( - 3, - "A", - [[["nested", "nested", "task_b"], 4]], + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=2, + task_name="Task-1", + coroutine_stack=[], + awaited_by=[] + ), + TaskInfo( + task_id=3, + task_name="A", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("nested", "", 0), + FrameInfo("nested", "", 0), + FrameInfo("task_b", "", 0) + ], + task_name=4 + ) + ] ), - ( - 4, - "B", - [ - [["nested", "nested", "task_c"], 5], - [["nested", "nested", "task_a"], 3], - ], + TaskInfo( + task_id=4, + task_name="B", + coroutine_stack=[], + 
awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("nested", "", 0), + FrameInfo("nested", "", 0), + FrameInfo("task_c", "", 0) + ], + task_name=5 + ), + CoroInfo( + call_stack=[ + FrameInfo("nested", "", 0), + FrameInfo("nested", "", 0), + FrameInfo("task_a", "", 0) + ], + task_name=3 + ) + ] ), - (5, "C", [[["nested", "nested"], 6]]), - ( - 6, - "Task-2", - [[["nested", "nested", "task_b"], 4]], + TaskInfo( + task_id=5, + task_name="C", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("nested", "", 0), + FrameInfo("nested", "", 0) + ], + task_name=6 + ) + ] ), - ], + TaskInfo( + task_id=6, + task_name="Task-2", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("nested", "", 0), + FrameInfo("nested", "", 0), + FrameInfo("task_b", "", 0) + ], + task_name=4 + ) + ] + ) + ] ), - (0, []), + AwaitedInfo(thread_id=0, awaited_by=[]) ] ), ( [ - [1, "0x2", "Task-1", "", "", "0x0"], + [1, "0x2", "Task-1", "", "", "", "0x0"], [ 1, "0x3", "A", + "", "nested -> nested -> task_b", "B", "0x4", @@ -562,6 +1114,7 @@ TEST_INPUTS_TABLE = [ 1, "0x4", "B", + "", "nested -> nested -> task_c", "C", "0x5", @@ -570,6 +1123,7 @@ TEST_INPUTS_TABLE = [ 1, "0x4", "B", + "", "nested -> nested -> task_a", "A", "0x3", @@ -578,6 +1132,7 @@ TEST_INPUTS_TABLE = [ 1, "0x5", "C", + "", "nested -> nested", "Task-2", "0x6", @@ -586,6 +1141,7 @@ TEST_INPUTS_TABLE = [ 1, "0x6", "Task-2", + "", "nested -> nested -> task_b", "B", "0x4", @@ -600,7 +1156,8 @@ class TestAsyncioToolsTree(unittest.TestCase): def test_asyncio_utils(self): for input_, tree in TEST_INPUTS_TREE: with self.subTest(input_): - self.assertEqual(tools.build_async_tree(input_), tree) + result = tools.build_async_tree(input_) + self.assertEqual(result, tree) def test_asyncio_utils_cycles(self): for input_, cycles in TEST_INPUTS_CYCLES_TREE: @@ -615,7 +1172,8 @@ class TestAsyncioToolsTable(unittest.TestCase): def test_asyncio_utils(self): for input_, table in TEST_INPUTS_TABLE: with self.subTest(input_): - self.assertEqual(tools.build_task_table(input_), table) + result = tools.build_task_table(input_) + self.assertEqual(result, table) class TestAsyncioToolsBasic(unittest.TestCase): @@ -632,26 +1190,67 @@ class TestAsyncioToolsBasic(unittest.TestCase): self.assertEqual(tools.build_task_table(result), expected_output) def test_only_independent_tasks_tree(self): - input_ = [(1, [(10, "taskA", []), (11, "taskB", [])])] + input_ = [ + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=10, + task_name="taskA", + coroutine_stack=[], + awaited_by=[] + ), + TaskInfo( + task_id=11, + task_name="taskB", + coroutine_stack=[], + awaited_by=[] + ) + ] + ) + ] expected = [["└── (T) taskA"], ["└── (T) taskB"]] result = tools.build_async_tree(input_) self.assertEqual(sorted(result), sorted(expected)) def test_only_independent_tasks_table(self): - input_ = [(1, [(10, "taskA", []), (11, "taskB", [])])] + input_ = [ + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=10, + task_name="taskA", + coroutine_stack=[], + awaited_by=[] + ), + TaskInfo( + task_id=11, + task_name="taskB", + coroutine_stack=[], + awaited_by=[] + ) + ] + ) + ] self.assertEqual( tools.build_task_table(input_), - [[1, "0xa", "taskA", "", "", "0x0"], [1, "0xb", "taskB", "", "", "0x0"]], + [[1, '0xa', 'taskA', '', '', '', '0x0'], [1, '0xb', 'taskB', '', '', '', '0x0']] ) def test_single_task_tree(self): """Test build_async_tree with a single task and no awaits.""" result = [ - ( - 1, - [ - (2, "Task-1", []), - ], + AwaitedInfo( + 
thread_id=1, + awaited_by=[ + TaskInfo( + task_id=2, + task_name="Task-1", + coroutine_stack=[], + awaited_by=[] + ) + ] ) ] expected_output = [ @@ -664,25 +1263,50 @@ class TestAsyncioToolsBasic(unittest.TestCase): def test_single_task_table(self): """Test build_task_table with a single task and no awaits.""" result = [ - ( - 1, - [ - (2, "Task-1", []), - ], + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=2, + task_name="Task-1", + coroutine_stack=[], + awaited_by=[] + ) + ] ) ] - expected_output = [[1, "0x2", "Task-1", "", "", "0x0"]] + expected_output = [[1, '0x2', 'Task-1', '', '', '', '0x0']] self.assertEqual(tools.build_task_table(result), expected_output) def test_cycle_detection(self): """Test build_async_tree raises CycleFoundException for cyclic input.""" result = [ - ( - 1, - [ - (2, "Task-1", [[["main"], 3]]), - (3, "Task-2", [[["main"], 2]]), - ], + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=2, + task_name="Task-1", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=3 + ) + ] + ), + TaskInfo( + task_id=3, + task_name="Task-2", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=2 + ) + ] + ) + ] ) ] with self.assertRaises(tools.CycleFoundException) as context: @@ -692,13 +1316,38 @@ class TestAsyncioToolsBasic(unittest.TestCase): def test_complex_tree(self): """Test build_async_tree with a more complex tree structure.""" result = [ - ( - 1, - [ - (2, "Task-1", []), - (3, "Task-2", [[["main"], 2]]), - (4, "Task-3", [[["main"], 3]]), - ], + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=2, + task_name="Task-1", + coroutine_stack=[], + awaited_by=[] + ), + TaskInfo( + task_id=3, + task_name="Task-2", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=2 + ) + ] + ), + TaskInfo( + task_id=4, + task_name="Task-3", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=3 + ) + ] + ) + ] ) ] expected_output = [ @@ -715,30 +1364,76 @@ class TestAsyncioToolsBasic(unittest.TestCase): def test_complex_table(self): """Test build_task_table with a more complex tree structure.""" result = [ - ( - 1, - [ - (2, "Task-1", []), - (3, "Task-2", [[["main"], 2]]), - (4, "Task-3", [[["main"], 3]]), - ], + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=2, + task_name="Task-1", + coroutine_stack=[], + awaited_by=[] + ), + TaskInfo( + task_id=3, + task_name="Task-2", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=2 + ) + ] + ), + TaskInfo( + task_id=4, + task_name="Task-3", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("main", "", 0)], + task_name=3 + ) + ] + ) + ] ) ] expected_output = [ - [1, "0x2", "Task-1", "", "", "0x0"], - [1, "0x3", "Task-2", "main", "Task-1", "0x2"], - [1, "0x4", "Task-3", "main", "Task-2", "0x3"], + [1, '0x2', 'Task-1', '', '', '', '0x0'], + [1, '0x3', 'Task-2', '', 'main', 'Task-1', '0x2'], + [1, '0x4', 'Task-3', '', 'main', 'Task-2', '0x3'] ] self.assertEqual(tools.build_task_table(result), expected_output) def test_deep_coroutine_chain(self): input_ = [ - ( - 1, - [ - (10, "leaf", [[["c1", "c2", "c3", "c4", "c5"], 11]]), - (11, "root", []), - ], + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=10, + task_name="leaf", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("c1", "", 0), 
+ FrameInfo("c2", "", 0), + FrameInfo("c3", "", 0), + FrameInfo("c4", "", 0), + FrameInfo("c5", "", 0) + ], + task_name=11 + ) + ] + ), + TaskInfo( + task_id=11, + task_name="root", + coroutine_stack=[], + awaited_by=[] + ) + ] ) ] expected = [ @@ -757,13 +1452,47 @@ class TestAsyncioToolsBasic(unittest.TestCase): def test_multiple_cycles_same_node(self): input_ = [ - ( - 1, - [ - (1, "Task-A", [[["call1"], 2]]), - (2, "Task-B", [[["call2"], 3]]), - (3, "Task-C", [[["call3"], 1], [["call4"], 2]]), - ], + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=1, + task_name="Task-A", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("call1", "", 0)], + task_name=2 + ) + ] + ), + TaskInfo( + task_id=2, + task_name="Task-B", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("call2", "", 0)], + task_name=3 + ) + ] + ), + TaskInfo( + task_id=3, + task_name="Task-C", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("call3", "", 0)], + task_name=1 + ), + CoroInfo( + call_stack=[FrameInfo("call4", "", 0)], + task_name=2 + ) + ] + ) + ] ) ] with self.assertRaises(tools.CycleFoundException) as ctx: @@ -772,19 +1501,43 @@ class TestAsyncioToolsBasic(unittest.TestCase): self.assertTrue(any(set(c) == {1, 2, 3} for c in cycles)) def test_table_output_format(self): - input_ = [(1, [(1, "Task-A", [[["foo"], 2]]), (2, "Task-B", [])])] + input_ = [ + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=1, + task_name="Task-A", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("foo", "", 0)], + task_name=2 + ) + ] + ), + TaskInfo( + task_id=2, + task_name="Task-B", + coroutine_stack=[], + awaited_by=[] + ) + ] + ) + ] table = tools.build_task_table(input_) for row in table: - self.assertEqual(len(row), 6) + self.assertEqual(len(row), 7) self.assertIsInstance(row[0], int) # thread ID self.assertTrue( isinstance(row[1], str) and row[1].startswith("0x") ) # hex task ID self.assertIsInstance(row[2], str) # task name - self.assertIsInstance(row[3], str) # coroutine chain - self.assertIsInstance(row[4], str) # awaiter name + self.assertIsInstance(row[3], str) # coroutine stack + self.assertIsInstance(row[4], str) # coroutine chain + self.assertIsInstance(row[5], str) # awaiter name self.assertTrue( - isinstance(row[5], str) and row[5].startswith("0x") + isinstance(row[6], str) and row[6].startswith("0x") ) # hex awaiter ID @@ -792,28 +1545,86 @@ class TestAsyncioToolsEdgeCases(unittest.TestCase): def test_task_awaits_self(self): """A task directly awaits itself - should raise a cycle.""" - input_ = [(1, [(1, "Self-Awaiter", [[["loopback"], 1]])])] + input_ = [ + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=1, + task_name="Self-Awaiter", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("loopback", "", 0)], + task_name=1 + ) + ] + ) + ] + ) + ] with self.assertRaises(tools.CycleFoundException) as ctx: tools.build_async_tree(input_) self.assertIn([1, 1], ctx.exception.cycles) def test_task_with_missing_awaiter_id(self): """Awaiter ID not in task list - should not crash, just show 'Unknown'.""" - input_ = [(1, [(1, "Task-A", [[["coro"], 999]])])] # 999 not defined + input_ = [ + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=1, + task_name="Task-A", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("coro", "", 0)], + task_name=999 + ) + ] + ) + ] + ) + ] table = tools.build_task_table(input_) self.assertEqual(len(table), 
1) - self.assertEqual(table[0][4], "Unknown") + self.assertEqual(table[0][5], "Unknown") def test_duplicate_coroutine_frames(self): """Same coroutine frame repeated under a parent - should deduplicate.""" input_ = [ - ( - 1, - [ - (1, "Task-1", [[["frameA"], 2], [["frameA"], 3]]), - (2, "Task-2", []), - (3, "Task-3", []), - ], + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=1, + task_name="Task-1", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("frameA", "", 0)], + task_name=2 + ), + CoroInfo( + call_stack=[FrameInfo("frameA", "", 0)], + task_name=3 + ) + ] + ), + TaskInfo( + task_id=2, + task_name="Task-2", + coroutine_stack=[], + awaited_by=[] + ), + TaskInfo( + task_id=3, + task_name="Task-3", + coroutine_stack=[], + awaited_by=[] + ) + ] ) ] tree = tools.build_async_tree(input_) @@ -830,14 +1641,63 @@ class TestAsyncioToolsEdgeCases(unittest.TestCase): def test_task_with_no_name(self): """Task with no name in id2name - should still render with fallback.""" - input_ = [(1, [(1, "root", [[["f1"], 2]]), (2, None, [])])] + input_ = [ + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=1, + task_name="root", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[FrameInfo("f1", "", 0)], + task_name=2 + ) + ] + ), + TaskInfo( + task_id=2, + task_name=None, + coroutine_stack=[], + awaited_by=[] + ) + ] + ) + ] # If name is None, fallback to string should not crash tree = tools.build_async_tree(input_) self.assertIn("(T) None", "\n".join(tree[0])) def test_tree_rendering_with_custom_emojis(self): """Pass custom emojis to the tree renderer.""" - input_ = [(1, [(1, "MainTask", [[["f1", "f2"], 2]]), (2, "SubTask", [])])] + input_ = [ + AwaitedInfo( + thread_id=1, + awaited_by=[ + TaskInfo( + task_id=1, + task_name="MainTask", + coroutine_stack=[], + awaited_by=[ + CoroInfo( + call_stack=[ + FrameInfo("f1", "", 0), + FrameInfo("f2", "", 0) + ], + task_name=2 + ) + ] + ), + TaskInfo( + task_id=2, + task_name="SubTask", + coroutine_stack=[], + awaited_by=[] + ) + ] + ) + ] tree = tools.build_async_tree(input_, task_emoji="🧵", cor_emoji="🔁") flat = "\n".join(tree[0]) self.assertIn("🧵 MainTask", flat) diff --git a/Lib/test/test_capi/test_opt.py b/Lib/test/test_capi/test_opt.py index bf22ef2a592..3fc2cb33795 100644 --- a/Lib/test/test_capi/test_opt.py +++ b/Lib/test/test_capi/test_opt.py @@ -1381,6 +1381,21 @@ class TestUopsOptimization(unittest.TestCase): # Removed guard self.assertNotIn("_CHECK_FUNCTION_EXACT_ARGS", uops) + def test_method_guards_removed_or_reduced(self): + def testfunc(n): + result = 0 + for i in range(n): + result += test_bound_method(i) + return result + res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD) + self.assertEqual(res, sum(range(TIER2_THRESHOLD))) + self.assertIsNotNone(ex) + uops = get_opnames(ex) + self.assertIn("_PUSH_FRAME", uops) + # Strength reduced version + self.assertIn("_CHECK_FUNCTION_VERSION_INLINE", uops) + self.assertNotIn("_CHECK_METHOD_VERSION", uops) + def test_jit_error_pops(self): """ Tests that the correct number of pops are inserted into the @@ -2294,5 +2309,12 @@ class TestUopsOptimization(unittest.TestCase): def global_identity(x): return x +class TestObject: + def test(self, *args, **kwargs): + return args[0] + +test_object = TestObject() +test_bound_method = TestObject.test.__get__(test_object) + if __name__ == "__main__": unittest.main() diff --git a/Lib/test/test_code.py b/Lib/test/test_code.py index 9fc2b047bef..655f5a9be7f 100644 --- a/Lib/test/test_code.py +++ 
b/Lib/test/test_code.py @@ -701,6 +701,7 @@ class CodeTest(unittest.TestCase): 'checks': CO_FAST_LOCAL, 'res': CO_FAST_LOCAL, }, + defs.spam_with_global_and_attr_same_name: {}, defs.spam_full_args: { 'a': POSONLY, 'b': POSONLY, @@ -955,6 +956,10 @@ class CodeTest(unittest.TestCase): purelocals=5, globalvars=6, ), + defs.spam_with_global_and_attr_same_name: new_var_counts( + globalvars=2, + attrs=1, + ), defs.spam_full_args: new_var_counts( posonly=2, posorkw=2, diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py index 52c38e42eca..60c62430370 100644 --- a/Lib/test/test_dict.py +++ b/Lib/test/test_dict.py @@ -290,6 +290,38 @@ class DictTest(unittest.TestCase): ['Cannot convert dictionary update sequence element #0 to a sequence'], ) + def test_update_shared_keys(self): + class MyClass: pass + + # Subclass str to enable us to create an object during the + # dict.update() call. + class MyStr(str): + def __hash__(self): + return super().__hash__() + + def __eq__(self, other): + # Create an object that shares the same PyDictKeysObject as + # obj.__dict__. + obj2 = MyClass() + obj2.a = "a" + obj2.b = "b" + obj2.c = "c" + return super().__eq__(other) + + obj = MyClass() + obj.a = "a" + obj.b = "b" + + x = {} + x[MyStr("a")] = MyStr("a") + + # gh-132617: this previously raised "dict mutated during update" error + x.update(obj.__dict__) + + self.assertEqual(x, { + MyStr("a"): "a", + "b": "b", + }) def test_fromkeys(self): self.assertEqual(dict.fromkeys('abc'), {'a':None, 'b':None, 'c':None}) diff --git a/Lib/test/test_email/test_email.py b/Lib/test/test_email/test_email.py index 8765d121fd0..b8116d073a2 100644 --- a/Lib/test/test_email/test_email.py +++ b/Lib/test/test_email/test_email.py @@ -2568,6 +2568,18 @@ Re: =?mac-iceland?q?r=8Aksm=9Arg=8Cs?= baz foo bar =?mac-iceland?q?r=8Aksm?= self.assertEqual(str(make_header(decode_header(s))), '"Müller T" <T.Mueller@xxx.com>') + def test_unencoded_ascii(self): + # bpo-22833/gh-67022: returns [(str, None)] rather than [(bytes, None)] + s = 'header without encoded words' + self.assertEqual(decode_header(s), + [('header without encoded words', None)]) + + def test_unencoded_utf8(self): + # bpo-22833/gh-67022: returns [(str, None)] rather than [(bytes, None)] + s = 'header with unexpected non ASCII caract\xe8res' + self.assertEqual(decode_header(s), + [('header with unexpected non ASCII caract\xe8res', None)]) + # Test the MIMEMessage class class TestMIMEMessage(TestEmailBase): diff --git a/Lib/test/test_external_inspection.py b/Lib/test/test_external_inspection.py index 303af25fc7a..90214e814f2 100644 --- a/Lib/test/test_external_inspection.py +++ b/Lib/test/test_external_inspection.py @@ -5,7 +5,7 @@ import importlib import sys import socket import threading -from asyncio import staggered, taskgroups +from asyncio import staggered, taskgroups, base_events, tasks from unittest.mock import ANY from test.support import os_helper, SHORT_TIMEOUT, busy_retry from test.support.script_helper import make_script @@ -18,8 +18,12 @@ PROCESS_VM_READV_SUPPORTED = False try: from _remote_debugging import PROCESS_VM_READV_SUPPORTED from _remote_debugging import RemoteUnwinder + from _remote_debugging import FrameInfo, CoroInfo, TaskInfo except ImportError: - raise unittest.SkipTest("Test only runs when _remote_debugging is available") + raise unittest.SkipTest( + "Test only runs when _remote_debugging is available" + ) + def _make_test_script(script_dir, script_basename, source): to_return = make_script(script_dir, script_basename, source) @@ -28,7 +32,11 @@ def 
_make_test_script(script_dir, script_basename, source): skip_if_not_supported = unittest.skipIf( - (sys.platform != "darwin" and sys.platform != "linux" and sys.platform != "win32"), + ( + sys.platform != "darwin" + and sys.platform != "linux" + and sys.platform != "win32" + ), "Test only runs on Linux, Windows and MacOS", ) @@ -101,11 +109,16 @@ class TestGetStackTrace(unittest.TestCase): client_socket, _ = server_socket.accept() server_socket.close() response = b"" - while b"ready:main" not in response or b"ready:thread" not in response: + while ( + b"ready:main" not in response + or b"ready:thread" not in response + ): response += client_socket.recv(1024) stack_trace = get_stack_trace(p.pid) except PermissionError: - self.skipTest("Insufficient permissions to read the stack trace") + self.skipTest( + "Insufficient permissions to read the stack trace" + ) finally: if client_socket is not None: client_socket.close() @@ -114,17 +127,17 @@ class TestGetStackTrace(unittest.TestCase): p.wait(timeout=SHORT_TIMEOUT) thread_expected_stack_trace = [ - (script_name, 15, "foo"), - (script_name, 12, "baz"), - (script_name, 9, "bar"), - (threading.__file__, ANY, 'Thread.run') + FrameInfo([script_name, 15, "foo"]), + FrameInfo([script_name, 12, "baz"]), + FrameInfo([script_name, 9, "bar"]), + FrameInfo([threading.__file__, ANY, "Thread.run"]), ] # Is possible that there are more threads, so we check that the # expected stack traces are in the result (looking at you Windows!) self.assertIn((ANY, thread_expected_stack_trace), stack_trace) # Check that the main thread stack trace is in the result - frame = (script_name, 19, "<module>") + frame = FrameInfo([script_name, 19, "<module>"]) for _, stack in stack_trace: if frame in stack: break @@ -189,8 +202,12 @@ class TestGetStackTrace(unittest.TestCase): ): script_dir = os.path.join(work_dir, "script_pkg") os.mkdir(script_dir) - server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + server_socket = socket.socket( + socket.AF_INET, socket.SOCK_STREAM + ) + server_socket.setsockopt( + socket.SOL_SOCKET, socket.SO_REUSEADDR, 1 + ) server_socket.bind(("localhost", port)) server_socket.settimeout(SHORT_TIMEOUT) server_socket.listen(1) @@ -208,7 +225,9 @@ class TestGetStackTrace(unittest.TestCase): self.assertEqual(response, b"ready") stack_trace = get_async_stack_trace(p.pid) except PermissionError: - self.skipTest("Insufficient permissions to read the stack trace") + self.skipTest( + "Insufficient permissions to read the stack trace" + ) finally: if client_socket is not None: client_socket.close() @@ -219,79 +238,49 @@ class TestGetStackTrace(unittest.TestCase): # sets are unordered, so we want to sort "awaited_by"s stack_trace[2].sort(key=lambda x: x[1]) - root_task = "Task-1" expected_stack_trace = [ [ - (script_name, 10, "c5"), - (script_name, 14, "c4"), - (script_name, 17, "c3"), - (script_name, 20, "c2"), + FrameInfo([script_name, 10, "c5"]), + FrameInfo([script_name, 14, "c4"]), + FrameInfo([script_name, 17, "c3"]), + FrameInfo([script_name, 20, "c2"]), ], "c2_root", [ - [ - [ - ( - taskgroups.__file__, - ANY, - "TaskGroup._aexit" - ), - ( - taskgroups.__file__, - ANY, - "TaskGroup.__aexit__" - ), - (script_name, 26, "main"), - ], - "Task-1", - [], - ], - [ - [(script_name, 23, "c1")], - "sub_main_1", + CoroInfo( [ [ - [ - ( + FrameInfo( + [ taskgroups.__file__, ANY, - "TaskGroup._aexit" - ), - ( + "TaskGroup._aexit", + ] + ), + FrameInfo( + [ taskgroups.__file__, ANY, - 
"TaskGroup.__aexit__" - ), - (script_name, 26, "main"), - ], - "Task-1", - [], - ] - ], - ], - [ - [(script_name, 23, "c1")], - "sub_main_2", + "TaskGroup.__aexit__", + ] + ), + FrameInfo([script_name, 26, "main"]), + ], + "Task-1", + ] + ), + CoroInfo( [ - [ - [ - ( - taskgroups.__file__, - ANY, - "TaskGroup._aexit" - ), - ( - taskgroups.__file__, - ANY, - "TaskGroup.__aexit__" - ), - (script_name, 26, "main"), - ], - "Task-1", - [], - ] - ], - ], + [FrameInfo([script_name, 23, "c1"])], + "sub_main_1", + ] + ), + CoroInfo( + [ + [FrameInfo([script_name, 23, "c1"])], + "sub_main_2", + ] + ), ], ] self.assertEqual(stack_trace, expected_stack_trace) @@ -350,7 +339,9 @@ class TestGetStackTrace(unittest.TestCase): self.assertEqual(response, b"ready") stack_trace = get_async_stack_trace(p.pid) except PermissionError: - self.skipTest("Insufficient permissions to read the stack trace") + self.skipTest( + "Insufficient permissions to read the stack trace" + ) finally: if client_socket is not None: client_socket.close() @@ -363,9 +354,9 @@ class TestGetStackTrace(unittest.TestCase): expected_stack_trace = [ [ - (script_name, 10, "gen_nested_call"), - (script_name, 16, "gen"), - (script_name, 19, "main"), + FrameInfo([script_name, 10, "gen_nested_call"]), + FrameInfo([script_name, 16, "gen"]), + FrameInfo([script_name, 19, "main"]), ], "Task-1", [], @@ -427,7 +418,9 @@ class TestGetStackTrace(unittest.TestCase): self.assertEqual(response, b"ready") stack_trace = get_async_stack_trace(p.pid) except PermissionError: - self.skipTest("Insufficient permissions to read the stack trace") + self.skipTest( + "Insufficient permissions to read the stack trace" + ) finally: if client_socket is not None: client_socket.close() @@ -439,9 +432,12 @@ class TestGetStackTrace(unittest.TestCase): stack_trace[2].sort(key=lambda x: x[1]) expected_stack_trace = [ - [(script_name, 11, "deep"), (script_name, 15, "c1")], + [ + FrameInfo([script_name, 11, "deep"]), + FrameInfo([script_name, 15, "c1"]), + ], "Task-2", - [[[(script_name, 21, "main")], "Task-1", []]], + [CoroInfo([[FrameInfo([script_name, 21, "main"])], "Task-1"])], ] self.assertEqual(stack_trace, expected_stack_trace) @@ -503,7 +499,9 @@ class TestGetStackTrace(unittest.TestCase): self.assertEqual(response, b"ready") stack_trace = get_async_stack_trace(p.pid) except PermissionError: - self.skipTest("Insufficient permissions to read the stack trace") + self.skipTest( + "Insufficient permissions to read the stack trace" + ) finally: if client_socket is not None: client_socket.close() @@ -515,20 +513,29 @@ class TestGetStackTrace(unittest.TestCase): stack_trace[2].sort(key=lambda x: x[1]) expected_stack_trace = [ [ - (script_name, 11, "deep"), - (script_name, 15, "c1"), - (staggered.__file__, ANY, "staggered_race.<locals>.run_one_coro"), + FrameInfo([script_name, 11, "deep"]), + FrameInfo([script_name, 15, "c1"]), + FrameInfo( + [ + staggered.__file__, + ANY, + "staggered_race.<locals>.run_one_coro", + ] + ), ], "Task-2", [ - [ + CoroInfo( [ - (staggered.__file__, ANY, "staggered_race"), - (script_name, 21, "main"), - ], - "Task-1", - [], - ] + [ + FrameInfo( + [staggered.__file__, ANY, "staggered_race"] + ), + FrameInfo([script_name, 21, "main"]), + ], + "Task-1", + ] + ) ], ] self.assertEqual(stack_trace, expected_stack_trace) @@ -659,62 +666,174 @@ class TestGetStackTrace(unittest.TestCase): # expected: at least 1000 pending tasks self.assertGreaterEqual(len(entries), 1000) # the first three tasks stem from the code structure - self.assertIn((ANY, "Task-1", 
[]), entries) main_stack = [ - ( - taskgroups.__file__, - ANY, - "TaskGroup._aexit", + FrameInfo([taskgroups.__file__, ANY, "TaskGroup._aexit"]), + FrameInfo( + [taskgroups.__file__, ANY, "TaskGroup.__aexit__"] ), - ( - taskgroups.__file__, - ANY, - "TaskGroup.__aexit__", - ), - (script_name, 60, "main"), + FrameInfo([script_name, 60, "main"]), ] self.assertIn( - (ANY, "server task", [[main_stack, ANY]]), + TaskInfo( + [ANY, "Task-1", [CoroInfo([main_stack, ANY])], []] + ), entries, ) self.assertIn( - (ANY, "echo client spam", [[main_stack, ANY]]), + TaskInfo( + [ + ANY, + "server task", + [ + CoroInfo( + [ + [ + FrameInfo( + [ + base_events.__file__, + ANY, + "Server.serve_forever", + ] + ) + ], + ANY, + ] + ) + ], + [ + CoroInfo( + [ + [ + FrameInfo( + [ + taskgroups.__file__, + ANY, + "TaskGroup._aexit", + ] + ), + FrameInfo( + [ + taskgroups.__file__, + ANY, + "TaskGroup.__aexit__", + ] + ), + FrameInfo( + [script_name, ANY, "main"] + ), + ], + ANY, + ] + ) + ], + ] + ), + entries, + ) + self.assertIn( + TaskInfo( + [ + ANY, + "Task-4", + [ + CoroInfo( + [ + [ + FrameInfo( + [tasks.__file__, ANY, "sleep"] + ), + FrameInfo( + [ + script_name, + 38, + "echo_client", + ] + ), + ], + ANY, + ] + ) + ], + [ + CoroInfo( + [ + [ + FrameInfo( + [ + taskgroups.__file__, + ANY, + "TaskGroup._aexit", + ] + ), + FrameInfo( + [ + taskgroups.__file__, + ANY, + "TaskGroup.__aexit__", + ] + ), + FrameInfo( + [ + script_name, + 41, + "echo_client_spam", + ] + ), + ], + ANY, + ] + ) + ], + ] + ), entries, ) - expected_stack = [ - [ + expected_awaited_by = [ + CoroInfo( [ - ( - taskgroups.__file__, - ANY, - "TaskGroup._aexit", - ), - ( - taskgroups.__file__, - ANY, - "TaskGroup.__aexit__", - ), - (script_name, 41, "echo_client_spam"), - ], - ANY, - ] + [ + FrameInfo( + [ + taskgroups.__file__, + ANY, + "TaskGroup._aexit", + ] + ), + FrameInfo( + [ + taskgroups.__file__, + ANY, + "TaskGroup.__aexit__", + ] + ), + FrameInfo( + [script_name, 41, "echo_client_spam"] + ), + ], + ANY, + ] + ) ] - tasks_with_stack = [ - task for task in entries if task[2] == expected_stack + tasks_with_awaited = [ + task + for task in entries + if task.awaited_by == expected_awaited_by ] - self.assertGreaterEqual(len(tasks_with_stack), 1000) + self.assertGreaterEqual(len(tasks_with_awaited), 1000) # the final task will have some random number, but it should for # sure be one of the echo client spam horde (In windows this is not true # for some reason) if sys.platform != "win32": self.assertEqual( - expected_stack, - entries[-1][2], + tasks_with_awaited[-1].awaited_by, + entries[-1].awaited_by, ) except PermissionError: - self.skipTest("Insufficient permissions to read the stack trace") + self.skipTest( + "Insufficient permissions to read the stack trace" + ) finally: if client_socket is not None: client_socket.close() @@ -740,17 +859,21 @@ class TestGetStackTrace(unittest.TestCase): self.assertEqual( stack[:2], [ - ( - __file__, - get_stack_trace.__code__.co_firstlineno + 2, - "get_stack_trace", + FrameInfo( + [ + __file__, + get_stack_trace.__code__.co_firstlineno + 2, + "get_stack_trace", + ] ), - ( - __file__, - self.test_self_trace.__code__.co_firstlineno + 6, - "TestGetStackTrace.test_self_trace", + FrameInfo( + [ + __file__, + self.test_self_trace.__code__.co_firstlineno + 6, + "TestGetStackTrace.test_self_trace", + ] ), - ] + ], ) diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index dd58e032a8b..f5455705678 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -1380,7 +1380,7 @@ x = ( 
for conv in ' s', ' s ': self.assertAllRaise(SyntaxError, "f-string: conversion type must come right after the" - " exclamanation mark", + " exclamation mark", ["f'{3!" + conv + "}'"]) self.assertAllRaise(SyntaxError, diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index fa5b1e43816..e672dfcbb46 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -1036,7 +1036,7 @@ class TestTCPServer(ControlMixin, ThreadingTCPServer): """ allow_reuse_address = True - allow_reuse_port = True + allow_reuse_port = False def __init__(self, addr, handler, poll_interval=0.5, bind_and_activate=True): diff --git a/Lib/test/test_unittest/testmock/testhelpers.py b/Lib/test/test_unittest/testmock/testhelpers.py index d1e48bde982..0e82c723ec3 100644 --- a/Lib/test/test_unittest/testmock/testhelpers.py +++ b/Lib/test/test_unittest/testmock/testhelpers.py @@ -1050,6 +1050,7 @@ class SpecSignatureTest(unittest.TestCase): create_autospec(WithPostInit()), ]: with self.subTest(mock=mock): + self.assertIsInstance(mock, WithPostInit) self.assertIsInstance(mock.a, int) self.assertIsInstance(mock.b, int) @@ -1072,6 +1073,7 @@ class SpecSignatureTest(unittest.TestCase): create_autospec(WithDefault(1)), ]: with self.subTest(mock=mock): + self.assertIsInstance(mock, WithDefault) self.assertIsInstance(mock.a, int) self.assertIsInstance(mock.b, int) @@ -1087,6 +1089,7 @@ class SpecSignatureTest(unittest.TestCase): create_autospec(WithMethod(1)), ]: with self.subTest(mock=mock): + self.assertIsInstance(mock, WithMethod) self.assertIsInstance(mock.a, int) mock.b.assert_not_called() @@ -1102,11 +1105,29 @@ class SpecSignatureTest(unittest.TestCase): create_autospec(WithNonFields(1)), ]: with self.subTest(mock=mock): + self.assertIsInstance(mock, WithNonFields) with self.assertRaisesRegex(AttributeError, msg): mock.a with self.assertRaisesRegex(AttributeError, msg): mock.b + def test_dataclass_special_attrs(self): + @dataclass + class Description: + name: str + + for mock in [ + create_autospec(Description, instance=True), + create_autospec(Description(1)), + ]: + with self.subTest(mock=mock): + self.assertIsInstance(mock, Description) + self.assertIs(mock.__class__, Description) + self.assertIsInstance(mock.__dataclass_fields__, MagicMock) + self.assertIsInstance(mock.__dataclass_params__, MagicMock) + self.assertIsInstance(mock.__match_args__, MagicMock) + self.assertIsInstance(mock.__hash__, MagicMock) + class TestCallList(unittest.TestCase): def test_args_list_contains_call_list(self): diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py index 55cb4b1f6af..e370aa48b7c 100644 --- a/Lib/unittest/mock.py +++ b/Lib/unittest/mock.py @@ -569,6 +569,11 @@ class NonCallableMock(Base): __dict__['_mock_methods'] = spec __dict__['_spec_asyncs'] = _spec_asyncs + def _mock_extend_spec_methods(self, spec_methods): + methods = self.__dict__.get('_mock_methods') or [] + methods.extend(spec_methods) + self.__dict__['_mock_methods'] = methods + def __get_return_value(self): ret = self._mock_return_value if self._mock_delegate is not None: @@ -2766,14 +2771,16 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None, raise InvalidSpecError(f'Cannot autospec a Mock object. 
' f'[object={spec!r}]') is_async_func = _is_async_func(spec) + _kwargs = {'spec': spec} entries = [(entry, _missing) for entry in dir(spec)] if is_type and instance and is_dataclass(spec): + is_dataclass_spec = True dataclass_fields = fields(spec) entries.extend((f.name, f.type) for f in dataclass_fields) - _kwargs = {'spec': [f.name for f in dataclass_fields]} + dataclass_spec_list = [f.name for f in dataclass_fields] else: - _kwargs = {'spec': spec} + is_dataclass_spec = False if spec_set: _kwargs = {'spec_set': spec} @@ -2810,6 +2817,8 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None, mock = Klass(parent=_parent, _new_parent=_parent, _new_name=_new_name, name=_name, **_kwargs) + if is_dataclass_spec: + mock._mock_extend_spec_methods(dataclass_spec_list) if isinstance(spec, FunctionTypes): # should only happen at the top level because we don't diff --git a/Lib/xmlrpc/server.py b/Lib/xmlrpc/server.py index 90a356fbb8e..8130c739af2 100644 --- a/Lib/xmlrpc/server.py +++ b/Lib/xmlrpc/server.py @@ -578,7 +578,7 @@ class SimpleXMLRPCServer(socketserver.TCPServer, """ allow_reuse_address = True - allow_reuse_port = True + allow_reuse_port = False # Warning: this is for debugging purposes only! Never set this to True in # production code, as will be sending out sensitive information (exception diff --git a/Misc/NEWS.d/3.14.0a6.rst b/Misc/NEWS.d/3.14.0a6.rst index bafd8845de6..d8840b6f283 100644 --- a/Misc/NEWS.d/3.14.0a6.rst +++ b/Misc/NEWS.d/3.14.0a6.rst @@ -1325,7 +1325,7 @@ variable. .. nonce: d75n8U .. section: Core and Builtins -Adapt :func:`reversed` for use in the free-theading build. The +Adapt :func:`reversed` for use in the free-threading build. The :func:`reversed` is still not thread-safe in the sense that concurrent iterations may see the same object, but they will not corrupt the interpreter state. diff --git a/Misc/NEWS.d/next/Build/2025-06-14-10-32-11.gh-issue-135497.ajlV4F.rst b/Misc/NEWS.d/next/Build/2025-06-14-10-32-11.gh-issue-135497.ajlV4F.rst new file mode 100644 index 00000000000..c84663b1466 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2025-06-14-10-32-11.gh-issue-135497.ajlV4F.rst @@ -0,0 +1 @@ +Fix the detection of ``MAXLOGNAME`` in the ``configure.ac`` script. diff --git a/Misc/NEWS.d/next/Build/2025-06-16-07-20-28.gh-issue-119132.fcI8s7.rst b/Misc/NEWS.d/next/Build/2025-06-16-07-20-28.gh-issue-119132.fcI8s7.rst new file mode 100644 index 00000000000..3eb0805b9ce --- /dev/null +++ b/Misc/NEWS.d/next/Build/2025-06-16-07-20-28.gh-issue-119132.fcI8s7.rst @@ -0,0 +1 @@ +Remove "experimental" tag from the CPython free-threading build. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-05-27-20-29-00.gh-issue-132617.EmUfQQ.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-27-20-29-00.gh-issue-132617.EmUfQQ.rst new file mode 100644 index 00000000000..53aef541e64 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-27-20-29-00.gh-issue-132617.EmUfQQ.rst @@ -0,0 +1,3 @@ +Fix :meth:`dict.update` modification check that could incorrectly raise a +"dict mutated during update" error when a different dictionary was modified +that happens to share the same underlying keys object. 
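A minimal sketch of the shared-keys scenario that entry describes (it mirrors the new ``test_update_shared_keys`` regression test added to Lib/test/test_dict.py earlier in this patch; the class names are the same illustrative ones used there):

    # Instances of MyClass share a single keys object for their __dict__.
    class MyClass:
        pass

    # A str subclass whose __eq__ runs while dict.update() probes the target
    # dict. It creates another MyClass instance with an extra attribute, which
    # adds an entry to the shared keys object even though neither dict passed
    # to update() is modified.
    class MyStr(str):
        __hash__ = str.__hash__

        def __eq__(self, other):
            obj2 = MyClass()
            obj2.a = "a"
            obj2.b = "b"
            obj2.c = "c"
            return super().__eq__(other)

    obj = MyClass()
    obj.a = "a"
    obj.b = "b"

    x = {MyStr("a"): MyStr("a")}
    x.update(obj.__dict__)           # previously raised "dict mutated during update"
    assert x == {"a": "a", "b": "b"}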
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-06-02-20-13-37.gh-issue-131798.JQRFvR.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-02-20-13-37.gh-issue-131798.JQRFvR.rst
new file mode 100644
index 00000000000..0e68c793e5e
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-02-20-13-37.gh-issue-131798.JQRFvR.rst
@@ -0,0 +1 @@
+Optimize ``_CHECK_METHOD_VERSION`` into ``_CHECK_FUNCTION_VERSION_INLINE`` in JIT-compiled code.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-06-11-15-08-10.gh-issue-127319.OVGFSZ.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-11-15-08-10.gh-issue-127319.OVGFSZ.rst
new file mode 100644
index 00000000000..d90153c9684
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-11-15-08-10.gh-issue-127319.OVGFSZ.rst
@@ -0,0 +1,3 @@
+Set the ``allow_reuse_port`` class variable to ``False`` on the XMLRPC,
+logging, and HTTP servers. This matches the behavior in prior Python releases,
+which did not allow port reuse.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-06-12-18-12-42.gh-issue-135371.R_YUtR.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-12-18-12-42.gh-issue-135371.R_YUtR.rst
new file mode 100644
index 00000000000..9f2e825e57b
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-12-18-12-42.gh-issue-135371.R_YUtR.rst
@@ -0,0 +1,4 @@
+Fixed :mod:`asyncio` debugging tools to properly display internal coroutine
+call stacks alongside external task dependencies. The ``python -m asyncio
+ps`` and ``python -m asyncio pstree`` commands now show complete execution
+context. Patch by Pablo Galindo.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-06-13-16-05-24.gh-issue-135474.67nOl3.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-13-16-05-24.gh-issue-135474.67nOl3.rst
new file mode 100644
index 00000000000..716d9b78748
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-13-16-05-24.gh-issue-135474.67nOl3.rst
@@ -0,0 +1 @@
+Specialize integer operations only on compact integers. This is a CPython internal change.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-06-14-01-01-14.gh-issue-135496.ER0Me3.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-14-01-01-14.gh-issue-135496.ER0Me3.rst
new file mode 100644
index 00000000000..03b1f4590c5
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-14-01-01-14.gh-issue-135496.ER0Me3.rst
@@ -0,0 +1 @@
+Fix typo in the f-string conversion type error ("exclamanation" -> "exclamation").
diff --git a/Misc/NEWS.d/next/Documentation/2025-06-10-17-02-06.gh-issue-135171.quHvts.rst b/Misc/NEWS.d/next/Documentation/2025-06-10-17-02-06.gh-issue-135171.quHvts.rst
new file mode 100644
index 00000000000..129ff74189b
--- /dev/null
+++ b/Misc/NEWS.d/next/Documentation/2025-06-10-17-02-06.gh-issue-135171.quHvts.rst
@@ -0,0 +1,2 @@
+Document that the :term:`iterator` for the leftmost :keyword:`!for` clause
+in the generator expression is created immediately.
diff --git a/Misc/NEWS.d/next/Library/2025-06-12-10-45-02.gh-issue-135368.OjWVHL.rst b/Misc/NEWS.d/next/Library/2025-06-12-10-45-02.gh-issue-135368.OjWVHL.rst
new file mode 100644
index 00000000000..b9973d88a85
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-12-10-45-02.gh-issue-135368.OjWVHL.rst
@@ -0,0 +1,2 @@
+Fix :class:`unittest.mock.Mock` generation on :func:`dataclasses.dataclass`
+objects. Now all special attributes are set as they were before :gh:`124429`.
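A minimal sketch of the restored behaviour (it follows the new ``test_dataclass_special_attrs`` test added to testhelpers.py above; ``Description`` is the same illustrative dataclass used there, not part of any library API): autospeccing a dataclass once again yields a mock that passes ``isinstance`` checks against the dataclass and keeps the dataclass special attributes available as mocks instead of dropping them.

    from dataclasses import dataclass
    from unittest.mock import MagicMock, create_autospec

    @dataclass
    class Description:
        name: str

    # Both an instance-spec built from the class and a spec built from a real
    # instance behave the same way after this change.
    for mock in (create_autospec(Description, instance=True),
                 create_autospec(Description("x"))):
        assert isinstance(mock, Description)                     # isinstance against the dataclass works
        assert isinstance(mock.__dataclass_fields__, MagicMock)  # special attrs are mocked, not missing
        mock.name                                                # regular fields are still spec'd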
diff --git a/Misc/NEWS.d/next/Library/2025-06-14-14-19-13.gh-issue-135497.1pzwdA.rst b/Misc/NEWS.d/next/Library/2025-06-14-14-19-13.gh-issue-135497.1pzwdA.rst new file mode 100644 index 00000000000..d3e81de9dbf --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-06-14-14-19-13.gh-issue-135497.1pzwdA.rst @@ -0,0 +1 @@ +Fix :func:`os.getlogin` failing for longer usernames on BSD-based platforms. diff --git a/Misc/NEWS.d/next/Library/2025-06-15-03-03-22.gh-issue-65697.COdwZd.rst b/Misc/NEWS.d/next/Library/2025-06-15-03-03-22.gh-issue-65697.COdwZd.rst new file mode 100644 index 00000000000..d374220d02f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-06-15-03-03-22.gh-issue-65697.COdwZd.rst @@ -0,0 +1 @@ +:class:`configparser`'s error message when attempting to write an invalid key is now more helpful. diff --git a/Modules/_remote_debugging_module.c b/Modules/_remote_debugging_module.c index ea58f38006e..c2421cac6bd 100644 --- a/Modules/_remote_debugging_module.c +++ b/Modules/_remote_debugging_module.c @@ -39,6 +39,8 @@ * ============================================================================ */ #define GET_MEMBER(type, obj, offset) (*(type*)((char*)(obj) + (offset))) +#define CLEAR_PTR_TAG(ptr) (((uintptr_t)(ptr) & ~Py_TAG_BITS)) +#define GET_MEMBER_NO_TAG(type, obj, offset) (type)(CLEAR_PTR_TAG(*(type*)((char*)(obj) + (offset)))) /* Size macros for opaque buffers */ #define SIZEOF_BYTES_OBJ sizeof(PyBytesObject) @@ -97,6 +99,101 @@ struct _Py_AsyncioModuleDebugOffsets { } asyncio_thread_state; }; +/* ============================================================================ + * STRUCTSEQ TYPE DEFINITIONS + * ============================================================================ */ + +// TaskInfo structseq type - replaces 4-tuple (task_id, task_name, coroutine_stack, awaited_by) +static PyStructSequence_Field TaskInfo_fields[] = { + {"task_id", "Task ID (memory address)"}, + {"task_name", "Task name"}, + {"coroutine_stack", "Coroutine call stack"}, + {"awaited_by", "Tasks awaiting this task"}, + {NULL} +}; + +static PyStructSequence_Desc TaskInfo_desc = { + "_remote_debugging.TaskInfo", + "Information about an asyncio task", + TaskInfo_fields, + 4 +}; + +// FrameInfo structseq type - replaces 3-tuple (filename, lineno, funcname) +static PyStructSequence_Field FrameInfo_fields[] = { + {"filename", "Source code filename"}, + {"lineno", "Line number"}, + {"funcname", "Function name"}, + {NULL} +}; + +static PyStructSequence_Desc FrameInfo_desc = { + "_remote_debugging.FrameInfo", + "Information about a frame", + FrameInfo_fields, + 3 +}; + +// CoroInfo structseq type - replaces 2-tuple (call_stack, task_name) +static PyStructSequence_Field CoroInfo_fields[] = { + {"call_stack", "Coroutine call stack"}, + {"task_name", "Task name"}, + {NULL} +}; + +static PyStructSequence_Desc CoroInfo_desc = { + "_remote_debugging.CoroInfo", + "Information about a coroutine", + CoroInfo_fields, + 2 +}; + +// ThreadInfo structseq type - replaces 2-tuple (thread_id, frame_info) +static PyStructSequence_Field ThreadInfo_fields[] = { + {"thread_id", "Thread ID"}, + {"frame_info", "Frame information"}, + {NULL} +}; + +static PyStructSequence_Desc ThreadInfo_desc = { + "_remote_debugging.ThreadInfo", + "Information about a thread", + ThreadInfo_fields, + 2 +}; + +// AwaitedInfo structseq type - replaces 2-tuple (tid, awaited_by_list) +static PyStructSequence_Field AwaitedInfo_fields[] = { + {"thread_id", "Thread ID"}, + {"awaited_by", "List of tasks awaited by this thread"}, + {NULL} +}; + +static 
PyStructSequence_Desc AwaitedInfo_desc = { + "_remote_debugging.AwaitedInfo", + "Information about what a thread is awaiting", + AwaitedInfo_fields, + 2 +}; + +typedef struct { + PyObject *func_name; + PyObject *file_name; + int first_lineno; + PyObject *linetable; // bytes + uintptr_t addr_code_adaptive; +} CachedCodeMetadata; + +typedef struct { + /* Types */ + PyTypeObject *RemoteDebugging_Type; + PyTypeObject *TaskInfo_Type; + PyTypeObject *FrameInfo_Type; + PyTypeObject *CoroInfo_Type; + PyTypeObject *ThreadInfo_Type; + PyTypeObject *AwaitedInfo_Type; +} RemoteDebuggingState; + typedef struct { PyObject_HEAD proc_handle_t handle; @@ -109,6 +206,7 @@ typedef struct { uint64_t code_object_generation; _Py_hashtable_t *code_object_cache; int debug; + RemoteDebuggingState *cached_state; // Cached module state #ifdef Py_GIL_DISABLED // TLBC cache invalidation tracking uint32_t tlbc_generation; // Track TLBC index pool changes @@ -116,18 +214,7 @@ typedef struct { #endif } RemoteUnwinderObject; -typedef struct { - PyObject *func_name; - PyObject *file_name; - int first_lineno; - PyObject *linetable; // bytes - uintptr_t addr_code_adaptive; -} CachedCodeMetadata; - -typedef struct { - /* Types */ - PyTypeObject *RemoteDebugging_Type; -} RemoteDebuggingState; +#define RemoteUnwinder_CAST(op) ((RemoteUnwinderObject *)(op)) typedef struct { @@ -160,6 +247,13 @@ module _remote_debugging * FORWARD DECLARATIONS * ============================================================================ */ +static inline int +is_frame_valid( + RemoteUnwinderObject *unwinder, + uintptr_t frame_addr, + uintptr_t code_object_addr +); + static int parse_tasks_in_set( RemoteUnwinderObject *unwinder, @@ -218,6 +312,24 @@ RemoteDebugging_GetState(PyObject *module) return (RemoteDebuggingState *)state; } +static inline RemoteDebuggingState * +RemoteDebugging_GetStateFromType(PyTypeObject *type) +{ + PyObject *module = PyType_GetModule(type); + assert(module != NULL); + return RemoteDebugging_GetState(module); +} + +static inline RemoteDebuggingState * +RemoteDebugging_GetStateFromObject(PyObject *obj) +{ + RemoteUnwinderObject *unwinder = (RemoteUnwinderObject *)obj; + if (unwinder->cached_state == NULL) { + unwinder->cached_state = RemoteDebugging_GetStateFromType(Py_TYPE(obj)); + } + return unwinder->cached_state; +} + static inline int RemoteDebugging_InitState(RemoteDebuggingState *st) { @@ -633,8 +745,7 @@ parse_task_name( return NULL; } - uintptr_t task_name_addr = GET_MEMBER(uintptr_t, task_obj, unwinder->async_debug_offsets.asyncio_task_object.task_name); - task_name_addr &= ~Py_TAG_BITS; + uintptr_t task_name_addr = GET_MEMBER_NO_TAG(uintptr_t, task_obj, unwinder->async_debug_offsets.asyncio_task_object.task_name); // The task name can be a long or a string so we need to check the type char task_name_obj[SIZEOF_PYOBJECT]; @@ -697,8 +808,7 @@ static int parse_task_awaited_by( return -1; } - uintptr_t task_ab_addr = GET_MEMBER(uintptr_t, task_obj, unwinder->async_debug_offsets.asyncio_task_object.task_awaited_by); - task_ab_addr &= ~Py_TAG_BITS; + uintptr_t task_ab_addr = GET_MEMBER_NO_TAG(uintptr_t, task_obj, unwinder->async_debug_offsets.asyncio_task_object.task_awaited_by); if ((void*)task_ab_addr == NULL) { return 0; @@ -748,8 +858,7 @@ handle_yield_from_frame( return -1; } - uintptr_t stackpointer_addr = GET_MEMBER(uintptr_t, iframe, unwinder->debug_offsets.interpreter_frame.stackpointer); - stackpointer_addr &= ~Py_TAG_BITS; + uintptr_t stackpointer_addr = GET_MEMBER_NO_TAG(uintptr_t, iframe, 
unwinder->debug_offsets.interpreter_frame.stackpointer); if ((void*)stackpointer_addr != NULL) { uintptr_t gi_await_addr; @@ -816,6 +925,11 @@ parse_coro_chain( return -1; } + int8_t frame_state = GET_MEMBER(int8_t, gen_object, unwinder->debug_offsets.gen_object.gi_frame_state); + if (frame_state == FRAME_CLEARED) { + return 0; + } + uintptr_t gen_type_addr = GET_MEMBER(uintptr_t, gen_object, unwinder->debug_offsets.pyobject.ob_type); PyObject* name = NULL; @@ -835,7 +949,7 @@ parse_coro_chain( } Py_DECREF(name); - if (GET_MEMBER(int8_t, gen_object, unwinder->debug_offsets.gen_object.gi_frame_state) == FRAME_SUSPENDED_YIELD_FROM) { + if (frame_state == FRAME_SUSPENDED_YIELD_FROM) { return handle_yield_from_frame(unwinder, gi_iframe_addr, gen_type_addr, render_to); } @@ -854,24 +968,14 @@ create_task_result( char task_obj[SIZEOF_TASK_OBJ]; uintptr_t coro_addr; - result = PyList_New(0); - if (result == NULL) { - set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create task result list"); - goto error; - } - + // Create call_stack first since it's the first tuple element call_stack = PyList_New(0); if (call_stack == NULL) { set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create call stack list"); goto error; } - if (PyList_Append(result, call_stack)) { - set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append call stack to task result"); - goto error; - } - Py_CLEAR(call_stack); - + // Create task name/address for second tuple element if (recurse_task) { tn = parse_task_name(unwinder, task_address); } else { @@ -882,12 +986,6 @@ create_task_result( goto error; } - if (PyList_Append(result, tn)) { - set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append task name to result"); - goto error; - } - Py_CLEAR(tn); - // Parse coroutine chain if (_Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, task_address, unwinder->async_debug_offsets.asyncio_task_object.size, @@ -896,35 +994,32 @@ create_task_result( goto error; } - coro_addr = GET_MEMBER(uintptr_t, task_obj, unwinder->async_debug_offsets.asyncio_task_object.task_coro); - coro_addr &= ~Py_TAG_BITS; + coro_addr = GET_MEMBER_NO_TAG(uintptr_t, task_obj, unwinder->async_debug_offsets.asyncio_task_object.task_coro); if ((void*)coro_addr != NULL) { - call_stack = PyList_New(0); - if (call_stack == NULL) { - set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create coro call stack list"); - goto error; - } - if (parse_coro_chain(unwinder, coro_addr, call_stack) < 0) { - Py_DECREF(call_stack); set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse coroutine chain"); goto error; } if (PyList_Reverse(call_stack)) { - Py_DECREF(call_stack); set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to reverse call stack"); goto error; } + } - if (PyList_SetItem(result, 0, call_stack) < 0) { - Py_DECREF(call_stack); - set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to set call stack in result"); - goto error; - } + // Create final CoroInfo result + RemoteDebuggingState *state = RemoteDebugging_GetStateFromObject((PyObject*)unwinder); + result = PyStructSequence_New(state->CoroInfo_Type); + if (result == NULL) { + set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create CoroInfo"); + goto error; } + // PyStructSequence_SetItem steals references, so we don't need to DECREF on success + PyStructSequence_SetItem(result, 0, call_stack); // This steals the reference + PyStructSequence_SetItem(result, 1, tn); // This steals the reference + return result; error: @@ -943,7 
+1038,6 @@ parse_task( ) { char is_task; PyObject* result = NULL; - PyObject* awaited_by = NULL; int err; err = read_char( @@ -962,48 +1056,37 @@ parse_task( goto error; } } else { - result = PyList_New(0); + // Create an empty CoroInfo for non-task objects + RemoteDebuggingState *state = RemoteDebugging_GetStateFromObject((PyObject*)unwinder); + result = PyStructSequence_New(state->CoroInfo_Type); if (result == NULL) { - set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create empty task result"); + set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create empty CoroInfo"); goto error; } - } - - if (PyList_Append(render_to, result)) { - set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append task result to render list"); - goto error; - } - - if (recurse_task) { - awaited_by = PyList_New(0); - if (awaited_by == NULL) { - set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create awaited_by list"); + PyObject *empty_list = PyList_New(0); + if (empty_list == NULL) { + set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create empty list"); goto error; } - - if (PyList_Append(result, awaited_by)) { - set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append awaited_by to result"); + PyObject *task_name = PyLong_FromUnsignedLongLong(task_address); + if (task_name == NULL) { + Py_DECREF(empty_list); + set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to create task name"); goto error; } - Py_DECREF(awaited_by); + PyStructSequence_SetItem(result, 0, empty_list); // This steals the reference + PyStructSequence_SetItem(result, 1, task_name); // This steals the reference + } - /* awaited_by is borrowed from 'result' to simplify cleanup */ - if (parse_task_awaited_by(unwinder, task_address, awaited_by, 1) < 0) { - // Clear the pointer so the cleanup doesn't try to decref it since - // it's borrowed from 'result' and will be decrefed when result is - // deleted. 
- awaited_by = NULL; - set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse task awaited_by relationships"); - goto error; - } + if (PyList_Append(render_to, result)) { + set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append task result to render list"); + goto error; } Py_DECREF(result); - return 0; error: Py_XDECREF(result); - Py_XDECREF(awaited_by); return -1; } @@ -1161,6 +1244,7 @@ process_single_task_node( PyObject *current_awaited_by = NULL; PyObject *task_id = NULL; PyObject *result_item = NULL; + PyObject *coroutine_stack = NULL; tn = parse_task_name(unwinder, task_addr); if (tn == NULL) { @@ -1174,25 +1258,40 @@ process_single_task_node( goto error; } + // Extract the coroutine stack for this task + coroutine_stack = PyList_New(0); + if (coroutine_stack == NULL) { + set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create coroutine stack list in single task node"); + goto error; + } + + if (parse_task(unwinder, task_addr, coroutine_stack, 0) < 0) { + set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse task coroutine stack in single task node"); + goto error; + } + task_id = PyLong_FromUnsignedLongLong(task_addr); if (task_id == NULL) { set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to create task ID in single task node"); goto error; } - result_item = PyTuple_New(3); + RemoteDebuggingState *state = RemoteDebugging_GetStateFromObject((PyObject*)unwinder); + result_item = PyStructSequence_New(state->TaskInfo_Type); if (result_item == NULL) { - set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create result tuple in single task node"); + set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create TaskInfo in single task node"); goto error; } - PyTuple_SET_ITEM(result_item, 0, task_id); // steals ref - PyTuple_SET_ITEM(result_item, 1, tn); // steals ref - PyTuple_SET_ITEM(result_item, 2, current_awaited_by); // steals ref + PyStructSequence_SetItem(result_item, 0, task_id); // steals ref + PyStructSequence_SetItem(result_item, 1, tn); // steals ref + PyStructSequence_SetItem(result_item, 2, coroutine_stack); // steals ref + PyStructSequence_SetItem(result_item, 3, current_awaited_by); // steals ref // References transferred to tuple task_id = NULL; tn = NULL; + coroutine_stack = NULL; current_awaited_by = NULL; if (PyList_Append(result, result_item)) { @@ -1203,9 +1302,11 @@ process_single_task_node( Py_DECREF(result_item); // Get back current_awaited_by reference for parse_task_awaited_by - current_awaited_by = PyTuple_GET_ITEM(result_item, 2); + current_awaited_by = PyStructSequence_GetItem(result_item, 3); if (parse_task_awaited_by(unwinder, task_addr, current_awaited_by, 0) < 0) { set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse awaited_by in single task node"); + // No cleanup needed here since all references were transferred to result_item + // and result_item was already added to result list and decreffed return -1; } @@ -1216,6 +1317,7 @@ error: Py_XDECREF(current_awaited_by); Py_XDECREF(task_id); Py_XDECREF(result_item); + Py_XDECREF(coroutine_stack); return -1; } @@ -1554,17 +1656,18 @@ done_tlbc: goto error; } - tuple = PyTuple_New(3); + RemoteDebuggingState *state = RemoteDebugging_GetStateFromObject((PyObject*)unwinder); + tuple = PyStructSequence_New(state->FrameInfo_Type); if (!tuple) { - set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create result tuple for code object"); + set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create FrameInfo for code 
object"); goto error; } Py_INCREF(meta->func_name); Py_INCREF(meta->file_name); - PyTuple_SET_ITEM(tuple, 0, meta->file_name); - PyTuple_SET_ITEM(tuple, 1, lineno); - PyTuple_SET_ITEM(tuple, 2, meta->func_name); + PyStructSequence_SetItem(tuple, 0, meta->file_name); + PyStructSequence_SetItem(tuple, 1, lineno); + PyStructSequence_SetItem(tuple, 2, meta->func_name); *result = tuple; return 0; @@ -1725,10 +1828,10 @@ parse_frame_from_chunks( char *frame = (char *)frame_ptr; *previous_frame = GET_MEMBER(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.previous); - - if (GET_MEMBER(char, frame, unwinder->debug_offsets.interpreter_frame.owner) >= FRAME_OWNED_BY_INTERPRETER || - !GET_MEMBER(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.executable)) { - return 0; + uintptr_t code_object = GET_MEMBER_NO_TAG(uintptr_t, frame_ptr, unwinder->debug_offsets.interpreter_frame.executable); + int frame_valid = is_frame_valid(unwinder, (uintptr_t)frame, code_object); + if (frame_valid != 1) { + return frame_valid; } uintptr_t instruction_pointer = GET_MEMBER(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.instr_ptr); @@ -1741,9 +1844,7 @@ parse_frame_from_chunks( } #endif - return parse_code_object( - unwinder, result, GET_MEMBER(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.executable), - instruction_pointer, previous_frame, tlbc_index); + return parse_code_object(unwinder, result, code_object, instruction_pointer, previous_frame, tlbc_index); } /* ============================================================================ @@ -1986,6 +2087,33 @@ find_running_task_and_coro( * FRAME PARSING FUNCTIONS * ============================================================================ */ +static inline int +is_frame_valid( + RemoteUnwinderObject *unwinder, + uintptr_t frame_addr, + uintptr_t code_object_addr +) { + if ((void*)code_object_addr == NULL) { + return 0; + } + + void* frame = (void*)frame_addr; + + if (GET_MEMBER(char, frame, unwinder->debug_offsets.interpreter_frame.owner) == FRAME_OWNED_BY_CSTACK || + GET_MEMBER(char, frame, unwinder->debug_offsets.interpreter_frame.owner) == FRAME_OWNED_BY_INTERPRETER) { + return 0; // C frame + } + + if (GET_MEMBER(char, frame, unwinder->debug_offsets.interpreter_frame.owner) != FRAME_OWNED_BY_GENERATOR + && GET_MEMBER(char, frame, unwinder->debug_offsets.interpreter_frame.owner) != FRAME_OWNED_BY_THREAD) { + PyErr_Format(PyExc_RuntimeError, "Unhandled frame owner %d.\n", + GET_MEMBER(char, frame, unwinder->debug_offsets.interpreter_frame.owner)); + set_exception_cause(unwinder, PyExc_RuntimeError, "Unhandled frame owner type in async frame"); + return -1; + } + return 1; +} + static int parse_frame_object( RemoteUnwinderObject *unwinder, @@ -2007,13 +2135,10 @@ parse_frame_object( } *previous_frame = GET_MEMBER(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.previous); - - if (GET_MEMBER(char, frame, unwinder->debug_offsets.interpreter_frame.owner) >= FRAME_OWNED_BY_INTERPRETER) { - return 0; - } - - if ((void*)GET_MEMBER(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.executable) == NULL) { - return 0; + uintptr_t code_object = GET_MEMBER_NO_TAG(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.executable); + int frame_valid = is_frame_valid(unwinder, (uintptr_t)frame, code_object); + if (frame_valid != 1) { + return frame_valid; } uintptr_t instruction_pointer = GET_MEMBER(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.instr_ptr); @@ -2026,9 +2151,7 @@ 
parse_frame_object( } #endif - return parse_code_object( - unwinder, result, GET_MEMBER(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.executable), - instruction_pointer, previous_frame, tlbc_index); + return parse_code_object(unwinder, result, code_object,instruction_pointer, previous_frame, tlbc_index); } static int @@ -2053,26 +2176,10 @@ parse_async_frame_object( } *previous_frame = GET_MEMBER(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.previous); - - *code_object = GET_MEMBER(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.executable); - // Strip tag bits for consistent comparison - *code_object &= ~Py_TAG_BITS; - assert(code_object != NULL); - if ((void*)*code_object == NULL) { - return 0; - } - - if (GET_MEMBER(char, frame, unwinder->debug_offsets.interpreter_frame.owner) == FRAME_OWNED_BY_CSTACK || - GET_MEMBER(char, frame, unwinder->debug_offsets.interpreter_frame.owner) == FRAME_OWNED_BY_INTERPRETER) { - return 0; // C frame - } - - if (GET_MEMBER(char, frame, unwinder->debug_offsets.interpreter_frame.owner) != FRAME_OWNED_BY_GENERATOR - && GET_MEMBER(char, frame, unwinder->debug_offsets.interpreter_frame.owner) != FRAME_OWNED_BY_THREAD) { - PyErr_Format(PyExc_RuntimeError, "Unhandled frame owner %d.\n", - GET_MEMBER(char, frame, unwinder->debug_offsets.interpreter_frame.owner)); - set_exception_cause(unwinder, PyExc_RuntimeError, "Unhandled frame owner type in async frame"); - return -1; + *code_object = GET_MEMBER_NO_TAG(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.executable); + int frame_valid = is_frame_valid(unwinder, (uintptr_t)frame, *code_object); + if (frame_valid != 1) { + return frame_valid; } uintptr_t instruction_pointer = GET_MEMBER(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.instr_ptr); @@ -2212,23 +2319,24 @@ append_awaited_by( return -1; } - PyObject *result_item = PyTuple_New(2); - if (result_item == NULL) { + PyObject* awaited_by_for_thread = PyList_New(0); + if (awaited_by_for_thread == NULL) { Py_DECREF(tid_py); - set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create awaited_by result tuple"); + set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create awaited_by thread list"); return -1; } - PyObject* awaited_by_for_thread = PyList_New(0); - if (awaited_by_for_thread == NULL) { + RemoteDebuggingState *state = RemoteDebugging_GetStateFromObject((PyObject*)unwinder); + PyObject *result_item = PyStructSequence_New(state->AwaitedInfo_Type); + if (result_item == NULL) { Py_DECREF(tid_py); - Py_DECREF(result_item); - set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create awaited_by thread list"); + Py_DECREF(awaited_by_for_thread); + set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create AwaitedInfo"); return -1; } - PyTuple_SET_ITEM(result_item, 0, tid_py); // steals ref - PyTuple_SET_ITEM(result_item, 1, awaited_by_for_thread); // steals ref + PyStructSequence_SetItem(result_item, 0, tid_py); // steals ref + PyStructSequence_SetItem(result_item, 1, awaited_by_for_thread); // steals ref if (PyList_Append(result, result_item)) { Py_DECREF(result_item); set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append awaited_by result item"); @@ -2352,14 +2460,15 @@ unwind_stack_for_thread( goto error; } - result = PyTuple_New(2); + RemoteDebuggingState *state = RemoteDebugging_GetStateFromObject((PyObject*)unwinder); + result = PyStructSequence_New(state->ThreadInfo_Type); if (result == NULL) { - set_exception_cause(unwinder, PyExc_MemoryError, 
"Failed to create thread unwind result tuple"); + set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create ThreadInfo"); goto error; } - PyTuple_SET_ITEM(result, 0, thread_id); // Steals reference - PyTuple_SET_ITEM(result, 1, frame_info); // Steals reference + PyStructSequence_SetItem(result, 0, thread_id); // Steals reference + PyStructSequence_SetItem(result, 1, frame_info); // Steals reference cleanup_stack_chunks(&chunks); return result; @@ -2414,6 +2523,7 @@ _remote_debugging_RemoteUnwinder___init___impl(RemoteUnwinderObject *self, /*[clinic end generated code: output=3982f2a7eba49334 input=48a762566b828e91]*/ { self->debug = debug; + self->cached_state = NULL; if (_Py_RemoteDebug_InitProcHandle(&self->handle, pid) < 0) { set_exception_cause(self, PyExc_RuntimeError, "Failed to initialize process handle"); return -1; @@ -2805,8 +2915,9 @@ static PyMethodDef RemoteUnwinder_methods[] = { }; static void -RemoteUnwinder_dealloc(RemoteUnwinderObject *self) +RemoteUnwinder_dealloc(PyObject *op) { + RemoteUnwinderObject *self = RemoteUnwinder_CAST(op); PyTypeObject *tp = Py_TYPE(self); if (self->code_object_cache) { _Py_hashtable_destroy(self->code_object_cache); @@ -2860,6 +2971,47 @@ _remote_debugging_exec(PyObject *m) if (PyModule_AddType(m, st->RemoteDebugging_Type) < 0) { return -1; } + + // Initialize structseq types + st->TaskInfo_Type = PyStructSequence_NewType(&TaskInfo_desc); + if (st->TaskInfo_Type == NULL) { + return -1; + } + if (PyModule_AddType(m, st->TaskInfo_Type) < 0) { + return -1; + } + + st->FrameInfo_Type = PyStructSequence_NewType(&FrameInfo_desc); + if (st->FrameInfo_Type == NULL) { + return -1; + } + if (PyModule_AddType(m, st->FrameInfo_Type) < 0) { + return -1; + } + + st->CoroInfo_Type = PyStructSequence_NewType(&CoroInfo_desc); + if (st->CoroInfo_Type == NULL) { + return -1; + } + if (PyModule_AddType(m, st->CoroInfo_Type) < 0) { + return -1; + } + + st->ThreadInfo_Type = PyStructSequence_NewType(&ThreadInfo_desc); + if (st->ThreadInfo_Type == NULL) { + return -1; + } + if (PyModule_AddType(m, st->ThreadInfo_Type) < 0) { + return -1; + } + + st->AwaitedInfo_Type = PyStructSequence_NewType(&AwaitedInfo_desc); + if (st->AwaitedInfo_Type == NULL) { + return -1; + } + if (PyModule_AddType(m, st->AwaitedInfo_Type) < 0) { + return -1; + } #ifdef Py_GIL_DISABLED PyUnstable_Module_SetGIL(m, Py_MOD_GIL_NOT_USED); #endif @@ -2878,6 +3030,11 @@ remote_debugging_traverse(PyObject *mod, visitproc visit, void *arg) { RemoteDebuggingState *state = RemoteDebugging_GetState(mod); Py_VISIT(state->RemoteDebugging_Type); + Py_VISIT(state->TaskInfo_Type); + Py_VISIT(state->FrameInfo_Type); + Py_VISIT(state->CoroInfo_Type); + Py_VISIT(state->ThreadInfo_Type); + Py_VISIT(state->AwaitedInfo_Type); return 0; } @@ -2886,6 +3043,11 @@ remote_debugging_clear(PyObject *mod) { RemoteDebuggingState *state = RemoteDebugging_GetState(mod); Py_CLEAR(state->RemoteDebugging_Type); + Py_CLEAR(state->TaskInfo_Type); + Py_CLEAR(state->FrameInfo_Type); + Py_CLEAR(state->CoroInfo_Type); + Py_CLEAR(state->ThreadInfo_Type); + Py_CLEAR(state->AwaitedInfo_Type); return 0; } diff --git a/Objects/codeobject.c b/Objects/codeobject.c index ee869d991d9..34b50ef97d5 100644 --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -1714,7 +1714,7 @@ static int identify_unbound_names(PyThreadState *tstate, PyCodeObject *co, PyObject *globalnames, PyObject *attrnames, PyObject *globalsns, PyObject *builtinsns, - struct co_unbound_counts *counts) + struct co_unbound_counts *counts, int *p_numdupes) { // 
This function is inspired by inspect.getclosurevars(). // It would be nicer if we had something similar to co_localspluskinds, @@ -1729,6 +1729,7 @@ identify_unbound_names(PyThreadState *tstate, PyCodeObject *co, assert(builtinsns == NULL || PyDict_Check(builtinsns)); assert(counts == NULL || counts->total == 0); struct co_unbound_counts unbound = {0}; + int numdupes = 0; Py_ssize_t len = Py_SIZE(co); for (int i = 0; i < len; i += _PyInstruction_GetLength(co, i)) { _Py_CODEUNIT inst = _Py_GetBaseCodeUnit(co, i); @@ -1747,6 +1748,12 @@ identify_unbound_names(PyThreadState *tstate, PyCodeObject *co, if (PySet_Add(attrnames, name) < 0) { return -1; } + if (PySet_Contains(globalnames, name)) { + if (_PyErr_Occurred(tstate)) { + return -1; + } + numdupes += 1; + } } else if (inst.op.code == LOAD_GLOBAL) { int oparg = GET_OPARG(co, i, inst.op.arg); @@ -1778,11 +1785,20 @@ identify_unbound_names(PyThreadState *tstate, PyCodeObject *co, if (PySet_Add(globalnames, name) < 0) { return -1; } + if (PySet_Contains(attrnames, name)) { + if (_PyErr_Occurred(tstate)) { + return -1; + } + numdupes += 1; + } } } if (counts != NULL) { *counts = unbound; } + if (p_numdupes != NULL) { + *p_numdupes = numdupes; + } return 0; } @@ -1932,20 +1948,24 @@ _PyCode_SetUnboundVarCounts(PyThreadState *tstate, // Fill in unbound.globals and unbound.numattrs. struct co_unbound_counts unbound = {0}; + int numdupes = 0; Py_BEGIN_CRITICAL_SECTION(co); res = identify_unbound_names( tstate, co, globalnames, attrnames, globalsns, builtinsns, - &unbound); + &unbound, &numdupes); Py_END_CRITICAL_SECTION(); if (res < 0) { goto finally; } assert(unbound.numunknown == 0); - assert(unbound.total <= counts->unbound.total); + assert(unbound.total - numdupes <= counts->unbound.total); assert(counts->unbound.numunknown == counts->unbound.total); - unbound.numunknown = counts->unbound.total - unbound.total; - unbound.total = counts->unbound.total; + // There may be a name that is both a global and an attr. 
+ int totalunbound = counts->unbound.total + numdupes; + unbound.numunknown = totalunbound - unbound.total; + unbound.total = totalunbound; counts->unbound = unbound; + counts->total += numdupes; res = 0; finally: diff --git a/Objects/dictobject.c b/Objects/dictobject.c index fd8ccf56324..72901377cbb 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -3858,7 +3858,7 @@ dict_dict_merge(PyInterpreterState *interp, PyDictObject *mp, PyDictObject *othe } } - Py_ssize_t orig_size = other->ma_keys->dk_nentries; + Py_ssize_t orig_size = other->ma_used; Py_ssize_t pos = 0; Py_hash_t hash; PyObject *key, *value; @@ -3892,7 +3892,7 @@ dict_dict_merge(PyInterpreterState *interp, PyDictObject *mp, PyDictObject *othe if (err != 0) return -1; - if (orig_size != other->ma_keys->dk_nentries) { + if (orig_size != other->ma_used) { PyErr_SetString(PyExc_RuntimeError, "dict mutated during update"); return -1; diff --git a/Objects/longobject.c b/Objects/longobject.c index 2b533312fee..dfa02851cd8 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -3772,9 +3772,11 @@ long_add(PyLongObject *a, PyLongObject *b) } PyObject * -_PyLong_Add(PyLongObject *a, PyLongObject *b) +_PyCompactLong_Add(PyLongObject *a, PyLongObject *b) { - return (PyObject*)long_add(a, b); + assert(_PyLong_BothAreCompact(a, b)); + stwodigits z = medium_value(a) + medium_value(b); + return (PyObject *)_PyLong_FromSTwoDigits(z); } static PyObject * @@ -3815,9 +3817,10 @@ long_sub(PyLongObject *a, PyLongObject *b) } PyObject * -_PyLong_Subtract(PyLongObject *a, PyLongObject *b) +_PyCompactLong_Subtract(PyLongObject *a, PyLongObject *b) { - return (PyObject*)long_sub(a, b); + assert(_PyLong_BothAreCompact(a, b)); + return (PyObject *)_PyLong_FromSTwoDigits(medium_value(a) - medium_value(b)); } static PyObject * @@ -4262,9 +4265,11 @@ long_mul(PyLongObject *a, PyLongObject *b) } PyObject * -_PyLong_Multiply(PyLongObject *a, PyLongObject *b) +_PyCompactLong_Multiply(PyLongObject *a, PyLongObject *b) { - return (PyObject*)long_mul(a, b); + assert(_PyLong_BothAreCompact(a, b)); + stwodigits v = medium_value(a) * medium_value(b); + return (PyObject *)_PyLong_FromSTwoDigits(v); } static PyObject * diff --git a/PCbuild/build.bat b/PCbuild/build.bat index 2f358991e48..60235704886 100644 --- a/PCbuild/build.bat +++ b/PCbuild/build.bat @@ -33,7 +33,7 @@ echo. -k Attempt to kill any running Pythons before building (usually done echo. automatically by the pythoncore project) echo. --pgo Build with Profile-Guided Optimization. This flag echo. overrides -c and -d -echo. --disable-gil Enable experimental support for running without the GIL. +echo. --disable-gil Enable support for running without the GIL. echo. --test-marker Enable the test marker within the build. echo. --regen Regenerate all opcodes, grammar and tokens. echo. --experimental-jit Enable the experimental just-in-time compiler. 
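The Objects/longobject.c hunks above replace _PyLong_Add, _PyLong_Subtract and _PyLong_Multiply with compact-only variants that assert _PyLong_BothAreCompact() instead of handling arbitrary ints, and the interpreter hunks further down (Python/bytecodes.c, Python/executor_cases.c.h, Python/generated_cases.c.h) add the matching deopt guard before calling them. A minimal caller-side sketch of that pattern, assuming a Py_BUILD_CORE build with the internal pycore_long.h header; the wrapper name compact_add_or_fallback is illustrative only, not part of the patch:

/* Fast path only when both operands are compact (at most one digit); the new
 * _PyCompactLong_Add() asserts that precondition rather than checking it. */
#include "Python.h"
#include "pycore_long.h"   // _PyLong_BothAreCompact(), _PyCompactLong_Add()

static PyObject *
compact_add_or_fallback(PyLongObject *a, PyLongObject *b)
{
    if (_PyLong_BothAreCompact(a, b)) {
        /* Cheap stwodigits arithmetic; may still return NULL on allocation
         * failure, so callers keep checking the result as before. */
        return _PyCompactLong_Add(a, b);
    }
    /* Non-compact operands take the generic path. */
    return PyNumber_Add((PyObject *)a, (PyObject *)b);
}

The same split explains two other hunks in this commit: the _PyCompactLong_* helpers are listed in NON_ESCAPING_FUNCTIONS in Tools/cases_generator/analyzer.py, which is what allows the specialized BINARY_OP cases to drop the _PyFrame_SetStackPointer/_PyFrame_GetStackPointer bracketing around the call.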
diff --git a/Parser/action_helpers.c b/Parser/action_helpers.c index 3bcc0870882..0d362bf7a91 100644 --- a/Parser/action_helpers.c +++ b/Parser/action_helpers.c @@ -965,7 +965,7 @@ _PyPegen_check_fstring_conversion(Parser *p, Token* conv_token, expr_ty conv) if (conv_token->lineno != conv->lineno || conv_token->end_col_offset != conv->col_offset) { return RAISE_SYNTAX_ERROR_KNOWN_RANGE( conv_token, conv, - "%c-string: conversion type must come right after the exclamanation mark", + "%c-string: conversion type must come right after the exclamation mark", TOK_GET_STRING_PREFIX(p->tok) ); } diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 032e76f72af..971e97a5784 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -582,9 +582,10 @@ dummy_func( PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); assert(PyLong_CheckExact(left_o)); assert(PyLong_CheckExact(right_o)); + DEOPT_IF(!_PyLong_BothAreCompact((PyLongObject *)left_o, (PyLongObject *)right_o)); STAT_INC(BINARY_OP, hit); - PyObject *res_o = _PyLong_Multiply((PyLongObject *)left_o, (PyLongObject *)right_o); + PyObject *res_o = _PyCompactLong_Multiply((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); INPUTS_DEAD(); @@ -597,9 +598,10 @@ dummy_func( PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); assert(PyLong_CheckExact(left_o)); assert(PyLong_CheckExact(right_o)); + DEOPT_IF(!_PyLong_BothAreCompact((PyLongObject *)left_o, (PyLongObject *)right_o)); STAT_INC(BINARY_OP, hit); - PyObject *res_o = _PyLong_Add((PyLongObject *)left_o, (PyLongObject *)right_o); + PyObject *res_o = _PyCompactLong_Add((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); INPUTS_DEAD(); @@ -612,9 +614,10 @@ dummy_func( PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); assert(PyLong_CheckExact(left_o)); assert(PyLong_CheckExact(right_o)); + DEOPT_IF(!_PyLong_BothAreCompact((PyLongObject *)left_o, (PyLongObject *)right_o)); STAT_INC(BINARY_OP, hit); - PyObject *res_o = _PyLong_Subtract((PyLongObject *)left_o, (PyLongObject *)right_o); + PyObject *res_o = _PyCompactLong_Subtract((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); INPUTS_DEAD(); diff --git a/Python/crossinterp.c b/Python/crossinterp.c index 39c7ea69890..0bd267e07d5 100644 --- a/Python/crossinterp.c +++ b/Python/crossinterp.c @@ -2619,7 +2619,9 @@ _PyXI_Enter(_PyXI_session *session, PyInterpreterState *interp, PyObject *nsupdates, _PyXI_session_result *result) { - PyThreadState *tstate = _PyThreadState_GET(); +#ifndef NDEBUG + PyThreadState *tstate = _PyThreadState_GET(); // Only used for asserts +#endif // Convert the attrs for cross-interpreter use. _PyXI_namespace *sharedns = NULL; @@ -2661,7 +2663,9 @@ _PyXI_Enter(_PyXI_session *session, _enter_session(session, interp); _PyXI_failure override = XI_FAILURE_INIT; override.code = _PyXI_ERR_UNCAUGHT_EXCEPTION; +#ifndef NDEBUG tstate = _PyThreadState_GET(); +#endif // Ensure this thread owns __main__. if (_PyInterpreterState_SetRunningMain(interp) < 0) { @@ -2697,7 +2701,9 @@ error: // Exit the session. 
_exit_session(session); +#ifndef NDEBUG tstate = _PyThreadState_GET(); +#endif if (sharedns != NULL) { _destroy_sharedns(sharedns); diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 4f772f916d1..dbfb2391bf0 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -880,10 +880,12 @@ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); assert(PyLong_CheckExact(left_o)); assert(PyLong_CheckExact(right_o)); + if (!_PyLong_BothAreCompact((PyLongObject *)left_o, (PyLongObject *)right_o)) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } STAT_INC(BINARY_OP, hit); - _PyFrame_SetStackPointer(frame, stack_pointer); - PyObject *res_o = _PyLong_Multiply((PyLongObject *)left_o, (PyLongObject *)right_o); - stack_pointer = _PyFrame_GetStackPointer(frame); + PyObject *res_o = _PyCompactLong_Multiply((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); if (res_o == NULL) { @@ -908,10 +910,12 @@ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); assert(PyLong_CheckExact(left_o)); assert(PyLong_CheckExact(right_o)); + if (!_PyLong_BothAreCompact((PyLongObject *)left_o, (PyLongObject *)right_o)) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } STAT_INC(BINARY_OP, hit); - _PyFrame_SetStackPointer(frame, stack_pointer); - PyObject *res_o = _PyLong_Add((PyLongObject *)left_o, (PyLongObject *)right_o); - stack_pointer = _PyFrame_GetStackPointer(frame); + PyObject *res_o = _PyCompactLong_Add((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); if (res_o == NULL) { @@ -936,10 +940,12 @@ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); assert(PyLong_CheckExact(left_o)); assert(PyLong_CheckExact(right_o)); + if (!_PyLong_BothAreCompact((PyLongObject *)left_o, (PyLongObject *)right_o)) { + UOP_STAT_INC(uopcode, miss); + JUMP_TO_JUMP_TARGET(); + } STAT_INC(BINARY_OP, hit); - _PyFrame_SetStackPointer(frame, stack_pointer); - PyObject *res_o = _PyLong_Subtract((PyLongObject *)left_o, (PyLongObject *)right_o); - stack_pointer = _PyFrame_GetStackPointer(frame); + PyObject *res_o = _PyCompactLong_Subtract((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); if (res_o == NULL) { diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 5ac519bb1b6..2cf027c539b 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -182,10 +182,13 @@ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); assert(PyLong_CheckExact(left_o)); assert(PyLong_CheckExact(right_o)); + if (!_PyLong_BothAreCompact((PyLongObject *)left_o, (PyLongObject *)right_o)) { + UPDATE_MISS_STATS(BINARY_OP); + assert(_PyOpcode_Deopt[opcode] == (BINARY_OP)); + JUMP_TO_PREDICTED(BINARY_OP); + } STAT_INC(BINARY_OP, hit); - _PyFrame_SetStackPointer(frame, stack_pointer); - PyObject *res_o = _PyLong_Add((PyLongObject *)left_o, (PyLongObject *)right_o); - stack_pointer = _PyFrame_GetStackPointer(frame); + PyObject *res_o = _PyCompactLong_Add((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); if (res_o == NULL) { @@ -507,10 +510,13 @@ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); 
assert(PyLong_CheckExact(left_o)); assert(PyLong_CheckExact(right_o)); + if (!_PyLong_BothAreCompact((PyLongObject *)left_o, (PyLongObject *)right_o)) { + UPDATE_MISS_STATS(BINARY_OP); + assert(_PyOpcode_Deopt[opcode] == (BINARY_OP)); + JUMP_TO_PREDICTED(BINARY_OP); + } STAT_INC(BINARY_OP, hit); - _PyFrame_SetStackPointer(frame, stack_pointer); - PyObject *res_o = _PyLong_Multiply((PyLongObject *)left_o, (PyLongObject *)right_o); - stack_pointer = _PyFrame_GetStackPointer(frame); + PyObject *res_o = _PyCompactLong_Multiply((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); if (res_o == NULL) { @@ -1088,10 +1094,13 @@ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right); assert(PyLong_CheckExact(left_o)); assert(PyLong_CheckExact(right_o)); + if (!_PyLong_BothAreCompact((PyLongObject *)left_o, (PyLongObject *)right_o)) { + UPDATE_MISS_STATS(BINARY_OP); + assert(_PyOpcode_Deopt[opcode] == (BINARY_OP)); + JUMP_TO_PREDICTED(BINARY_OP); + } STAT_INC(BINARY_OP, hit); - _PyFrame_SetStackPointer(frame, stack_pointer); - PyObject *res_o = _PyLong_Subtract((PyLongObject *)left_o, (PyLongObject *)right_o); - stack_pointer = _PyFrame_GetStackPointer(frame); + PyObject *res_o = _PyCompactLong_Subtract((PyLongObject *)left_o, (PyLongObject *)right_o); PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc); PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc); if (res_o == NULL) { diff --git a/Python/getversion.c b/Python/getversion.c index 226b2f999a6..8d8bc6ea700 100644 --- a/Python/getversion.c +++ b/Python/getversion.c @@ -15,7 +15,7 @@ void _Py_InitVersion(void) } initialized = 1; #ifdef Py_GIL_DISABLED - const char *buildinfo_format = "%.80s experimental free-threading build (%.80s) %.80s"; + const char *buildinfo_format = "%.80s free-threading build (%.80s) %.80s"; #else const char *buildinfo_format = "%.80s (%.80s) %.80s"; #endif diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c index babd3e46b8d..5a9a3a943a7 100644 --- a/Python/optimizer_bytecodes.c +++ b/Python/optimizer_bytecodes.c @@ -222,60 +222,15 @@ dummy_func(void) { } op(_BINARY_OP_ADD_INT, (left, right -- res)) { - if (sym_is_const(ctx, left) && sym_is_const(ctx, right)) { - assert(PyLong_CheckExact(sym_get_const(ctx, left))); - assert(PyLong_CheckExact(sym_get_const(ctx, right))); - PyObject *temp = _PyLong_Add((PyLongObject *)sym_get_const(ctx, left), - (PyLongObject *)sym_get_const(ctx, right)); - if (temp == NULL) { - goto error; - } - res = sym_new_const(ctx, temp); - Py_DECREF(temp); - // TODO gh-115506: - // replace opcode with constant propagated one and add tests! - } - else { - res = sym_new_type(ctx, &PyLong_Type); - } + res = sym_new_type(ctx, &PyLong_Type); } op(_BINARY_OP_SUBTRACT_INT, (left, right -- res)) { - if (sym_is_const(ctx, left) && sym_is_const(ctx, right)) { - assert(PyLong_CheckExact(sym_get_const(ctx, left))); - assert(PyLong_CheckExact(sym_get_const(ctx, right))); - PyObject *temp = _PyLong_Subtract((PyLongObject *)sym_get_const(ctx, left), - (PyLongObject *)sym_get_const(ctx, right)); - if (temp == NULL) { - goto error; - } - res = sym_new_const(ctx, temp); - Py_DECREF(temp); - // TODO gh-115506: - // replace opcode with constant propagated one and add tests! 
- } - else { - res = sym_new_type(ctx, &PyLong_Type); - } + res = sym_new_type(ctx, &PyLong_Type); } op(_BINARY_OP_MULTIPLY_INT, (left, right -- res)) { - if (sym_is_const(ctx, left) && sym_is_const(ctx, right)) { - assert(PyLong_CheckExact(sym_get_const(ctx, left))); - assert(PyLong_CheckExact(sym_get_const(ctx, right))); - PyObject *temp = _PyLong_Multiply((PyLongObject *)sym_get_const(ctx, left), - (PyLongObject *)sym_get_const(ctx, right)); - if (temp == NULL) { - goto error; - } - res = sym_new_const(ctx, temp); - Py_DECREF(temp); - // TODO gh-115506: - // replace opcode with constant propagated one and add tests! - } - else { - res = sym_new_type(ctx, &PyLong_Type); - } + res = sym_new_type(ctx, &PyLong_Type); } op(_BINARY_OP_ADD_FLOAT, (left, right -- res)) { @@ -717,6 +672,16 @@ dummy_func(void) { sym_set_type(callable, &PyFunction_Type); } + op(_CHECK_METHOD_VERSION, (func_version/2, callable, null, unused[oparg] -- callable, null, unused[oparg])) { + if (sym_is_const(ctx, callable) && sym_matches_type(callable, &PyMethod_Type)) { + PyMethodObject *method = (PyMethodObject *)sym_get_const(ctx, callable); + assert(PyMethod_Check(method)); + REPLACE_OP(this_instr, _CHECK_FUNCTION_VERSION_INLINE, 0, func_version); + this_instr->operand1 = (uintptr_t)method->im_func; + } + sym_set_type(callable, &PyMethod_Type); + } + op(_CHECK_FUNCTION_EXACT_ARGS, (callable, self_or_null, unused[oparg] -- callable, self_or_null, unused[oparg])) { assert(sym_matches_type(callable, &PyFunction_Type)); if (sym_is_const(ctx, callable)) { diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h index adab110c5ce..8c4f0399c75 100644 --- a/Python/optimizer_cases.c.h +++ b/Python/optimizer_cases.c.h @@ -319,86 +319,29 @@ } case _BINARY_OP_MULTIPLY_INT: { - JitOptSymbol *right; - JitOptSymbol *left; JitOptSymbol *res; - right = stack_pointer[-1]; - left = stack_pointer[-2]; - if (sym_is_const(ctx, left) && sym_is_const(ctx, right)) { - assert(PyLong_CheckExact(sym_get_const(ctx, left))); - assert(PyLong_CheckExact(sym_get_const(ctx, right))); - PyObject *temp = _PyLong_Multiply((PyLongObject *)sym_get_const(ctx, left), - (PyLongObject *)sym_get_const(ctx, right)); - if (temp == NULL) { - goto error; - } - res = sym_new_const(ctx, temp); - stack_pointer[-2] = res; - stack_pointer += -1; - assert(WITHIN_STACK_BOUNDS()); - Py_DECREF(temp); - } - else { - res = sym_new_type(ctx, &PyLong_Type); - stack_pointer += -1; - } - stack_pointer[-1] = res; + res = sym_new_type(ctx, &PyLong_Type); + stack_pointer[-2] = res; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); break; } case _BINARY_OP_ADD_INT: { - JitOptSymbol *right; - JitOptSymbol *left; JitOptSymbol *res; - right = stack_pointer[-1]; - left = stack_pointer[-2]; - if (sym_is_const(ctx, left) && sym_is_const(ctx, right)) { - assert(PyLong_CheckExact(sym_get_const(ctx, left))); - assert(PyLong_CheckExact(sym_get_const(ctx, right))); - PyObject *temp = _PyLong_Add((PyLongObject *)sym_get_const(ctx, left), - (PyLongObject *)sym_get_const(ctx, right)); - if (temp == NULL) { - goto error; - } - res = sym_new_const(ctx, temp); - stack_pointer[-2] = res; - stack_pointer += -1; - assert(WITHIN_STACK_BOUNDS()); - Py_DECREF(temp); - } - else { - res = sym_new_type(ctx, &PyLong_Type); - stack_pointer += -1; - } - stack_pointer[-1] = res; + res = sym_new_type(ctx, &PyLong_Type); + stack_pointer[-2] = res; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); break; } case _BINARY_OP_SUBTRACT_INT: { - JitOptSymbol *right; - JitOptSymbol *left; 
JitOptSymbol *res; - right = stack_pointer[-1]; - left = stack_pointer[-2]; - if (sym_is_const(ctx, left) && sym_is_const(ctx, right)) { - assert(PyLong_CheckExact(sym_get_const(ctx, left))); - assert(PyLong_CheckExact(sym_get_const(ctx, right))); - PyObject *temp = _PyLong_Subtract((PyLongObject *)sym_get_const(ctx, left), - (PyLongObject *)sym_get_const(ctx, right)); - if (temp == NULL) { - goto error; - } - res = sym_new_const(ctx, temp); - stack_pointer[-2] = res; - stack_pointer += -1; - assert(WITHIN_STACK_BOUNDS()); - Py_DECREF(temp); - } - else { - res = sym_new_type(ctx, &PyLong_Type); - stack_pointer += -1; - } - stack_pointer[-1] = res; + res = sym_new_type(ctx, &PyLong_Type); + stack_pointer[-2] = res; + stack_pointer += -1; + assert(WITHIN_STACK_BOUNDS()); break; } @@ -1890,6 +1833,16 @@ } case _CHECK_METHOD_VERSION: { + JitOptSymbol *callable; + callable = stack_pointer[-2 - oparg]; + uint32_t func_version = (uint32_t)this_instr->operand0; + if (sym_is_const(ctx, callable) && sym_matches_type(callable, &PyMethod_Type)) { + PyMethodObject *method = (PyMethodObject *)sym_get_const(ctx, callable); + assert(PyMethod_Check(method)); + REPLACE_OP(this_instr, _CHECK_FUNCTION_VERSION_INLINE, 0, func_version); + this_instr->operand1 = (uintptr_t)method->im_func; + } + sym_set_type(callable, &PyMethod_Type); break; } diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index fca9b29f9eb..ca6d0301f35 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -635,6 +635,10 @@ NON_ESCAPING_FUNCTIONS = ( "_PyLong_IsNegative", "_PyLong_IsNonNegativeCompact", "_PyLong_IsZero", + "_PyLong_BothAreCompact", + "_PyCompactLong_Add", + "_PyCompactLong_Multiply", + "_PyCompactLong_Subtract", "_PyManagedDictPointer_IsValues", "_PyObject_GC_IS_SHARED", "_PyObject_GC_IS_TRACKED", diff --git a/configure b/configure index fef9f2d7da9..2b1482d9264 100755 --- a/configure +++ b/configure @@ -1826,8 +1826,8 @@ Optional Features: no) --enable-profiling enable C-level code profiling with gprof (default is no) - --disable-gil enable experimental support for running without the - GIL (default is no) + --disable-gil enable support for running without the GIL (default + is no) --enable-pystats enable internal statistics gathering (default is no) --enable-optimizations enable expensive, stable optimizations (PGO, etc.) 
(default is no) @@ -23849,7 +23849,7 @@ fi -ac_fn_check_decl "$LINENO" "MAXLOGNAME" "ac_cv_have_decl_MAXLOGNAME" "#include <sys/params.h> +ac_fn_check_decl "$LINENO" "MAXLOGNAME" "ac_cv_have_decl_MAXLOGNAME" "#include <sys/param.h> " "$ac_c_undeclared_builtin_options" "CFLAGS" if test "x$ac_cv_have_decl_MAXLOGNAME" = xyes then : diff --git a/configure.ac b/configure.ac index cc37a636c52..7dbb332f1d3 100644 --- a/configure.ac +++ b/configure.ac @@ -1716,7 +1716,7 @@ ABI_THREAD="" # --disable-gil AC_MSG_CHECKING([for --disable-gil]) AC_ARG_ENABLE([gil], - [AS_HELP_STRING([--disable-gil], [enable experimental support for running without the GIL (default is no)])], + [AS_HELP_STRING([--disable-gil], [enable support for running without the GIL (default is no)])], [AS_VAR_IF([enable_gil], [yes], [disable_gil=no], [disable_gil=yes])], [disable_gil=no] ) AC_MSG_RESULT([$disable_gil]) @@ -5542,7 +5542,7 @@ AC_CHECK_DECL([MAXLOGNAME], [AC_DEFINE([HAVE_MAXLOGNAME], [1], [Define if you have the 'MAXLOGNAME' constant.])], [], - [@%:@include <sys/params.h>]) + [@%:@include <sys/param.h>]) AC_CHECK_DECLS([UT_NAMESIZE], [AC_DEFINE([HAVE_UT_NAMESIZE], [1], |