133 files changed, 2289 insertions, 2205 deletions
diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml index c133193d30c..95133c1338b 100644 --- a/.github/workflows/mypy.yml +++ b/.github/workflows/mypy.yml @@ -14,10 +14,12 @@ on: - "Lib/tomllib/**" - "Misc/mypy/**" - "Tools/build/compute-changes.py" + - "Tools/build/deepfreeze.py" - "Tools/build/generate_sbom.py" - "Tools/build/generate-build-details.py" - "Tools/build/verify_ensurepip_wheels.py" - "Tools/build/update_file.py" + - "Tools/build/umarshal.py" - "Tools/cases_generator/**" - "Tools/clinic/**" - "Tools/jit/**" diff --git a/.github/workflows/reusable-context.yml b/.github/workflows/reusable-context.yml index 73dc254edc5..d2668ddcac1 100644 --- a/.github/workflows/reusable-context.yml +++ b/.github/workflows/reusable-context.yml @@ -97,6 +97,7 @@ jobs: run: python Tools/build/compute-changes.py env: GITHUB_DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} + GITHUB_EVENT_NAME: ${{ github.event_name }} CCF_TARGET_REF: ${{ github.base_ref || github.event.repository.default_branch }} CCF_HEAD_REF: ${{ github.event.pull_request.head.sha || github.sha }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7ad829c94d5..3632cf39203 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -47,6 +47,8 @@ repos: exclude: Lib/test/tokenizedata/coding20731.py - id: trailing-whitespace types_or: [c, inc, python, rst] + - id: trailing-whitespace + files: '\.(gram)$' - repo: https://github.com/python-jsonschema/check-jsonschema rev: 0.33.0 diff --git a/Doc/c-api/gcsupport.rst b/Doc/c-api/gcsupport.rst index d1f0982b818..3e23605778f 100644 --- a/Doc/c-api/gcsupport.rst +++ b/Doc/c-api/gcsupport.rst @@ -180,9 +180,9 @@ provided. In order to use this macro, the :c:member:`~PyTypeObject.tp_traverse` must name its arguments exactly *visit* and *arg*: -.. c:function:: void Py_VISIT(PyObject *o) +.. c:macro:: Py_VISIT(o) - If *o* is not ``NULL``, call the *visit* callback, with arguments *o* + If the :c:expr:`PyObject *` *o* is not ``NULL``, call the *visit* callback, with arguments *o* and *arg*. If *visit* returns a non-zero value, then return it. Using this macro, :c:member:`~PyTypeObject.tp_traverse` handlers look like:: diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst index 90767b12471..9c866438b48 100644 --- a/Doc/c-api/init.rst +++ b/Doc/c-api/init.rst @@ -919,8 +919,36 @@ Note that the ``PyGILState_*`` functions assume there is only one global interpreter (created automatically by :c:func:`Py_Initialize`). Python supports the creation of additional interpreters (using :c:func:`Py_NewInterpreter`), but mixing multiple interpreters and the -``PyGILState_*`` API is unsupported. +``PyGILState_*`` API is unsupported. This is because :c:func:`PyGILState_Ensure` +and similar functions default to :term:`attaching <attached thread state>` a +:term:`thread state` for the main interpreter, meaning that the thread can't safely +interact with the calling subinterpreter. + +Supporting subinterpreters in non-Python threads +------------------------------------------------ + +If you would like to support subinterpreters with non-Python created threads, you +must use the ``PyThreadState_*`` API instead of the traditional ``PyGILState_*`` +API. + +In particular, you must store the interpreter state from the calling +function and pass it to :c:func:`PyThreadState_New`, which will ensure that +the :term:`thread state` is targeting the correct interpreter:: + + /* The return value of PyInterpreterState_Get() from the + function that created this thread. 
*/ + PyInterpreterState *interp = ThreadData->interp; + PyThreadState *tstate = PyThreadState_New(interp); + PyThreadState_Swap(tstate); + + /* GIL of the subinterpreter is now held. + Perform Python actions here. */ + result = CallSomeFunction(); + /* evaluate result or handle exception */ + /* Destroy the thread state. No Python API allowed beyond this point. */ + PyThreadState_Clear(tstate); + PyThreadState_DeleteCurrent(); .. _fork-and-threads: @@ -1097,6 +1125,10 @@ code, or when embedding the Python interpreter: .. seealso: :c:func:`PyEval_ReleaseThread` + .. note:: + Similar to :c:func:`PyGILState_Ensure`, this function will hang the + thread if the runtime is finalizing. + The following functions use thread-local storage, and are not compatible with sub-interpreters: @@ -1123,10 +1155,10 @@ with sub-interpreters: When the function returns, there will be an :term:`attached thread state` and the thread will be able to call arbitrary Python code. Failure is a fatal error. - .. note:: - Calling this function from a thread when the runtime is finalizing will - hang the thread until the program exits, even if the thread was not - created by Python. Refer to + .. warning:: + Calling this function when the runtime is finalizing is unsafe. Doing + so will either hang the thread until the program ends, or fully crash + the interpreter in rare cases. Refer to :ref:`cautions-regarding-runtime-finalization` for more details. .. versionchanged:: 3.14 @@ -1143,7 +1175,6 @@ with sub-interpreters: Every call to :c:func:`PyGILState_Ensure` must be matched by a call to :c:func:`PyGILState_Release` on the same thread. - .. c:function:: PyThreadState* PyGILState_GetThisThreadState() Get the :term:`attached thread state` for this thread. May return ``NULL`` if no @@ -1151,20 +1182,30 @@ with sub-interpreters: always has such a thread-state, even if no auto-thread-state call has been made on the main thread. This is mainly a helper/diagnostic function. - .. seealso: :c:func:`PyThreadState_Get`` + .. note:: + This function does not account for :term:`thread states <thread state>` created + by something other than :c:func:`PyGILState_Ensure` (such as :c:func:`PyThreadState_New`). + Prefer :c:func:`PyThreadState_Get` or :c:func:`PyThreadState_GetUnchecked` + for most cases. + .. seealso: :c:func:`PyThreadState_Get`` .. c:function:: int PyGILState_Check() Return ``1`` if the current thread is holding the :term:`GIL` and ``0`` otherwise. This function can be called from any thread at any time. - Only if it has had its Python thread state initialized and currently is - holding the :term:`GIL` will it return ``1``. + Only if it has had its :term:`thread state <attached thread state>` initialized + via :c:func:`PyGILState_Ensure` will it return ``1``. This is mainly a helper/diagnostic function. It can be useful for example in callback contexts or memory allocation functions when knowing that the :term:`GIL` is locked can allow the caller to perform sensitive actions or otherwise behave differently. + .. note:: + If the current Python process has ever created a subinterpreter, this + function will *always* return ``1``. Prefer :c:func:`PyThreadState_GetUnchecked` + for most cases. + .. 
versionadded:: 3.4 diff --git a/Doc/deprecations/pending-removal-in-3.15.rst b/Doc/deprecations/pending-removal-in-3.15.rst index c80588b27b6..a76d06cce12 100644 --- a/Doc/deprecations/pending-removal-in-3.15.rst +++ b/Doc/deprecations/pending-removal-in-3.15.rst @@ -20,7 +20,7 @@ Pending removal in Python 3.15 * :mod:`http.server`: - * The obsolete and rarely used :class:`~http.server.CGIHTTPRequestHandler` + * The obsolete and rarely used :class:`!CGIHTTPRequestHandler` has been deprecated since Python 3.13. No direct replacement exists. *Anything* is better than CGI to interface @@ -51,7 +51,7 @@ Pending removal in Python 3.15 * :mod:`platform`: - * :func:`~platform.java_ver` has been deprecated since Python 3.13. + * :func:`!platform.java_ver` has been deprecated since Python 3.13. This function is only useful for Jython support, has a confusing API, and is largely untested. @@ -99,8 +99,7 @@ Pending removal in Python 3.15 * :mod:`wave`: - * The :meth:`~wave.Wave_read.getmark`, :meth:`!setmark`, - and :meth:`~wave.Wave_read.getmarkers` methods of + * The ``getmark()``, ``setmark()`` and ``getmarkers()`` methods of the :class:`~wave.Wave_read` and :class:`~wave.Wave_write` classes have been deprecated since Python 3.13. diff --git a/Doc/faq/design.rst b/Doc/faq/design.rst index e2710fab9cf..c758c019ca4 100644 --- a/Doc/faq/design.rst +++ b/Doc/faq/design.rst @@ -420,10 +420,12 @@ strings representing the files in the current directory. Functions which operate on this output would generally not break if you added another file or two to the directory. -Tuples are immutable, meaning that once a tuple has been created, you can't -replace any of its elements with a new value. Lists are mutable, meaning that -you can always change a list's elements. Only immutable elements can be used as -dictionary keys, and hence only tuples and not lists can be used as keys. +Tuples are :term:`immutable`, meaning that once a tuple has been created, you can't +replace any of its elements with a new value. Lists are :term:`mutable`, meaning that +you can always change a list's elements. Only :term:`hashable` objects can +be used as dictionary keys. Most immutable types are hashable, which is why +tuples, but not lists, can be used as keys. Note, however, that a tuple is +only hashable if all of its elements are hashable. How are lists implemented in CPython? diff --git a/Doc/howto/functional.rst b/Doc/howto/functional.rst index 1f0608fb0fc..b4f3463afee 100644 --- a/Doc/howto/functional.rst +++ b/Doc/howto/functional.rst @@ -372,8 +372,8 @@ have the form:: for expr2 in sequence2 if condition2 for expr3 in sequence3 - ... if condition3 + ... for exprN in sequenceN if conditionN ) diff --git a/Doc/library/asyncio-dev.rst b/Doc/library/asyncio-dev.rst index 44b507a9811..7831b613bd4 100644 --- a/Doc/library/asyncio-dev.rst +++ b/Doc/library/asyncio-dev.rst @@ -46,10 +46,6 @@ In addition to enabling the debug mode, consider also: When the debug mode is enabled: -* asyncio checks for :ref:`coroutines that were not awaited - <asyncio-coroutine-not-scheduled>` and logs them; this mitigates - the "forgotten await" pitfall. - * Many non-threadsafe asyncio APIs (such as :meth:`loop.call_soon` and :meth:`loop.call_at` methods) raise an exception if they are called from a wrong thread. diff --git a/Doc/library/dataclasses.rst b/Doc/library/dataclasses.rst index 72612211b43..f18c7cc9c02 100644 --- a/Doc/library/dataclasses.rst +++ b/Doc/library/dataclasses.rst @@ -304,9 +304,9 @@ Module contents .. 
versionadded:: 3.10 - - ``doc``: optional docstring for this field. + - *doc*: optional docstring for this field. - .. versionadded:: 3.13 + .. versionadded:: 3.14 If the default value of a field is specified by a call to :func:`!field`, then the class attribute for this field will be diff --git a/Doc/library/http.server.rst b/Doc/library/http.server.rst index 54df4a7e804..063344e0284 100644 --- a/Doc/library/http.server.rst +++ b/Doc/library/http.server.rst @@ -429,8 +429,7 @@ instantiation, of which this module provides three different variants: ``'Last-Modified:'`` header with the file's modification time. Then follows a blank line signifying the end of the headers, and then the - contents of the file are output. If the file's MIME type starts with - ``text/`` the file is opened in text mode; otherwise binary mode is used. + contents of the file are output. For example usage, see the implementation of the ``test`` function in :source:`Lib/http/server.py`. @@ -459,55 +458,6 @@ such as using different index file names by overriding the class attribute :attr:`index_pages`. -.. class:: CGIHTTPRequestHandler(request, client_address, server) - - This class is used to serve either files or output of CGI scripts from the - current directory and below. Note that mapping HTTP hierarchic structure to - local directory structure is exactly as in :class:`SimpleHTTPRequestHandler`. - - .. note:: - - CGI scripts run by the :class:`CGIHTTPRequestHandler` class cannot execute - redirects (HTTP code 302), because code 200 (script output follows) is - sent prior to execution of the CGI script. This pre-empts the status - code. - - The class will however, run the CGI script, instead of serving it as a file, - if it guesses it to be a CGI script. Only directory-based CGI are used --- - the other common server configuration is to treat special extensions as - denoting CGI scripts. - - The :func:`do_GET` and :func:`do_HEAD` functions are modified to run CGI scripts - and serve the output, instead of serving files, if the request leads to - somewhere below the ``cgi_directories`` path. - - The :class:`CGIHTTPRequestHandler` defines the following data member: - - .. attribute:: cgi_directories - - This defaults to ``['/cgi-bin', '/htbin']`` and describes directories to - treat as containing CGI scripts. - - The :class:`CGIHTTPRequestHandler` defines the following method: - - .. method:: do_POST() - - This method serves the ``'POST'`` request type, only allowed for CGI - scripts. Error 501, "Can only POST to CGI scripts", is output when trying - to POST to a non-CGI url. - - Note that CGI scripts will be run with UID of user nobody, for security - reasons. Problems with the CGI script will be translated to error 403. - - .. deprecated-removed:: 3.13 3.15 - - :class:`CGIHTTPRequestHandler` is being removed in 3.15. CGI has not - been considered a good way to do things for well over a decade. This code - has been unmaintained for a while now and sees very little practical use. - Retaining it could lead to further :ref:`security considerations - <http.server-security>`. - - .. _http-server-cli: Command-line interface @@ -564,24 +514,6 @@ The following options are accepted: .. versionadded:: 3.11 -.. option:: --cgi - - :class:`CGIHTTPRequestHandler` can be enabled in the command line by passing - the ``--cgi`` option:: - - python -m http.server --cgi - - .. deprecated-removed:: 3.13 3.15 - - :mod:`http.server` command line ``--cgi`` support is being removed - because :class:`CGIHTTPRequestHandler` is being removed. 
- -.. warning:: - - :class:`CGIHTTPRequestHandler` and the ``--cgi`` command-line option - are not intended for use by untrusted clients and may be vulnerable - to exploitation. Always use within a secure environment. - .. option:: --tls-cert Specifies a TLS certificate chain for HTTPS connections:: diff --git a/Doc/library/json.rst b/Doc/library/json.rst index 26579ec6328..12a5a96a3c5 100644 --- a/Doc/library/json.rst +++ b/Doc/library/json.rst @@ -18,12 +18,17 @@ is a lightweight data interchange format inspired by `JavaScript <https://en.wikipedia.org/wiki/JavaScript>`_ object literal syntax (although it is not a strict subset of JavaScript [#rfc-errata]_ ). +.. note:: + The term "object" in the context of JSON processing in Python can be + ambiguous. All values in Python are objects. In JSON, an object refers to + any data wrapped in curly braces, similar to a Python dictionary. + .. warning:: Be cautious when parsing JSON data from untrusted sources. A malicious JSON string may cause the decoder to consume considerable CPU and memory resources. Limiting the size of data to be parsed is recommended. -:mod:`json` exposes an API familiar to users of the standard library +This module exposes an API familiar to users of the standard library :mod:`marshal` and :mod:`pickle` modules. Encoding basic Python object hierarchies:: @@ -60,7 +65,7 @@ Pretty printing:: "6": 7 } -Specializing JSON object encoding:: +Customizing JSON object encoding:: >>> import json >>> def custom_json(obj): @@ -83,7 +88,7 @@ Decoding JSON:: >>> json.load(io) ['streaming API'] -Specializing JSON object decoding:: +Customizing JSON object decoding:: >>> import json >>> def as_complex(dct): @@ -279,7 +284,7 @@ Basic Usage :param object_hook: If set, a function that is called with the result of - any object literal decoded (a :class:`dict`). + any JSON object literal decoded (a :class:`dict`). The return value of this function will be used instead of the :class:`dict`. This feature can be used to implement custom decoders, @@ -289,7 +294,7 @@ Basic Usage :param object_pairs_hook: If set, a function that is called with the result of - any object literal decoded with an ordered list of pairs. + any JSON object literal decoded with an ordered list of pairs. The return value of this function will be used instead of the :class:`dict`. This feature can be used to implement custom decoders. diff --git a/Doc/library/logging.rst b/Doc/library/logging.rst index 72190e97240..4509da58916 100644 --- a/Doc/library/logging.rst +++ b/Doc/library/logging.rst @@ -1342,8 +1342,9 @@ functions. .. function:: basicConfig(**kwargs) - Does basic configuration for the logging system by creating a - :class:`StreamHandler` with a default :class:`Formatter` and adding it to the + Does basic configuration for the logging system by either creating a + :class:`StreamHandler` with a default :class:`Formatter` + or using the given *formatter* instance, and adding it to the root logger. The functions :func:`debug`, :func:`info`, :func:`warning`, :func:`error` and :func:`critical` will call :func:`basicConfig` automatically if no handlers are defined for the root logger. @@ -1428,6 +1429,19 @@ functions. | | which means that it will be treated the | | | same as passing 'errors'. | +--------------+---------------------------------------------+ + | *formatter* | If specified, set this formatter instance | + | | (see :ref:`formatter-objects`) | + | | for all involved handlers. 
| + | | If not specified, the default is to create | + | | and use an instance of | + | | :class:`logging.Formatter` based on | + | | arguments *format*, *datefmt* and *style*. | + | | When *formatter* is specified together with | + | | any of the three arguments *format*, | + | | *datefmt* and *style*, a ``ValueError`` is | + | | raised to signal that these arguments would | + | | lose meaning otherwise. | + +--------------+---------------------------------------------+ .. versionchanged:: 3.2 The *style* argument was added. @@ -1444,6 +1458,9 @@ functions. .. versionchanged:: 3.9 The *encoding* and *errors* arguments were added. + .. versionchanged:: 3.15 + The *formatter* argument was added. + .. function:: shutdown() Informs the logging system to perform an orderly shutdown by flushing and diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst index 7d7692dea5c..86351e65dc4 100644 --- a/Doc/library/pathlib.rst +++ b/Doc/library/pathlib.rst @@ -1781,9 +1781,12 @@ The following wildcards are supported in patterns for ``?`` Matches one non-separator character. ``[seq]`` - Matches one character in *seq*. + Matches one character in *seq*, where *seq* is a sequence of characters. + Range expressions are supported; for example, ``[a-z]`` matches any lowercase ASCII letter. + Multiple ranges can be combined: ``[a-zA-Z0-9_]`` matches any ASCII letter, digit, or underscore. + ``[!seq]`` - Matches one character not in *seq*. + Matches one character not in *seq*, where *seq* follows the same rules as above. For a literal match, wrap the meta-characters in brackets. For example, ``"[?]"`` matches the character ``"?"``. diff --git a/Doc/library/platform.rst b/Doc/library/platform.rst index 5c999054323..06de152a742 100644 --- a/Doc/library/platform.rst +++ b/Doc/library/platform.rst @@ -188,24 +188,6 @@ Cross platform :attr:`processor` is resolved late instead of immediately. -Java platform -------------- - - -.. function:: java_ver(release='', vendor='', vminfo=('','',''), osinfo=('','','')) - - Version interface for Jython. - - Returns a tuple ``(release, vendor, vminfo, osinfo)`` with *vminfo* being a - tuple ``(vm_name, vm_release, vm_vendor)`` and *osinfo* being a tuple - ``(os_name, os_version, os_arch)``. Values which cannot be determined are set to - the defaults given as parameters (which all default to ``''``). - - .. deprecated-removed:: 3.13 3.15 - It was largely untested, had a confusing API, - and was only useful for Jython support. - - Windows platform ---------------- diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index 1d9a655c766..3486a18b5cb 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -2269,6 +2269,18 @@ expression support in the :mod:`re` module). >>> ' 1 2 3 '.split() ['1', '2', '3'] + If *sep* is not specified or is ``None`` and *maxsplit* is ``0``, only + leading runs of consecutive whitespace are considered. + + For example:: + + >>> "".split(None, 0) + [] + >>> " ".split(None, 0) + [] + >>> " foo ".split(maxsplit=0) + ['foo '] + .. index:: single: universal newlines; str.splitlines method diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst index 249c0a5cb03..7edcdcabdce 100644 --- a/Doc/library/threading.rst +++ b/Doc/library/threading.rst @@ -11,6 +11,52 @@ This module constructs higher-level threading interfaces on top of the lower level :mod:`_thread` module. +.. 
include:: ../includes/wasm-notavail.rst + +Introduction +------------ + +The :mod:`!threading` module provides a way to run multiple `threads +<https://en.wikipedia.org/wiki/Thread_(computing)>`_ (smaller +units of a process) concurrently within a single process. It allows for the +creation and management of threads, making it possible to execute tasks in +parallel, sharing memory space. Threads are particularly useful when tasks are +I/O bound, such as file operations or making network requests, +where much of the time is spent waiting for external resources. + +A typical use case for :mod:`!threading` includes managing a pool of worker +threads that can process multiple tasks concurrently. Here's a basic example of +creating and starting threads using :class:`~threading.Thread`:: + + import threading + import time + + def crawl(link, delay=3): + print(f"crawl started for {link}") + time.sleep(delay) # Blocking I/O (simulating a network request) + print(f"crawl ended for {link}") + + links = [ + "https://python.org", + "https://docs.python.org", + "https://peps.python.org", + ] + + # Start threads for each link + threads = [] + for link in links: + # Using `args` to pass positional arguments and `kwargs` for keyword arguments + t = threading.Thread(target=crawl, args=(link,), kwargs={"delay": 2}) + threads.append(t) + + # Start each thread + for t in threads: + t.start() + + # Wait for all threads to finish + for t in threads: + t.join() + .. versionchanged:: 3.7 This module used to be optional, it is now always available. @@ -45,7 +91,25 @@ level :mod:`_thread` module. However, threading is still an appropriate model if you want to run multiple I/O-bound tasks simultaneously. -.. include:: ../includes/wasm-notavail.rst +GIL and performance considerations +---------------------------------- + +Unlike the :mod:`multiprocessing` module, which uses separate processes to +bypass the :term:`global interpreter lock` (GIL), the threading module operates +within a single process, meaning that all threads share the same memory space. +However, the GIL limits the performance gains of threading when it comes to +CPU-bound tasks, as only one thread can execute Python bytecode at a time. +Despite this, threads remain a useful tool for achieving concurrency in many +scenarios. + +As of Python 3.13, experimental :term:`free-threaded <free threading>` builds +can disable the GIL, enabling true parallel execution of threads, but this +feature is not available by default (see :pep:`703`). + +.. TODO: At some point this feature will become available by default. + +Reference +--------- This module defines the following functions: @@ -62,7 +126,7 @@ This module defines the following functions: Return the current :class:`Thread` object, corresponding to the caller's thread of control. If the caller's thread of control was not created through the - :mod:`threading` module, a dummy thread object with limited functionality is + :mod:`!threading` module, a dummy thread object with limited functionality is returned. The function ``currentThread`` is a deprecated alias for this function. @@ -157,13 +221,13 @@ This module defines the following functions: .. index:: single: trace function - Set a trace function for all threads started from the :mod:`threading` module. + Set a trace function for all threads started from the :mod:`!threading` module. The *func* will be passed to :func:`sys.settrace` for each thread, before its :meth:`~Thread.run` method is called. .. 
function:: settrace_all_threads(func) - Set a trace function for all threads started from the :mod:`threading` module + Set a trace function for all threads started from the :mod:`!threading` module and all Python threads that are currently executing. The *func* will be passed to :func:`sys.settrace` for each thread, before its @@ -186,13 +250,13 @@ This module defines the following functions: .. index:: single: profile function - Set a profile function for all threads started from the :mod:`threading` module. + Set a profile function for all threads started from the :mod:`!threading` module. The *func* will be passed to :func:`sys.setprofile` for each thread, before its :meth:`~Thread.run` method is called. .. function:: setprofile_all_threads(func) - Set a profile function for all threads started from the :mod:`threading` module + Set a profile function for all threads started from the :mod:`!threading` module and all Python threads that are currently executing. The *func* will be passed to :func:`sys.setprofile` for each thread, before its @@ -257,8 +321,8 @@ when implemented, are mapped to module-level functions. All of the methods described below are executed atomically. -Thread-Local Data ------------------ +Thread-local data +^^^^^^^^^^^^^^^^^ Thread-local data is data whose values are thread specific. If you have data that you want to be local to a thread, create a @@ -389,8 +453,8 @@ affects what we see:: .. _thread-objects: -Thread Objects --------------- +Thread objects +^^^^^^^^^^^^^^ The :class:`Thread` class represents an activity that is run in a separate thread of control. There are two ways to specify the activity: by passing a @@ -645,8 +709,8 @@ since it is impossible to detect the termination of alien threads. .. _lock-objects: -Lock Objects ------------- +Lock objects +^^^^^^^^^^^^ A primitive lock is a synchronization primitive that is not owned by a particular thread when locked. In Python, it is currently the lowest level @@ -738,8 +802,8 @@ All methods are executed atomically. .. _rlock-objects: -RLock Objects -------------- +RLock objects +^^^^^^^^^^^^^ A reentrant lock is a synchronization primitive that may be acquired multiple times by the same thread. Internally, it uses the concepts of "owning thread" @@ -848,8 +912,8 @@ call release as many times the lock has been acquired can lead to deadlock. .. _condition-objects: -Condition Objects ------------------ +Condition objects +^^^^^^^^^^^^^^^^^ A condition variable is always associated with some kind of lock; this can be passed in or one will be created by default. Passing one in is useful when @@ -1026,8 +1090,8 @@ item to the buffer only needs to wake up one consumer thread. .. _semaphore-objects: -Semaphore Objects ------------------ +Semaphore objects +^^^^^^^^^^^^^^^^^ This is one of the oldest synchronization primitives in the history of computer science, invented by the early Dutch computer scientist Edsger W. Dijkstra (he @@ -1107,7 +1171,7 @@ Semaphores also support the :ref:`context management protocol <with-locks>`. .. _semaphore-examples: -:class:`Semaphore` Example +:class:`Semaphore` example ^^^^^^^^^^^^^^^^^^^^^^^^^^ Semaphores are often used to guard resources with limited capacity, for example, @@ -1135,8 +1199,8 @@ causes the semaphore to be released more than it's acquired will go undetected. .. 
_event-objects: -Event Objects -------------- +Event objects +^^^^^^^^^^^^^ This is one of the simplest mechanisms for communication between threads: one thread signals an event and other threads wait for it. @@ -1192,8 +1256,8 @@ method. The :meth:`~Event.wait` method blocks until the flag is true. .. _timer-objects: -Timer Objects -------------- +Timer objects +^^^^^^^^^^^^^ This class represents an action that should be run only after a certain amount of time has passed --- a timer. :class:`Timer` is a subclass of :class:`Thread` @@ -1230,8 +1294,8 @@ For example:: only work if the timer is still in its waiting stage. -Barrier Objects ---------------- +Barrier objects +^^^^^^^^^^^^^^^ .. versionadded:: 3.2 diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst index 61022fe052c..dcdda1719bf 100644 --- a/Doc/library/unittest.rst +++ b/Doc/library/unittest.rst @@ -109,7 +109,7 @@ Here is a short script to test three string methods:: unittest.main() -A testcase is created by subclassing :class:`unittest.TestCase`. The three +A test case is created by subclassing :class:`unittest.TestCase`. The three individual tests are defined with methods whose names start with the letters ``test``. This naming convention informs the test runner about which methods represent tests. diff --git a/Doc/library/wave.rst b/Doc/library/wave.rst index 36c2bde87fb..a3f5bfd5e2f 100644 --- a/Doc/library/wave.rst +++ b/Doc/library/wave.rst @@ -123,26 +123,6 @@ Wave_read Objects Rewind the file pointer to the beginning of the audio stream. - The following two methods are defined for compatibility with the old :mod:`!aifc` - module, and don't do anything interesting. - - - .. method:: getmarkers() - - Returns ``None``. - - .. deprecated-removed:: 3.13 3.15 - The method only existed for compatibility with the :mod:`!aifc` module - which has been removed in Python 3.13. - - - .. method:: getmark(id) - - Raise an error. - - .. deprecated-removed:: 3.13 3.15 - The method only existed for compatibility with the :mod:`!aifc` module - which has been removed in Python 3.13. The following two methods define a term "position" which is compatible between them, and is otherwise implementation dependent. diff --git a/Doc/tools/templates/layout.html b/Doc/tools/templates/layout.html index 56023ebf962..1cb0200822d 100644 --- a/Doc/tools/templates/layout.html +++ b/Doc/tools/templates/layout.html @@ -26,11 +26,11 @@ {% endblock %} {% block extrahead %} - {% if builder == "html" and enable_analytics %} + {% if builder == "html" %} + {% if enable_analytics %} <script defer data-domain="docs.python.org" src="https://analytics.python.org/js/script.outbound-links.js"></script> - {% endif %} - <link rel="canonical" href="https://docs.python.org/3/{{pagename}}.html"> - {% if builder != "htmlhelp" %} + {% endif %} + <link rel="canonical" href="https://docs.python.org/3/{{pagename}}.html"> {% if pagename == 'whatsnew/changelog' and not embedded %} <script type="text/javascript" src="{{ pathto('_static/changelog_search.js', 1) }}"></script>{% endif %} {% endif %} diff --git a/Doc/tutorial/controlflow.rst b/Doc/tutorial/controlflow.rst index 95939242fb7..5c0e8f34bf8 100644 --- a/Doc/tutorial/controlflow.rst +++ b/Doc/tutorial/controlflow.rst @@ -999,7 +999,8 @@ scope:: 43 The above example uses a lambda expression to return a function. Another use -is to pass a small function as an argument:: +is to pass a small function as an argument. 
For instance, :meth:`list.sort` +takes a sorting key function *key* which can be a lambda function:: >>> pairs = [(1, 'one'), (2, 'two'), (3, 'three'), (4, 'four')] >>> pairs.sort(key=lambda pair: pair[1]) @@ -1055,7 +1056,7 @@ Here is an example of a multi-line docstring:: >>> print(my_function.__doc__) Do nothing, but document it. - No, really, it doesn't do anything. + No, really, it doesn't do anything. .. _tut-annotations: diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst index 38b97a34fcd..cd22148b531 100644 --- a/Doc/using/windows.rst +++ b/Doc/using/windows.rst @@ -98,8 +98,9 @@ does not affect any installs of Python runtimes. Uninstalling the Python install manager does not uninstall any Python runtimes. If you are not able to install an MSIX in your context, for example, you are -using automated deployment software that does not support it, please see -:ref:`pymanager-advancedinstall` below for more information. +using automated deployment software that does not support it, or are targeting +Windows Server 2019, please see :ref:`pymanager-advancedinstall` below for more +information. Basic Use @@ -515,6 +516,11 @@ per-machine installs to its default location in Program Files. It will attempt to modify the system :envvar:`PATH` environment variable to include this install location, but be sure to validate this on your configuration. +.. note:: + + Windows Server 2019 is the only version of Windows that CPython supports that + does not support MSIX. For Windows Server 2019, you should use the MSI. + Be aware that the MSI package does not bundle any runtimes, and so is not suitable for installs into offline environments without also creating an offline install index. See :ref:`pymanager-offline` and :ref:`pymanager-admin-config` @@ -535,13 +541,32 @@ depending on whether it was installed from python.org or through the Windows Store. Attempting to run the executable directly from Program Files is not recommended. -To programmatically install or uninstall the MSIX without using your -distribution platform's native support, the `Add-AppxPackage -<https://learn.microsoft.com/powershell/module/appx/add-appxpackage>`_ and -`Remove-AppxPackage <https://learn.microsoft.com/powershell/module/appx/remove-appxpackage>`_ -PowerShell cmdlets are simplest to use: +To programmatically install the Python install manager, it is easiest to use +WinGet, which is included with all supported versions of Windows: -.. code:: +.. code-block:: powershell + + $> winget install 9NQ7512CXL7T -e --accept-package-agreements --disable-interactivity + + # Optionally run the configuration checker and accept all changes + $> py install --configure -y + +To download the Python install manager and install on another machine, the +following WinGet command will download the required files from the Store to your +Downloads directory (add ``-d <location>`` to customize the output location). +This also generates a YAML file that appears to be unnecessary, as the +downloaded MSIX can be installed by launching or using the commands below. + +.. code-block:: powershell + + $> winget download 9NQ7512CXL7T -e --skip-license --accept-package-agreements --disable-interactivity + +To programmatically install or uninstall an MSIX using only PowerShell, the +`Add-AppxPackage <https://learn.microsoft.com/powershell/module/appx/add-appxpackage>`_ +and `Remove-AppxPackage <https://learn.microsoft.com/powershell/module/appx/remove-appxpackage>`_ +PowerShell cmdlets are recommended: + +.. 
code-block:: powershell $> Add-AppxPackage C:\Downloads\python-manager-25.0.msix ... @@ -555,6 +580,11 @@ to install the Python install manager for all users from the MSIX package. Users will still need to install their own copies of Python itself, as there is no way to trigger those installs without being a logged in user. +Note that the MSIX downloadable from the Store and from the Python website are +subtly different and cannot be installed at the same time. Wherever possible, +we suggest using the above commands to download the package from the Store to +reduce the risk of setting up conflicting installs. + .. _pymanager-admin-config: Administrative Configuration diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index f5e38950756..1067601c652 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -551,11 +551,12 @@ Patterns and classes If you are using classes to structure your data, you can use as a pattern the class name followed by an argument list resembling a constructor. This -pattern has the ability to capture class attributes into variables:: +pattern has the ability to capture instance attributes into variables:: class Point: - x: int - y: int + def __init__(self, x, y): + self.x = x + self.y = y def location(point): match point: diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst index b3530f75b2f..023c279979d 100644 --- a/Doc/whatsnew/3.13.rst +++ b/Doc/whatsnew/3.13.rst @@ -1871,7 +1871,7 @@ New Deprecations * :mod:`http.server`: - * Deprecate :class:`~http.server.CGIHTTPRequestHandler`, + * Deprecate :class:`!CGIHTTPRequestHandler`, to be removed in Python 3.15. Process-based CGI HTTP servers have been out of favor for a very long time. This code was outdated, unmaintained, and rarely used. @@ -1908,7 +1908,7 @@ New Deprecations * :mod:`platform`: - * Deprecate :func:`~platform.java_ver`, + * Deprecate :func:`!platform.java_ver`, to be removed in Python 3.15. This function is only useful for Jython support, has a confusing API, and is largely untested. @@ -1995,8 +1995,7 @@ New Deprecations * :mod:`wave`: - * Deprecate the :meth:`~wave.Wave_read.getmark`, :meth:`!setmark`, - and :meth:`~wave.Wave_read.getmarkers` methods of + * Deprecate the ``getmark()``, ``setmark()`` and ``getmarkers()`` methods of the :class:`~wave.Wave_read` and :class:`~wave.Wave_write` classes, to be removed in Python 3.15. (Contributed by Victor Stinner in :gh:`105096`.) diff --git a/Doc/whatsnew/3.15.rst b/Doc/whatsnew/3.15.rst index 8cf5238e6cc..987cf944972 100644 --- a/Doc/whatsnew/3.15.rst +++ b/Doc/whatsnew/3.15.rst @@ -121,6 +121,23 @@ Deprecated Removed ======= +http.server +----------- + +* Removed the :class:`!CGIHTTPRequestHandler` class + and the ``--cgi`` flag from the :program:`python -m http.server` + command-line interface. They were deprecated in Python 3.13. + (Contributed by Bénédikt Tran in :gh:`133810`.) + + +platform +-------- + +* Removed the :func:`!platform.java_ver` function, + which was deprecated since Python 3.13. + (Contributed by Alexey Makridenko in :gh:`133604`.) + + sysconfig --------- @@ -144,6 +161,15 @@ typing (Contributed by Bénédikt Tran in :gh:`133823`.) +wave +---- + +* Removed the ``getmark()``, ``setmark()`` and ``getmarkers()`` methods + of the :class:`~wave.Wave_read` and :class:`~wave.Wave_write` classes, + which were deprecated since Python 3.13. + (Contributed by Bénédikt Tran in :gh:`133873`.) 
+ + Porting to Python 3.15 ====================== diff --git a/Grammar/python.gram b/Grammar/python.gram index 3a4db11038d..de435537095 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -96,12 +96,12 @@ func_type[mod_ty]: '(' a=[type_expressions] ')' '->' b=expression NEWLINE* ENDMA statements[asdl_stmt_seq*]: a=statement+ { _PyPegen_register_stmts(p, (asdl_stmt_seq*)_PyPegen_seq_flatten(p, a)) } -statement[asdl_stmt_seq*]: - | a=compound_stmt { (asdl_stmt_seq*)_PyPegen_singleton_seq(p, a) } +statement[asdl_stmt_seq*]: + | a=compound_stmt { (asdl_stmt_seq*)_PyPegen_singleton_seq(p, a) } | a[asdl_stmt_seq*]=simple_stmts { a } single_compound_stmt[asdl_stmt_seq*]: - | a=compound_stmt { + | a=compound_stmt { _PyPegen_register_stmts(p, (asdl_stmt_seq*)_PyPegen_singleton_seq(p, a)) } statement_newline[asdl_stmt_seq*]: @@ -449,9 +449,9 @@ except_block[excepthandler_ty]: _PyAST_ExceptHandler(e, ((expr_ty) t)->v.Name.id, b, EXTRA) } | 'except' e=expressions ':' b=block { CHECK_VERSION( - excepthandler_ty, - 14, - "except expressions without parentheses are", + excepthandler_ty, + 14, + "except expressions without parentheses are", _PyAST_ExceptHandler(e, NULL, b, EXTRA)) } | 'except' ':' b=block { _PyAST_ExceptHandler(NULL, NULL, b, EXTRA) } | invalid_except_stmt @@ -463,9 +463,9 @@ except_star_block[excepthandler_ty]: _PyAST_ExceptHandler(e, ((expr_ty) t)->v.Name.id, b, EXTRA) } | 'except' '*' e=expressions ':' b=block { CHECK_VERSION( - excepthandler_ty, - 14, - "except expressions without parentheses are", + excepthandler_ty, + 14, + "except expressions without parentheses are", _PyAST_ExceptHandler(e, NULL, b, EXTRA)) } | invalid_except_star_stmt finally_block[asdl_stmt_seq*]: @@ -977,11 +977,11 @@ tstring_middle[expr_ty]: | tstring_replacement_field | t=TSTRING_MIDDLE { _PyPegen_constant_from_token(p, t) } tstring[expr_ty] (memo): - | a=TSTRING_START b=tstring_middle* c=TSTRING_END { + | a=TSTRING_START b=tstring_middle* c=TSTRING_END { CHECK_VERSION( - expr_ty, - 14, - "t-strings are", + expr_ty, + 14, + "t-strings are", _PyPegen_template_str(p, a, (asdl_expr_seq*)b, c)) } string[expr_ty]: s[Token*]=STRING { _PyPegen_constant_from_string(p, s) } @@ -1383,11 +1383,11 @@ invalid_import: RAISE_SYNTAX_ERROR_STARTING_FROM(token, "Expected one or more names after 'import'") } invalid_dotted_as_name: | dotted_name 'as' !(NAME (',' | ')' | NEWLINE)) a=expression { - RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot use %s as import target", _PyPegen_get_expr_name(a)) } invalid_import_from_as_name: | NAME 'as' !(NAME (',' | ')' | NEWLINE)) a=expression { - RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot use %s as import target", _PyPegen_get_expr_name(a)) } invalid_import_from_targets: @@ -1418,7 +1418,7 @@ invalid_except_stmt: RAISE_SYNTAX_ERROR_STARTING_FROM(a, "multiple exception types must be parenthesized when using 'as'") } | a='except' expression ['as' NAME ] NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") } | a='except' NEWLINE { RAISE_SYNTAX_ERROR("expected ':'") } - | 'except' expression 'as' a=expression { + | 'except' expression 'as' a=expression ':' block { RAISE_SYNTAX_ERROR_KNOWN_LOCATION( a, "cannot use except statement with %s", _PyPegen_get_expr_name(a)) } invalid_except_star_stmt: @@ -1426,7 +1426,7 @@ invalid_except_star_stmt: RAISE_SYNTAX_ERROR_STARTING_FROM(a, "multiple exception types must be parenthesized when using 'as'") } | a='except' '*' expression ['as' NAME ] NEWLINE { 
RAISE_SYNTAX_ERROR("expected ':'") } | a='except' '*' (NEWLINE | ':') { RAISE_SYNTAX_ERROR("expected one or more exception types") } - | 'except' '*' expression 'as' a=expression { + | 'except' '*' expression 'as' a=expression ':' block { RAISE_SYNTAX_ERROR_KNOWN_LOCATION( a, "cannot use except* statement with %s", _PyPegen_get_expr_name(a)) } invalid_finally_stmt: diff --git a/Include/internal/pycore_dict.h b/Include/internal/pycore_dict.h index 754eb88a85c..25bb224921a 100644 --- a/Include/internal/pycore_dict.h +++ b/Include/internal/pycore_dict.h @@ -150,6 +150,8 @@ extern int _PyDict_Pop_KnownHash( Py_hash_t hash, PyObject **result); +extern void _PyDict_Clear_LockHeld(PyObject *op); + #ifdef Py_GIL_DISABLED PyAPI_FUNC(void) _PyDict_EnsureSharedOnRead(PyDictObject *mp); #endif diff --git a/InternalDocs/generators.md b/InternalDocs/generators.md index 87fbb912368..979a5b51521 100644 --- a/InternalDocs/generators.md +++ b/InternalDocs/generators.md @@ -28,9 +28,9 @@ interpreter state. The `frame` of a generator is embedded in the generator object struct as a [`_PyInterpreterFrame`](frames.md) (see `_PyGenObject_HEAD` in -[`pycore_genobject.h`](../Include/internal/pycore_genobject.h)). +[`pycore_interpframe_structs.h`](../Include/internal/pycore_interpframe_structs.h)). This means that we can get the frame from the generator or the generator -from the frame (see `_PyGen_GetGeneratorFromFrame` in the same file). +from the frame (see `_PyGen_GetGeneratorFromFrame` in [`pycore_genobject.h`](../Include/internal/pycore_genobject.h)). Other fields of the generator struct include metadata (such as the name of the generator function) and runtime state information (such as whether its frame is executing, suspended, cleared, etc.). diff --git a/Lib/_compat_pickle.py b/Lib/_compat_pickle.py index 439f8c02f4b..a9813264324 100644 --- a/Lib/_compat_pickle.py +++ b/Lib/_compat_pickle.py @@ -175,7 +175,6 @@ IMPORT_MAPPING.update({ 'SimpleDialog': 'tkinter.simpledialog', 'DocXMLRPCServer': 'xmlrpc.server', 'SimpleHTTPServer': 'http.server', - 'CGIHTTPServer': 'http.server', # For compatibility with broken pickles saved in old Python 3 versions 'UserDict': 'collections', 'UserList': 'collections', @@ -217,8 +216,6 @@ REVERSE_NAME_MAPPING.update({ ('DocXMLRPCServer', 'DocCGIXMLRPCRequestHandler'), ('http.server', 'SimpleHTTPRequestHandler'): ('SimpleHTTPServer', 'SimpleHTTPRequestHandler'), - ('http.server', 'CGIHTTPRequestHandler'): - ('CGIHTTPServer', 'CGIHTTPRequestHandler'), ('_socket', 'socket'): ('socket', '_socketobject'), }) diff --git a/Lib/argparse.py b/Lib/argparse.py index f688c38d0d1..d1a6350c3fd 100644 --- a/Lib/argparse.py +++ b/Lib/argparse.py @@ -205,6 +205,7 @@ class HelpFormatter(object): # =============================== # Section and indentation methods # =============================== + def _indent(self): self._current_indent += self._indent_increment self._level += 1 @@ -256,6 +257,7 @@ class HelpFormatter(object): # ======================== # Message building methods # ======================== + def start_section(self, heading): self._indent() section = self._Section(self, self._current_section, heading) @@ -299,6 +301,7 @@ class HelpFormatter(object): # ======================= # Help-formatting methods # ======================= + def format_help(self): help = self._root_section.format_help() if help: @@ -1467,6 +1470,7 @@ class _ActionsContainer(object): # ==================== # Registration methods # ==================== + def register(self, registry_name, value, object): 
registry = self._registries.setdefault(registry_name, {}) registry[value] = object @@ -1477,6 +1481,7 @@ class _ActionsContainer(object): # ================================== # Namespace default accessor methods # ================================== + def set_defaults(self, **kwargs): self._defaults.update(kwargs) @@ -1496,6 +1501,7 @@ class _ActionsContainer(object): # ======================= # Adding argument actions # ======================= + def add_argument(self, *args, **kwargs): """ add_argument(dest, ..., name=value, ...) @@ -1921,6 +1927,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): # ======================= # Pretty __repr__ methods # ======================= + def _get_kwargs(self): names = [ 'prog', @@ -1935,6 +1942,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): # ================================== # Optional/Positional adding methods # ================================== + def add_subparsers(self, **kwargs): if self._subparsers is not None: raise ValueError('cannot have multiple subparser arguments') @@ -1988,6 +1996,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): # ===================================== # Command line argument parsing methods # ===================================== + def parse_args(self, args=None, namespace=None): args, argv = self.parse_known_args(args, namespace) if argv: @@ -2582,6 +2591,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): # ======================== # Value conversion methods # ======================== + def _get_values(self, action, arg_strings): # optional argument produces a default when not present if not arg_strings and action.nargs == OPTIONAL: @@ -2681,6 +2691,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): # ======================= # Help-formatting methods # ======================= + def format_usage(self): formatter = self._get_formatter() formatter.add_usage(self.usage, self._actions, @@ -2718,6 +2729,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): # ===================== # Help-printing methods # ===================== + def print_usage(self, file=None): if file is None: file = _sys.stdout @@ -2739,6 +2751,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): # =============== # Exiting methods # =============== + def exit(self, status=0, message=None): if message: self._print_message(message, _sys.stderr) diff --git a/Lib/asyncio/futures.py b/Lib/asyncio/futures.py index d1df6707302..6bd00a64478 100644 --- a/Lib/asyncio/futures.py +++ b/Lib/asyncio/futures.py @@ -351,22 +351,19 @@ def _set_concurrent_future_state(concurrent, source): def _copy_future_state(source, dest): """Internal helper to copy state from another Future. - The other Future may be a concurrent.futures.Future. + The other Future must be a concurrent.futures.Future. """ - assert source.done() if dest.cancelled(): return assert not dest.done() - if source.cancelled(): + done, cancelled, result, exception = source._get_snapshot() + assert done + if cancelled: dest.cancel() + elif exception is not None: + dest.set_exception(_convert_future_exc(exception)) else: - exception = source.exception() - if exception is not None: - dest.set_exception(_convert_future_exc(exception)) - else: - result = source.result() - dest.set_result(result) - + dest.set_result(result) def _chain_future(source, destination): """Chain two futures so that when one completes, so does the other. 
diff --git a/Lib/compression/bz2/__init__.py b/Lib/compression/bz2.py index 16815d6cd20..16815d6cd20 100644 --- a/Lib/compression/bz2/__init__.py +++ b/Lib/compression/bz2.py diff --git a/Lib/compression/gzip/__init__.py b/Lib/compression/gzip.py index 552f48f948a..552f48f948a 100644 --- a/Lib/compression/gzip/__init__.py +++ b/Lib/compression/gzip.py diff --git a/Lib/compression/lzma/__init__.py b/Lib/compression/lzma.py index b4bc7ccb1db..b4bc7ccb1db 100644 --- a/Lib/compression/lzma/__init__.py +++ b/Lib/compression/lzma.py diff --git a/Lib/compression/zlib/__init__.py b/Lib/compression/zlib.py index 3aa7e2db90e..3aa7e2db90e 100644 --- a/Lib/compression/zlib/__init__.py +++ b/Lib/compression/zlib.py diff --git a/Lib/compression/zstd/__init__.py b/Lib/compression/zstd/__init__.py index e7b2f427164..84b25914b0a 100644 --- a/Lib/compression/zstd/__init__.py +++ b/Lib/compression/zstd/__init__.py @@ -2,28 +2,28 @@ __all__ = ( # compression.zstd - "COMPRESSION_LEVEL_DEFAULT", - "compress", - "CompressionParameter", - "decompress", - "DecompressionParameter", - "finalize_dict", - "get_frame_info", - "Strategy", - "train_dict", + 'COMPRESSION_LEVEL_DEFAULT', + 'compress', + 'CompressionParameter', + 'decompress', + 'DecompressionParameter', + 'finalize_dict', + 'get_frame_info', + 'Strategy', + 'train_dict', # compression.zstd._zstdfile - "open", - "ZstdFile", + 'open', + 'ZstdFile', # _zstd - "get_frame_size", - "zstd_version", - "zstd_version_info", - "ZstdCompressor", - "ZstdDecompressor", - "ZstdDict", - "ZstdError", + 'get_frame_size', + 'zstd_version', + 'zstd_version_info', + 'ZstdCompressor', + 'ZstdDecompressor', + 'ZstdDict', + 'ZstdError', ) import _zstd @@ -43,6 +43,7 @@ COMPRESSION_LEVEL_DEFAULT = _zstd.ZSTD_CLEVEL_DEFAULT class FrameInfo: """Information about a Zstandard frame.""" + __slots__ = 'decompressed_size', 'dictionary_id' def __init__(self, decompressed_size, dictionary_id): @@ -125,13 +126,13 @@ def finalize_dict(zstd_dict, /, samples, dict_size, level): chunks = b''.join(samples) chunk_sizes = tuple(_nbytes(sample) for sample in samples) if not chunks: - raise ValueError("The samples are empty content, can't finalize the" + raise ValueError("The samples are empty content, can't finalize the " "dictionary.") - dict_content = _zstd.finalize_dict(zstd_dict.dict_content, - chunks, chunk_sizes, - dict_size, level) + dict_content = _zstd.finalize_dict(zstd_dict.dict_content, chunks, + chunk_sizes, dict_size, level) return ZstdDict(dict_content) + def compress(data, level=None, options=None, zstd_dict=None): """Return Zstandard compressed *data* as bytes. @@ -147,6 +148,7 @@ def compress(data, level=None, options=None, zstd_dict=None): comp = ZstdCompressor(level=level, options=options, zstd_dict=zstd_dict) return comp.compress(data, mode=ZstdCompressor.FLUSH_FRAME) + def decompress(data, zstd_dict=None, options=None): """Decompress one or more frames of Zstandard compressed *data*. 
@@ -162,12 +164,12 @@ def decompress(data, zstd_dict=None, options=None): decomp = ZstdDecompressor(options=options, zstd_dict=zstd_dict) results.append(decomp.decompress(data)) if not decomp.eof: - raise ZstdError("Compressed data ended before the " - "end-of-stream marker was reached") + raise ZstdError('Compressed data ended before the ' + 'end-of-stream marker was reached') data = decomp.unused_data if not data: break - return b"".join(results) + return b''.join(results) class CompressionParameter(enum.IntEnum): diff --git a/Lib/compression/zstd/_zstdfile.py b/Lib/compression/zstd/_zstdfile.py index 0086c13d3c1..8770e576f50 100644 --- a/Lib/compression/zstd/_zstdfile.py +++ b/Lib/compression/zstd/_zstdfile.py @@ -4,7 +4,7 @@ from _zstd import (ZstdCompressor, ZstdDecompressor, ZstdError, ZSTD_DStreamOutSize) from compression._common import _streams -__all__ = ("ZstdFile", "open") +__all__ = ('ZstdFile', 'open') _MODE_CLOSED = 0 _MODE_READ = 1 @@ -31,15 +31,15 @@ class ZstdFile(_streams.BaseStream): FLUSH_BLOCK = ZstdCompressor.FLUSH_BLOCK FLUSH_FRAME = ZstdCompressor.FLUSH_FRAME - def __init__(self, file, /, mode="r", *, + def __init__(self, file, /, mode='r', *, level=None, options=None, zstd_dict=None): """Open a Zstandard compressed file in binary mode. *file* can be either an file-like object, or a file name to open. - *mode* can be "r" for reading (default), "w" for (over)writing, "x" for - creating exclusively, or "a" for appending. These can equivalently be - given as "rb", "wb", "xb" and "ab" respectively. + *mode* can be 'r' for reading (default), 'w' for (over)writing, 'x' for + creating exclusively, or 'a' for appending. These can equivalently be + given as 'rb', 'wb', 'xb' and 'ab' respectively. *level* is an optional int specifying the compression level to use, or COMPRESSION_LEVEL_DEFAULT if not given. 
@@ -57,33 +57,33 @@ class ZstdFile(_streams.BaseStream): self._buffer = None if not isinstance(mode, str): - raise ValueError("mode must be a str") + raise ValueError('mode must be a str') if options is not None and not isinstance(options, dict): - raise TypeError("options must be a dict or None") - mode = mode.removesuffix("b") # handle rb, wb, xb, ab - if mode == "r": + raise TypeError('options must be a dict or None') + mode = mode.removesuffix('b') # handle rb, wb, xb, ab + if mode == 'r': if level is not None: - raise TypeError("level is illegal in read mode") + raise TypeError('level is illegal in read mode') self._mode = _MODE_READ - elif mode in {"w", "a", "x"}: + elif mode in {'w', 'a', 'x'}: if level is not None and not isinstance(level, int): - raise TypeError("level must be int or None") + raise TypeError('level must be int or None') self._mode = _MODE_WRITE self._compressor = ZstdCompressor(level=level, options=options, zstd_dict=zstd_dict) self._pos = 0 else: - raise ValueError(f"Invalid mode: {mode!r}") + raise ValueError(f'Invalid mode: {mode!r}') if isinstance(file, (str, bytes, PathLike)): self._fp = io.open(file, f'{mode}b') self._close_fp = True - elif ((mode == 'r' and hasattr(file, "read")) - or (mode != 'r' and hasattr(file, "write"))): + elif ((mode == 'r' and hasattr(file, 'read')) + or (mode != 'r' and hasattr(file, 'write'))): self._fp = file else: - raise TypeError("file must be a file-like object " - "or a str, bytes, or PathLike object") + raise TypeError('file must be a file-like object ' + 'or a str, bytes, or PathLike object') if self._mode == _MODE_READ: raw = _streams.DecompressReader( @@ -151,22 +151,22 @@ class ZstdFile(_streams.BaseStream): return self._check_not_closed() if mode not in {self.FLUSH_BLOCK, self.FLUSH_FRAME}: - raise ValueError("Invalid mode argument, expected either " - "ZstdFile.FLUSH_FRAME or " - "ZstdFile.FLUSH_BLOCK") + raise ValueError('Invalid mode argument, expected either ' + 'ZstdFile.FLUSH_FRAME or ' + 'ZstdFile.FLUSH_BLOCK') if self._compressor.last_mode == mode: return # Flush zstd block/frame, and write. data = self._compressor.flush(mode) self._fp.write(data) - if hasattr(self._fp, "flush"): + if hasattr(self._fp, 'flush'): self._fp.flush() def read(self, size=-1): """Read up to size uncompressed bytes from the file. If size is negative or omitted, read until EOF is reached. - Returns b"" if the file is already at EOF. + Returns b'' if the file is already at EOF. """ if size is None: size = -1 @@ -178,7 +178,7 @@ class ZstdFile(_streams.BaseStream): making multiple reads from the underlying stream. Reads up to a buffer's worth of data if size is negative. - Returns b"" if the file is at EOF. + Returns b'' if the file is at EOF. """ self._check_can_read() if size < 0: @@ -293,7 +293,7 @@ class ZstdFile(_streams.BaseStream): return self._mode == _MODE_WRITE -def open(file, /, mode="rb", *, level=None, options=None, zstd_dict=None, +def open(file, /, mode='rb', *, level=None, options=None, zstd_dict=None, encoding=None, errors=None, newline=None): """Open a Zstandard compressed file in binary or text mode. @@ -301,8 +301,8 @@ def open(file, /, mode="rb", *, level=None, options=None, zstd_dict=None, in which case the named file is opened, or it can be an existing file object to read from or write to. - The mode parameter can be "r", "rb" (default), "w", "wb", "x", "xb", "a", - "ab" for binary mode, or "rt", "wt", "xt", "at" for text mode. 
+ The mode parameter can be 'r', 'rb' (default), 'w', 'wb', 'x', 'xb', 'a', + 'ab' for binary mode, or 'rt', 'wt', 'xt', 'at' for text mode. The level, options, and zstd_dict parameters specify the settings the same as ZstdFile. @@ -323,19 +323,19 @@ def open(file, /, mode="rb", *, level=None, options=None, zstd_dict=None, behavior, and line ending(s). """ - text_mode = "t" in mode - mode = mode.replace("t", "") + text_mode = 't' in mode + mode = mode.replace('t', '') if text_mode: - if "b" in mode: - raise ValueError(f"Invalid mode: {mode!r}") + if 'b' in mode: + raise ValueError(f'Invalid mode: {mode!r}') else: if encoding is not None: - raise ValueError("Argument 'encoding' not supported in binary mode") + raise ValueError('Argument "encoding" not supported in binary mode') if errors is not None: - raise ValueError("Argument 'errors' not supported in binary mode") + raise ValueError('Argument "errors" not supported in binary mode') if newline is not None: - raise ValueError("Argument 'newline' not supported in binary mode") + raise ValueError('Argument "newline" not supported in binary mode') binary_file = ZstdFile(file, mode, level=level, options=options, zstd_dict=zstd_dict) diff --git a/Lib/concurrent/futures/_base.py b/Lib/concurrent/futures/_base.py index d98b1ebdd58..f506ce68aea 100644 --- a/Lib/concurrent/futures/_base.py +++ b/Lib/concurrent/futures/_base.py @@ -558,6 +558,33 @@ class Future(object): self._condition.notify_all() self._invoke_callbacks() + def _get_snapshot(self): + """Get a snapshot of the future's current state. + + This method atomically retrieves the state in one lock acquisition, + which is significantly faster than multiple method calls. + + Returns: + Tuple of (done, cancelled, result, exception) + - done: True if the future is done (cancelled or finished) + - cancelled: True if the future was cancelled + - result: The result if available and not cancelled + - exception: The exception if available and not cancelled + """ + # Fast path: check if already finished without lock + if self._state == FINISHED: + return True, False, self._result, self._exception + + # Need lock for other states since they can change + with self._condition: + # We have to check the state again after acquiring the lock + # because it may have changed in the meantime. 
+ if self._state == FINISHED: + return True, False, self._result, self._exception + if self._state in {CANCELLED, CANCELLED_AND_NOTIFIED}: + return True, True, None, None + return False, False, None, None + __class_getitem__ = classmethod(types.GenericAlias) class Executor(object): diff --git a/Lib/getpass.py b/Lib/getpass.py index f571425e541..1dd40e25e09 100644 --- a/Lib/getpass.py +++ b/Lib/getpass.py @@ -119,9 +119,9 @@ def win_getpass(prompt='Password: ', stream=None, *, echo_char=None): raise KeyboardInterrupt if c == '\b': if echo_char and pw: - msvcrt.putch('\b') - msvcrt.putch(' ') - msvcrt.putch('\b') + msvcrt.putwch('\b') + msvcrt.putwch(' ') + msvcrt.putwch('\b') pw = pw[:-1] else: pw = pw + c @@ -132,14 +132,15 @@ def win_getpass(prompt='Password: ', stream=None, *, echo_char=None): return pw -def fallback_getpass(prompt='Password: ', stream=None): +def fallback_getpass(prompt='Password: ', stream=None, *, echo_char=None): + _check_echo_char(echo_char) import warnings warnings.warn("Can not control echo on the terminal.", GetPassWarning, stacklevel=2) if not stream: stream = sys.stderr print("Warning: Password input may be echoed.", file=stream) - return _raw_input(prompt, stream) + return _raw_input(prompt, stream, echo_char=echo_char) def _check_echo_char(echo_char): diff --git a/Lib/http/client.py b/Lib/http/client.py index 33a858d34ae..e7a1c7bc3b2 100644 --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -181,11 +181,10 @@ def _strip_ipv6_iface(enc_name: bytes) -> bytes: return enc_name class HTTPMessage(email.message.Message): - # XXX The only usage of this method is in - # http.server.CGIHTTPRequestHandler. Maybe move the code there so - # that it doesn't need to be part of the public API. The API has - # never been defined so this could cause backwards compatibility - # issues. + + # The getallmatchingheaders() method was only used by the CGI handler + # that was removed in Python 3.15. However, since the public API was not + # properly defined, it will be kept for backwards compatibility reasons. def getallmatchingheaders(self, name): """Find all header lines matching a given header name. diff --git a/Lib/http/server.py b/Lib/http/server.py index 64f766f9bc2..abf9f87a1fc 100644 --- a/Lib/http/server.py +++ b/Lib/http/server.py @@ -1,29 +1,10 @@ """HTTP server classes. Note: BaseHTTPRequestHandler doesn't implement any HTTP request; see -SimpleHTTPRequestHandler for simple implementations of GET, HEAD and POST, -and (deprecated) CGIHTTPRequestHandler for CGI scripts. +SimpleHTTPRequestHandler for simple implementations of GET, HEAD and POST. It does, however, optionally implement HTTP/1.1 persistent connections. -Notes on CGIHTTPRequestHandler ------------------------------- - -This class is deprecated. It implements GET and POST requests to cgi-bin scripts. - -If the os.fork() function is not present (Windows), subprocess.Popen() is used, -with slightly altered but never documented semantics. Use from a threaded -process is likely to trigger a warning at os.fork() time. - -In all cases, the implementation is intentionally naive -- all -requests are executed synchronously. - -SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL --- it may execute arbitrary Python code or external programs. - -Note that status code 200 is sent prior to execution of a CGI script, so -scripts cannot send other status codes such as 302 (redirect). 
- XXX To do: - log requests even later (to capture byte count) @@ -86,10 +67,8 @@ __all__ = [ "HTTPServer", "ThreadingHTTPServer", "HTTPSServer", "ThreadingHTTPSServer", "BaseHTTPRequestHandler", "SimpleHTTPRequestHandler", - "CGIHTTPRequestHandler", ] -import copy import datetime import email.utils import html @@ -99,7 +78,6 @@ import itertools import mimetypes import os import posixpath -import select import shutil import socket import socketserver @@ -750,7 +728,7 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): f = None if os.path.isdir(path): parts = urllib.parse.urlsplit(self.path) - if not parts.path.endswith('/'): + if not parts.path.endswith(('/', '%2f', '%2F')): # redirect browser - doing basically what apache does self.send_response(HTTPStatus.MOVED_PERMANENTLY) new_parts = (parts[0], parts[1], parts[2] + '/', @@ -840,11 +818,14 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): return None list.sort(key=lambda a: a.lower()) r = [] + displaypath = self.path + displaypath = displaypath.split('#', 1)[0] + displaypath = displaypath.split('?', 1)[0] try: - displaypath = urllib.parse.unquote(self.path, + displaypath = urllib.parse.unquote(displaypath, errors='surrogatepass') except UnicodeDecodeError: - displaypath = urllib.parse.unquote(self.path) + displaypath = urllib.parse.unquote(displaypath) displaypath = html.escape(displaypath, quote=False) enc = sys.getfilesystemencoding() title = f'Directory listing for {displaypath}' @@ -890,14 +871,14 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): """ # abandon query parameters - path = path.split('?',1)[0] - path = path.split('#',1)[0] + path = path.split('#', 1)[0] + path = path.split('?', 1)[0] # Don't forget explicit trailing slash when normalizing. Issue17324 - trailing_slash = path.rstrip().endswith('/') try: path = urllib.parse.unquote(path, errors='surrogatepass') except UnicodeDecodeError: path = urllib.parse.unquote(path) + trailing_slash = path.endswith('/') path = posixpath.normpath(path) words = path.split('/') words = filter(None, words) @@ -953,56 +934,6 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): return 'application/octet-stream' -# Utilities for CGIHTTPRequestHandler - -def _url_collapse_path(path): - """ - Given a URL path, remove extra '/'s and '.' path elements and collapse - any '..' references and returns a collapsed path. - - Implements something akin to RFC-2396 5.2 step 6 to parse relative paths. - The utility of this function is limited to is_cgi method and helps - preventing some security attacks. - - Returns: The reconstituted URL, which will always start with a '/'. - - Raises: IndexError if too many '..' occur within the path. - - """ - # Query component should not be involved. - path, _, query = path.partition('?') - path = urllib.parse.unquote(path) - - # Similar to os.path.split(os.path.normpath(path)) but specific to URL - # path semantics rather than local operating system semantics. - path_parts = path.split('/') - head_parts = [] - for part in path_parts[:-1]: - if part == '..': - head_parts.pop() # IndexError if more '..' 
than prior parts - elif part and part != '.': - head_parts.append( part ) - if path_parts: - tail_part = path_parts.pop() - if tail_part: - if tail_part == '..': - head_parts.pop() - tail_part = '' - elif tail_part == '.': - tail_part = '' - else: - tail_part = '' - - if query: - tail_part = '?'.join((tail_part, query)) - - splitpath = ('/' + '/'.join(head_parts), tail_part) - collapsed_path = "/".join(splitpath) - - return collapsed_path - - - nobody = None def nobody_uid(): @@ -1026,274 +957,6 @@ def executable(path): return os.access(path, os.X_OK) -class CGIHTTPRequestHandler(SimpleHTTPRequestHandler): - - """Complete HTTP server with GET, HEAD and POST commands. - - GET and HEAD also support running CGI scripts. - - The POST command is *only* implemented for CGI scripts. - - """ - - def __init__(self, *args, **kwargs): - import warnings - warnings._deprecated("http.server.CGIHTTPRequestHandler", - remove=(3, 15)) - super().__init__(*args, **kwargs) - - # Determine platform specifics - have_fork = hasattr(os, 'fork') - - # Make rfile unbuffered -- we need to read one line and then pass - # the rest to a subprocess, so we can't use buffered input. - rbufsize = 0 - - def do_POST(self): - """Serve a POST request. - - This is only implemented for CGI scripts. - - """ - - if self.is_cgi(): - self.run_cgi() - else: - self.send_error( - HTTPStatus.NOT_IMPLEMENTED, - "Can only POST to CGI scripts") - - def send_head(self): - """Version of send_head that support CGI scripts""" - if self.is_cgi(): - return self.run_cgi() - else: - return SimpleHTTPRequestHandler.send_head(self) - - def is_cgi(self): - """Test whether self.path corresponds to a CGI script. - - Returns True and updates the cgi_info attribute to the tuple - (dir, rest) if self.path requires running a CGI script. - Returns False otherwise. - - If any exception is raised, the caller should assume that - self.path was rejected as invalid and act accordingly. - - The default implementation tests whether the normalized url - path begins with one of the strings in self.cgi_directories - (and the next character is a '/' or the end of the string). - - """ - collapsed_path = _url_collapse_path(self.path) - dir_sep = collapsed_path.find('/', 1) - while dir_sep > 0 and not collapsed_path[:dir_sep] in self.cgi_directories: - dir_sep = collapsed_path.find('/', dir_sep+1) - if dir_sep > 0: - head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:] - self.cgi_info = head, tail - return True - return False - - - cgi_directories = ['/cgi-bin', '/htbin'] - - def is_executable(self, path): - """Test whether argument path is an executable file.""" - return executable(path) - - def is_python(self, path): - """Test whether argument path is a Python script.""" - head, tail = os.path.splitext(path) - return tail.lower() in (".py", ".pyw") - - def run_cgi(self): - """Execute a CGI script.""" - dir, rest = self.cgi_info - path = dir + '/' + rest - i = path.find('/', len(dir)+1) - while i >= 0: - nextdir = path[:i] - nextrest = path[i+1:] - - scriptdir = self.translate_path(nextdir) - if os.path.isdir(scriptdir): - dir, rest = nextdir, nextrest - i = path.find('/', len(dir)+1) - else: - break - - # find an explicit query string, if present. - rest, _, query = rest.partition('?') - - # dissect the part after the directory name into a script name & - # a possible additional path, to be stored in PATH_INFO. 
- i = rest.find('/') - if i >= 0: - script, rest = rest[:i], rest[i:] - else: - script, rest = rest, '' - - scriptname = dir + '/' + script - scriptfile = self.translate_path(scriptname) - if not os.path.exists(scriptfile): - self.send_error( - HTTPStatus.NOT_FOUND, - "No such CGI script (%r)" % scriptname) - return - if not os.path.isfile(scriptfile): - self.send_error( - HTTPStatus.FORBIDDEN, - "CGI script is not a plain file (%r)" % scriptname) - return - ispy = self.is_python(scriptname) - if self.have_fork or not ispy: - if not self.is_executable(scriptfile): - self.send_error( - HTTPStatus.FORBIDDEN, - "CGI script is not executable (%r)" % scriptname) - return - - # Reference: https://www6.uniovi.es/~antonio/ncsa_httpd/cgi/env.html - # XXX Much of the following could be prepared ahead of time! - env = copy.deepcopy(os.environ) - env['SERVER_SOFTWARE'] = self.version_string() - env['SERVER_NAME'] = self.server.server_name - env['GATEWAY_INTERFACE'] = 'CGI/1.1' - env['SERVER_PROTOCOL'] = self.protocol_version - env['SERVER_PORT'] = str(self.server.server_port) - env['REQUEST_METHOD'] = self.command - uqrest = urllib.parse.unquote(rest) - env['PATH_INFO'] = uqrest - env['PATH_TRANSLATED'] = self.translate_path(uqrest) - env['SCRIPT_NAME'] = scriptname - env['QUERY_STRING'] = query - env['REMOTE_ADDR'] = self.client_address[0] - authorization = self.headers.get("authorization") - if authorization: - authorization = authorization.split() - if len(authorization) == 2: - import base64, binascii - env['AUTH_TYPE'] = authorization[0] - if authorization[0].lower() == "basic": - try: - authorization = authorization[1].encode('ascii') - authorization = base64.decodebytes(authorization).\ - decode('ascii') - except (binascii.Error, UnicodeError): - pass - else: - authorization = authorization.split(':') - if len(authorization) == 2: - env['REMOTE_USER'] = authorization[0] - # XXX REMOTE_IDENT - if self.headers.get('content-type') is None: - env['CONTENT_TYPE'] = self.headers.get_content_type() - else: - env['CONTENT_TYPE'] = self.headers['content-type'] - length = self.headers.get('content-length') - if length: - env['CONTENT_LENGTH'] = length - referer = self.headers.get('referer') - if referer: - env['HTTP_REFERER'] = referer - accept = self.headers.get_all('accept', ()) - env['HTTP_ACCEPT'] = ','.join(accept) - ua = self.headers.get('user-agent') - if ua: - env['HTTP_USER_AGENT'] = ua - co = filter(None, self.headers.get_all('cookie', [])) - cookie_str = ', '.join(co) - if cookie_str: - env['HTTP_COOKIE'] = cookie_str - # XXX Other HTTP_* headers - # Since we're setting the env in the parent, provide empty - # values to override previously set values - for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH', - 'HTTP_USER_AGENT', 'HTTP_COOKIE', 'HTTP_REFERER'): - env.setdefault(k, "") - - self.send_response(HTTPStatus.OK, "Script output follows") - self.flush_headers() - - decoded_query = query.replace('+', ' ') - - if self.have_fork: - # Unix -- fork as we should - args = [script] - if '=' not in decoded_query: - args.append(decoded_query) - nobody = nobody_uid() - self.wfile.flush() # Always flush before forking - pid = os.fork() - if pid != 0: - # Parent - pid, sts = os.waitpid(pid, 0) - # throw away additional data [see bug #427345] - while select.select([self.rfile], [], [], 0)[0]: - if not self.rfile.read(1): - break - exitcode = os.waitstatus_to_exitcode(sts) - if exitcode: - self.log_error(f"CGI script exit code {exitcode}") - return - # Child - try: - try: - os.setuid(nobody) - 
except OSError: - pass - os.dup2(self.rfile.fileno(), 0) - os.dup2(self.wfile.fileno(), 1) - os.execve(scriptfile, args, env) - except: - self.server.handle_error(self.request, self.client_address) - os._exit(127) - - else: - # Non-Unix -- use subprocess - import subprocess - cmdline = [scriptfile] - if self.is_python(scriptfile): - interp = sys.executable - if interp.lower().endswith("w.exe"): - # On Windows, use python.exe, not pythonw.exe - interp = interp[:-5] + interp[-4:] - cmdline = [interp, '-u'] + cmdline - if '=' not in query: - cmdline.append(query) - self.log_message("command: %s", subprocess.list2cmdline(cmdline)) - try: - nbytes = int(length) - except (TypeError, ValueError): - nbytes = 0 - p = subprocess.Popen(cmdline, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - env = env - ) - if self.command.lower() == "post" and nbytes > 0: - data = self.rfile.read(nbytes) - else: - data = None - # throw away additional data [see bug #427345] - while select.select([self.rfile._sock], [], [], 0)[0]: - if not self.rfile._sock.recv(1): - break - stdout, stderr = p.communicate(data) - self.wfile.write(stdout) - if stderr: - self.log_error('%s', stderr) - p.stderr.close() - p.stdout.close() - status = p.returncode - if status: - self.log_error("CGI script exit status %#x", status) - else: - self.log_message("CGI script exited OK") - - def _get_best_family(*address): infos = socket.getaddrinfo( *address, @@ -1336,13 +999,12 @@ def test(HandlerClass=BaseHTTPRequestHandler, print("\nKeyboard interrupt received, exiting.") sys.exit(0) + if __name__ == '__main__': import argparse import contextlib parser = argparse.ArgumentParser(color=True) - parser.add_argument('--cgi', action='store_true', - help='run as CGI server') parser.add_argument('-b', '--bind', metavar='ADDRESS', help='bind to this address ' '(default: all interfaces)') @@ -1378,11 +1040,6 @@ if __name__ == '__main__': except OSError as e: parser.error(f"Failed to read TLS password file: {e}") - if args.cgi: - handler_class = CGIHTTPRequestHandler - else: - handler_class = SimpleHTTPRequestHandler - # ensure dual-stack is not disabled; ref #38907 class DualStackServer(ThreadingHTTPServer): @@ -1398,7 +1055,7 @@ if __name__ == '__main__': directory=args.directory) test( - HandlerClass=handler_class, + HandlerClass=SimpleHTTPRequestHandler, ServerClass=DualStackServer, port=args.port, bind=args.bind, diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py index 283a1055182..f2d1a02629d 100644 --- a/Lib/logging/__init__.py +++ b/Lib/logging/__init__.py @@ -2057,6 +2057,15 @@ def basicConfig(**kwargs): created FileHandler, causing it to be used when the file is opened in text mode. If not specified, the default value is `backslashreplace`. + formatter If specified, set this formatter instance for all involved + handlers. + If not specified, the default is to create and use an instance of + `logging.Formatter` based on arguments 'format', 'datefmt' and + 'style'. + When 'formatter' is specified together with any of the three + arguments 'format', 'datefmt' and 'style', a `ValueError` + is raised to signal that these arguments would lose meaning + otherwise. Note that you could specify a stream created using open(filename, mode) rather than passing the filename and mode in. However, it should be @@ -2079,6 +2088,9 @@ def basicConfig(**kwargs): .. versionchanged:: 3.9 Added the ``encoding`` and ``errors`` parameters. + + .. versionchanged:: 3.15 + Added the ``formatter`` parameter. 
""" # Add thread safety in case someone mistakenly calls # basicConfig() from multiple threads @@ -2114,13 +2126,19 @@ def basicConfig(**kwargs): stream = kwargs.pop("stream", None) h = StreamHandler(stream) handlers = [h] - dfs = kwargs.pop("datefmt", None) - style = kwargs.pop("style", '%') - if style not in _STYLES: - raise ValueError('Style must be one of: %s' % ','.join( - _STYLES.keys())) - fs = kwargs.pop("format", _STYLES[style][1]) - fmt = Formatter(fs, dfs, style) + fmt = kwargs.pop("formatter", None) + if fmt is None: + dfs = kwargs.pop("datefmt", None) + style = kwargs.pop("style", '%') + if style not in _STYLES: + raise ValueError('Style must be one of: %s' % ','.join( + _STYLES.keys())) + fs = kwargs.pop("format", _STYLES[style][1]) + fmt = Formatter(fs, dfs, style) + else: + for forbidden_key in ("datefmt", "format", "style"): + if forbidden_key in kwargs: + raise ValueError(f"{forbidden_key!r} should not be specified together with 'formatter'") for h in handlers: if h.formatter is None: h.setFormatter(fmt) diff --git a/Lib/platform.py b/Lib/platform.py index 55e211212d4..077db81264a 100644 --- a/Lib/platform.py +++ b/Lib/platform.py @@ -29,7 +29,7 @@ # # History: # -# <see CVS and SVN checkin messages for history> +# <see checkin messages for history> # # 1.0.9 - added invalidate_caches() function to invalidate cached values # 1.0.8 - changed Windows support to read version from kernel32.dll @@ -110,7 +110,7 @@ __copyright__ = """ """ -__version__ = '1.0.9' +__version__ = '1.1.0' import collections import os @@ -528,53 +528,6 @@ def ios_ver(system="", release="", model="", is_simulator=False): return IOSVersionInfo(system, release, model, is_simulator) -def _java_getprop(name, default): - """This private helper is deprecated in 3.13 and will be removed in 3.15""" - from java.lang import System - try: - value = System.getProperty(name) - if value is None: - return default - return value - except AttributeError: - return default - -def java_ver(release='', vendor='', vminfo=('', '', ''), osinfo=('', '', '')): - - """ Version interface for Jython. - - Returns a tuple (release, vendor, vminfo, osinfo) with vminfo being - a tuple (vm_name, vm_release, vm_vendor) and osinfo being a - tuple (os_name, os_version, os_arch). - - Values which cannot be determined are set to the defaults - given as parameters (which all default to ''). 
- - """ - import warnings - warnings._deprecated('java_ver', remove=(3, 15)) - # Import the needed APIs - try: - import java.lang # noqa: F401 - except ImportError: - return release, vendor, vminfo, osinfo - - vendor = _java_getprop('java.vendor', vendor) - release = _java_getprop('java.version', release) - vm_name, vm_release, vm_vendor = vminfo - vm_name = _java_getprop('java.vm.name', vm_name) - vm_vendor = _java_getprop('java.vm.vendor', vm_vendor) - vm_release = _java_getprop('java.vm.version', vm_release) - vminfo = vm_name, vm_release, vm_vendor - os_name, os_version, os_arch = osinfo - os_arch = _java_getprop('java.os.arch', os_arch) - os_name = _java_getprop('java.os.name', os_name) - os_version = _java_getprop('java.os.version', os_version) - osinfo = os_name, os_version, os_arch - - return release, vendor, vminfo, osinfo - - AndroidVer = collections.namedtuple( "AndroidVer", "release api_level manufacturer model device is_emulator") @@ -1034,13 +987,6 @@ def uname(): version = '16bit' system = 'Windows' - elif system[:4] == 'java': - release, vendor, vminfo, osinfo = java_ver() - system = 'Java' - version = ', '.join(vminfo) - if not version: - version = vendor - # System specific extensions if system == 'OpenVMS': # OpenVMS seems to have release and version mixed up @@ -1370,15 +1316,6 @@ def platform(aliased=False, terse=False): platform = _platform(system, release, machine, processor, 'with', libcname+libcversion) - elif system == 'Java': - # Java platforms - r, v, vminfo, (os_name, os_version, os_arch) = java_ver() - if terse or not os_name: - platform = _platform(system, release, version) - else: - platform = _platform(system, release, version, - 'on', - os_name, os_version, os_arch) else: # Generic handler diff --git a/Lib/string/__init__.py b/Lib/string/__init__.py index eab5067c9b1..b788d7136f1 100644 --- a/Lib/string/__init__.py +++ b/Lib/string/__init__.py @@ -191,16 +191,14 @@ class Template: ######################################################################## -# the Formatter class -# see PEP 3101 for details and purpose of this class - -# The hard parts are reused from the C implementation. They're exposed as "_" -# prefixed methods of str. - +# The Formatter class (PEP 3101). +# # The overall parser is implemented in _string.formatter_parser. -# The field name parser is implemented in _string.formatter_field_name_split +# The field name parser is implemented in _string.formatter_field_name_split. 
class Formatter: + """See PEP 3101 for details and purpose of this class.""" + def format(self, format_string, /, *args, **kwargs): return self.vformat(format_string, args, kwargs) @@ -264,22 +262,18 @@ class Formatter: return ''.join(result), auto_arg_index - def get_value(self, key, args, kwargs): if isinstance(key, int): return args[key] else: return kwargs[key] - def check_unused_args(self, used_args, args, kwargs): pass - def format_field(self, value, format_spec): return format(value, format_spec) - def convert_field(self, value, conversion): # do any conversion on the resulting object if conversion is None: @@ -292,28 +286,26 @@ class Formatter: return ascii(value) raise ValueError("Unknown conversion specifier {0!s}".format(conversion)) - - # returns an iterable that contains tuples of the form: - # (literal_text, field_name, format_spec, conversion) - # literal_text can be zero length - # field_name can be None, in which case there's no - # object to format and output - # if field_name is not None, it is looked up, formatted - # with format_spec and conversion and then used def parse(self, format_string): + """ + Return an iterable that contains tuples of the form + (literal_text, field_name, format_spec, conversion). + + *field_name* can be None, in which case there's no object + to format and output; otherwise, it is looked up and + formatted with *format_spec* and *conversion*. + """ return _string.formatter_parser(format_string) - - # given a field_name, find the object it references. - # field_name: the field being looked up, e.g. "0.name" - # or "lookup[3]" - # used_args: a set of which args have been used - # args, kwargs: as passed in to vformat def get_field(self, field_name, args, kwargs): - first, rest = _string.formatter_field_name_split(field_name) + """Find the object referenced by a given field name. + The field name *field_name* can be for instance "0.name" + or "lookup[3]". The *args* and *kwargs* arguments are + passed to get_value(). 
+ """ + first, rest = _string.formatter_field_name_split(field_name) obj = self.get_value(first, args, kwargs) - # loop through the rest of the field_name, doing # getattr or getitem as needed for is_attr, i in rest: @@ -321,5 +313,4 @@ class Formatter: obj = getattr(obj, i) else: obj = obj[i] - return obj, first diff --git a/Lib/tarfile.py b/Lib/tarfile.py index c0f5a609b9f..212b71f6509 100644 --- a/Lib/tarfile.py +++ b/Lib/tarfile.py @@ -2065,7 +2065,7 @@ class TarFile(object): "gz": "gzopen", # gzip compressed tar "bz2": "bz2open", # bzip2 compressed tar "xz": "xzopen", # lzma compressed tar - "zst": "zstopen" # zstd compressed tar + "zst": "zstopen", # zstd compressed tar } #-------------------------------------------------------------------------- @@ -2439,7 +2439,7 @@ class TarFile(object): unfiltered = tarinfo try: tarinfo = filter_function(tarinfo, path) - except (OSError, FilterError) as e: + except (OSError, UnicodeEncodeError, FilterError) as e: self._handle_fatal_error(e) except ExtractError as e: self._handle_nonfatal_error(e) @@ -2460,7 +2460,7 @@ class TarFile(object): self._extract_member(tarinfo, os.path.join(path, tarinfo.name), set_attrs=set_attrs, numeric_owner=numeric_owner) - except OSError as e: + except (OSError, UnicodeEncodeError) as e: self._handle_fatal_error(e) except ExtractError as e: self._handle_nonfatal_error(e) diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index c74c3a31909..9b6e80fdad9 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -696,9 +696,11 @@ def sortdict(dict): return "{%s}" % withcommas -def run_code(code: str) -> dict[str, object]: +def run_code(code: str, extra_names: dict[str, object] | None = None) -> dict[str, object]: """Run a piece of code after dedenting it, and return its global namespace.""" ns = {} + if extra_names: + ns.update(extra_names) exec(textwrap.dedent(code), ns) return ns diff --git a/Lib/test/support/hashlib_helper.py b/Lib/test/support/hashlib_helper.py index 5043f08dd93..7032257b068 100644 --- a/Lib/test/support/hashlib_helper.py +++ b/Lib/test/support/hashlib_helper.py @@ -23,6 +23,22 @@ def requires_builtin_hmac(): return unittest.skipIf(_hmac is None, "requires _hmac") +def _missing_hash(digestname, implementation=None, *, exc=None): + parts = ["missing", implementation, f"hash algorithm: {digestname!r}"] + msg = " ".join(filter(None, parts)) + raise unittest.SkipTest(msg) from exc + + +def _openssl_availabillity(digestname, *, usedforsecurity): + try: + _hashlib.new(digestname, usedforsecurity=usedforsecurity) + except AttributeError: + assert _hashlib is None + _missing_hash(digestname, "OpenSSL") + except ValueError as exc: + _missing_hash(digestname, "OpenSSL", exc=exc) + + def _decorate_func_or_class(func_or_class, decorator_func): if not isinstance(func_or_class, type): return decorator_func(func_or_class) @@ -71,8 +87,7 @@ def requires_hashdigest(digestname, openssl=None, usedforsecurity=True): try: test_availability() except ValueError as exc: - msg = f"missing hash algorithm: {digestname!r}" - raise unittest.SkipTest(msg) from exc + _missing_hash(digestname, exc=exc) return func(*args, **kwargs) return wrapper @@ -87,14 +102,44 @@ def requires_openssl_hashdigest(digestname, *, usedforsecurity=True): The hashing algorithm may be missing or blocked by a strict crypto policy. 
""" def decorator_func(func): - @requires_hashlib() + @requires_hashlib() # avoid checking at each call @functools.wraps(func) def wrapper(*args, **kwargs): + _openssl_availabillity(digestname, usedforsecurity=usedforsecurity) + return func(*args, **kwargs) + return wrapper + + def decorator(func_or_class): + return _decorate_func_or_class(func_or_class, decorator_func) + return decorator + + +def find_openssl_hashdigest_constructor(digestname, *, usedforsecurity=True): + """Find the OpenSSL hash function constructor by its name.""" + assert isinstance(digestname, str), digestname + _openssl_availabillity(digestname, usedforsecurity=usedforsecurity) + # This returns a function of the form _hashlib.openssl_<name> and + # not a lambda function as it is rejected by _hashlib.hmac_new(). + return getattr(_hashlib, f"openssl_{digestname}") + + +def requires_builtin_hashdigest( + module_name, digestname, *, usedforsecurity=True +): + """Decorator raising SkipTest if a HACL* hashing algorithm is missing. + + - The *module_name* is the C extension module name based on HACL*. + - The *digestname* is one of its member, e.g., 'md5'. + """ + def decorator_func(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + module = import_module(module_name) try: - _hashlib.new(digestname, usedforsecurity=usedforsecurity) - except ValueError: - msg = f"missing OpenSSL hash algorithm: {digestname!r}" - raise unittest.SkipTest(msg) + getattr(module, digestname) + except AttributeError: + fullname = f'{module_name}.{digestname}' + _missing_hash(fullname, implementation="HACL") return func(*args, **kwargs) return wrapper @@ -103,6 +148,168 @@ def requires_openssl_hashdigest(digestname, *, usedforsecurity=True): return decorator +def find_builtin_hashdigest_constructor( + module_name, digestname, *, usedforsecurity=True +): + """Find the HACL* hash function constructor. + + - The *module_name* is the C extension module name based on HACL*. + - The *digestname* is one of its member, e.g., 'md5'. + """ + module = import_module(module_name) + try: + constructor = getattr(module, digestname) + constructor(b'', usedforsecurity=usedforsecurity) + except (AttributeError, TypeError, ValueError): + _missing_hash(f'{module_name}.{digestname}', implementation="HACL") + return constructor + + +class HashFunctionsTrait: + """Mixin trait class containing hash functions. + + This class is assumed to have all unitest.TestCase methods but should + not directly inherit from it to prevent the test suite being run on it. + + Subclasses should implement the hash functions by returning an object + that can be recognized as a valid digestmod parameter for both hashlib + and HMAC. In particular, it cannot be a lambda function as it will not + be recognized by hashlib (it will still be accepted by the pure Python + implementation of HMAC). + """ + + ALGORITHMS = [ + 'md5', 'sha1', + 'sha224', 'sha256', 'sha384', 'sha512', + 'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512', + ] + + # Default 'usedforsecurity' to use when looking up a hash function. + usedforsecurity = True + + def _find_constructor(self, name): + # By default, a missing algorithm skips the test that uses it. 
+ self.assertIn(name, self.ALGORITHMS) + self.skipTest(f"missing hash function: {name}") + + @property + def md5(self): + return self._find_constructor("md5") + + @property + def sha1(self): + return self._find_constructor("sha1") + + @property + def sha224(self): + return self._find_constructor("sha224") + + @property + def sha256(self): + return self._find_constructor("sha256") + + @property + def sha384(self): + return self._find_constructor("sha384") + + @property + def sha512(self): + return self._find_constructor("sha512") + + @property + def sha3_224(self): + return self._find_constructor("sha3_224") + + @property + def sha3_256(self): + return self._find_constructor("sha3_256") + + @property + def sha3_384(self): + return self._find_constructor("sha3_384") + + @property + def sha3_512(self): + return self._find_constructor("sha3_512") + + +class NamedHashFunctionsTrait(HashFunctionsTrait): + """Trait containing named hash functions. + + Hash functions are available if and only if they are available in hashlib. + """ + + def _find_constructor(self, name): + self.assertIn(name, self.ALGORITHMS) + return name + + +class OpenSSLHashFunctionsTrait(HashFunctionsTrait): + """Trait containing OpenSSL hash functions. + + Hash functions are available if and only if they are available in _hashlib. + """ + + def _find_constructor(self, name): + self.assertIn(name, self.ALGORITHMS) + return find_openssl_hashdigest_constructor( + name, usedforsecurity=self.usedforsecurity + ) + + +class BuiltinHashFunctionsTrait(HashFunctionsTrait): + """Trait containing HACL* hash functions. + + Hash functions are available if and only if they are available in C. + In particular, HACL* HMAC-MD5 may be available even though HACL* md5 + is not since the former is unconditionally built. + """ + + def _find_constructor_in(self, module, name): + self.assertIn(name, self.ALGORITHMS) + return find_builtin_hashdigest_constructor(module, name) + + @property + def md5(self): + return self._find_constructor_in("_md5", "md5") + + @property + def sha1(self): + return self._find_constructor_in("_sha1", "sha1") + + @property + def sha224(self): + return self._find_constructor_in("_sha2", "sha224") + + @property + def sha256(self): + return self._find_constructor_in("_sha2", "sha256") + + @property + def sha384(self): + return self._find_constructor_in("_sha2", "sha384") + + @property + def sha512(self): + return self._find_constructor_in("_sha2", "sha512") + + @property + def sha3_224(self): + return self._find_constructor_in("_sha3", "sha3_224") + + @property + def sha3_256(self): + return self._find_constructor_in("_sha3","sha3_256") + + @property + def sha3_384(self): + return self._find_constructor_in("_sha3","sha3_384") + + @property + def sha3_512(self): + return self._find_constructor_in("_sha3","sha3_512") + + def find_gil_minsize(modules_names, default=2048): """Get the largest GIL_MINSIZE value for the given cryptographic modules. 
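The trait classes added to Lib/test/support/hashlib_helper.py above are meant to be mixed into unittest.TestCase subclasses so that attributes such as self.sha256 resolve to a concrete hash constructor, or skip the test when the backing implementation is unavailable. A minimal sketch of such a mix-in, assuming the patched test.support.hashlib_helper is importable; the test class and method names below are made up for illustration:

import unittest

from test.support.hashlib_helper import OpenSSLHashFunctionsTrait


class ExampleOpenSSLDigestTests(OpenSSLHashFunctionsTrait, unittest.TestCase):
    """Hypothetical test case; only the trait name comes from this patch."""

    def test_sha256_hexdigest_length(self):
        # self.sha256 resolves to _hashlib.openssl_sha256 when OpenSSL
        # provides SHA-256; otherwise the property lookup raises SkipTest.
        digest = self.sha256(b"abc").hexdigest()
        self.assertEqual(len(digest), 64)


if __name__ == "__main__":
    unittest.main()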
diff --git a/Lib/test/test_asyncio/test_futures.py b/Lib/test/test_asyncio/test_futures.py index 8b51522278a..39bef465bdb 100644 --- a/Lib/test/test_asyncio/test_futures.py +++ b/Lib/test/test_asyncio/test_futures.py @@ -413,7 +413,7 @@ class BaseFutureTests: def test_copy_state(self): from asyncio.futures import _copy_future_state - f = self._new_future(loop=self.loop) + f = concurrent.futures.Future() f.set_result(10) newf = self._new_future(loop=self.loop) @@ -421,7 +421,7 @@ class BaseFutureTests: self.assertTrue(newf.done()) self.assertEqual(newf.result(), 10) - f_exception = self._new_future(loop=self.loop) + f_exception = concurrent.futures.Future() f_exception.set_exception(RuntimeError()) newf_exception = self._new_future(loop=self.loop) @@ -429,7 +429,7 @@ class BaseFutureTests: self.assertTrue(newf_exception.done()) self.assertRaises(RuntimeError, newf_exception.result) - f_cancelled = self._new_future(loop=self.loop) + f_cancelled = concurrent.futures.Future() f_cancelled.cancel() newf_cancelled = self._new_future(loop=self.loop) @@ -441,7 +441,7 @@ class BaseFutureTests: except BaseException as e: f_exc = e - f_conexc = self._new_future(loop=self.loop) + f_conexc = concurrent.futures.Future() f_conexc.set_exception(f_exc) newf_conexc = self._new_future(loop=self.loop) @@ -454,6 +454,56 @@ class BaseFutureTests: newf_tb = ''.join(traceback.format_tb(newf_exc.__traceback__)) self.assertEqual(newf_tb.count('raise concurrent.futures.InvalidStateError'), 1) + def test_copy_state_from_concurrent_futures(self): + """Test _copy_future_state from concurrent.futures.Future. + + This tests the optimized path using _get_snapshot when available. + """ + from asyncio.futures import _copy_future_state + + # Test with a result + f_concurrent = concurrent.futures.Future() + f_concurrent.set_result(42) + f_asyncio = self._new_future(loop=self.loop) + _copy_future_state(f_concurrent, f_asyncio) + self.assertTrue(f_asyncio.done()) + self.assertEqual(f_asyncio.result(), 42) + + # Test with an exception + f_concurrent_exc = concurrent.futures.Future() + f_concurrent_exc.set_exception(ValueError("test exception")) + f_asyncio_exc = self._new_future(loop=self.loop) + _copy_future_state(f_concurrent_exc, f_asyncio_exc) + self.assertTrue(f_asyncio_exc.done()) + with self.assertRaises(ValueError) as cm: + f_asyncio_exc.result() + self.assertEqual(str(cm.exception), "test exception") + + # Test with cancelled state + f_concurrent_cancelled = concurrent.futures.Future() + f_concurrent_cancelled.cancel() + f_asyncio_cancelled = self._new_future(loop=self.loop) + _copy_future_state(f_concurrent_cancelled, f_asyncio_cancelled) + self.assertTrue(f_asyncio_cancelled.cancelled()) + + # Test that destination already cancelled prevents copy + f_concurrent_result = concurrent.futures.Future() + f_concurrent_result.set_result(10) + f_asyncio_precancelled = self._new_future(loop=self.loop) + f_asyncio_precancelled.cancel() + _copy_future_state(f_concurrent_result, f_asyncio_precancelled) + self.assertTrue(f_asyncio_precancelled.cancelled()) + + # Test exception type conversion + f_concurrent_invalid = concurrent.futures.Future() + f_concurrent_invalid.set_exception(concurrent.futures.InvalidStateError("invalid")) + f_asyncio_invalid = self._new_future(loop=self.loop) + _copy_future_state(f_concurrent_invalid, f_asyncio_invalid) + self.assertTrue(f_asyncio_invalid.done()) + with self.assertRaises(asyncio.exceptions.InvalidStateError) as cm: + f_asyncio_invalid.result() + self.assertEqual(str(cm.exception), "invalid") 
+ def test_iter(self): fut = self._new_future(loop=self.loop) diff --git a/Lib/test/test_concurrent_futures/test_future.py b/Lib/test/test_concurrent_futures/test_future.py index 4066ea1ee4b..06b11a3bacf 100644 --- a/Lib/test/test_concurrent_futures/test_future.py +++ b/Lib/test/test_concurrent_futures/test_future.py @@ -6,6 +6,7 @@ from concurrent.futures._base import ( PENDING, RUNNING, CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED, Future) from test import support +from test.support import threading_helper from .util import ( PENDING_FUTURE, RUNNING_FUTURE, CANCELLED_FUTURE, @@ -282,6 +283,62 @@ class FutureTests(BaseTestCase): self.assertEqual(f.exception(), e) + def test_get_snapshot(self): + """Test the _get_snapshot method for atomic state retrieval.""" + # Test with a pending future + f = Future() + done, cancelled, result, exception = f._get_snapshot() + self.assertFalse(done) + self.assertFalse(cancelled) + self.assertIsNone(result) + self.assertIsNone(exception) + + # Test with a finished future (successful result) + f = Future() + f.set_result(42) + done, cancelled, result, exception = f._get_snapshot() + self.assertTrue(done) + self.assertFalse(cancelled) + self.assertEqual(result, 42) + self.assertIsNone(exception) + + # Test with a finished future (exception) + f = Future() + exc = ValueError("test error") + f.set_exception(exc) + done, cancelled, result, exception = f._get_snapshot() + self.assertTrue(done) + self.assertFalse(cancelled) + self.assertIsNone(result) + self.assertIs(exception, exc) + + # Test with a cancelled future + f = Future() + f.cancel() + done, cancelled, result, exception = f._get_snapshot() + self.assertTrue(done) + self.assertTrue(cancelled) + self.assertIsNone(result) + self.assertIsNone(exception) + + # Test concurrent access (basic thread safety check) + f = Future() + f.set_result(100) + results = [] + + def get_snapshot(): + for _ in range(1000): + snapshot = f._get_snapshot() + results.append(snapshot) + + threads = [threading.Thread(target=get_snapshot) for _ in range(4)] + with threading_helper.start_threads(threads): + pass + # All snapshots should be identical for a finished future + expected = (True, False, 100, None) + for result in results: + self.assertEqual(result, expected) + def setUpModule(): setup_module() diff --git a/Lib/test/test_faulthandler.py b/Lib/test/test_faulthandler.py index 371c63adce9..2fb963f52e5 100644 --- a/Lib/test/test_faulthandler.py +++ b/Lib/test/test_faulthandler.py @@ -166,29 +166,6 @@ class FaultHandlerTests(unittest.TestCase): fatal_error = 'Windows fatal exception: %s' % name_regex self.check_error(code, line_number, fatal_error, **kw) - @unittest.skipIf(sys.platform.startswith('aix'), - "the first page of memory is a mapped read-only on AIX") - def test_read_null(self): - if not MS_WINDOWS: - self.check_fatal_error(""" - import faulthandler - faulthandler.enable() - faulthandler._read_null() - """, - 3, - # Issue #12700: Read NULL raises SIGILL on Mac OS X Lion - '(?:Segmentation fault' - '|Bus error' - '|Illegal instruction)') - else: - self.check_windows_exception(""" - import faulthandler - faulthandler.enable() - faulthandler._read_null() - """, - 3, - 'access violation') - @skip_segfault_on_android def test_sigsegv(self): self.check_fatal_error(""" diff --git a/Lib/test/test_fractions.py b/Lib/test/test_fractions.py index 84faa636064..96b3f305194 100644 --- a/Lib/test/test_fractions.py +++ b/Lib/test/test_fractions.py @@ -1,7 +1,7 @@ """Tests for Lib/fractions.py.""" from decimal import Decimal -from 
test.support import requires_IEEE_754 +from test.support import requires_IEEE_754, adjust_int_max_str_digits import math import numbers import operator @@ -395,12 +395,14 @@ class FractionTest(unittest.TestCase): def testFromString(self): self.assertEqual((5, 1), _components(F("5"))) + self.assertEqual((5, 1), _components(F("005"))) self.assertEqual((3, 2), _components(F("3/2"))) self.assertEqual((3, 2), _components(F("3 / 2"))) self.assertEqual((3, 2), _components(F(" \n +3/2"))) self.assertEqual((-3, 2), _components(F("-3/2 "))) - self.assertEqual((13, 2), _components(F(" 013/02 \n "))) + self.assertEqual((13, 2), _components(F(" 0013/002 \n "))) self.assertEqual((16, 5), _components(F(" 3.2 "))) + self.assertEqual((16, 5), _components(F("003.2"))) self.assertEqual((-16, 5), _components(F(" -3.2 "))) self.assertEqual((-3, 1), _components(F(" -3. "))) self.assertEqual((3, 5), _components(F(" .6 "))) @@ -419,116 +421,102 @@ class FractionTest(unittest.TestCase): self.assertRaisesMessage( ZeroDivisionError, "Fraction(3, 0)", F, "3/0") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '3/'", - F, "3/") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '/2'", - F, "/2") - self.assertRaisesMessage( - # Denominators don't need a sign. - ValueError, "Invalid literal for Fraction: '3/+2'", - F, "3/+2") - self.assertRaisesMessage( - # Imitate float's parsing. - ValueError, "Invalid literal for Fraction: '+ 3/2'", - F, "+ 3/2") - self.assertRaisesMessage( - # Avoid treating '.' as a regex special character. - ValueError, "Invalid literal for Fraction: '3a2'", - F, "3a2") - self.assertRaisesMessage( - # Don't accept combinations of decimals and rationals. - ValueError, "Invalid literal for Fraction: '3/7.2'", - F, "3/7.2") - self.assertRaisesMessage( - # Don't accept combinations of decimals and rationals. - ValueError, "Invalid literal for Fraction: '3.2/7'", - F, "3.2/7") - self.assertRaisesMessage( - # Allow 3. and .3, but not . 
- ValueError, "Invalid literal for Fraction: '.'", - F, ".") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '_'", - F, "_") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '_1'", - F, "_1") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '1__2'", - F, "1__2") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '/_'", - F, "/_") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '1_/'", - F, "1_/") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '_1/'", - F, "_1/") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '1__2/'", - F, "1__2/") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '1/_'", - F, "1/_") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '1/_1'", - F, "1/_1") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '1/1__2'", - F, "1/1__2") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '1._111'", - F, "1._111") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '1.1__1'", - F, "1.1__1") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '1.1e+_1'", - F, "1.1e+_1") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '1.1e+1__1'", - F, "1.1e+1__1") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '123.dd'", - F, "123.dd") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '123.5_dd'", - F, "123.5_dd") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: 'dd.5'", - F, "dd.5") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '7_dd'", - F, "7_dd") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '1/dd'", - F, "1/dd") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '1/123_dd'", - F, "1/123_dd") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '789edd'", - F, "789edd") - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '789e2_dd'", - F, "789e2_dd") + + def check_invalid(s): + msg = "Invalid literal for Fraction: " + repr(s) + self.assertRaisesMessage(ValueError, msg, F, s) + + check_invalid("3/") + check_invalid("/2") + # Denominators don't need a sign. + check_invalid("3/+2") + check_invalid("3/-2") + # Imitate float's parsing. + check_invalid("+ 3/2") + check_invalid("- 3/2") + # Avoid treating '.' as a regex special character. + check_invalid("3a2") + # Don't accept combinations of decimals and rationals. + check_invalid("3/7.2") + check_invalid("3.2/7") + # No space around dot. + check_invalid("3 .2") + check_invalid("3. 2") + # No space around e. + check_invalid("3.2 e1") + check_invalid("3.2e 1") + # Fractional part don't need a sign. + check_invalid("3.+2") + check_invalid("3.-2") + # Only accept base 10. + check_invalid("0x10") + check_invalid("0x10/1") + check_invalid("1/0x10") + check_invalid("0x10.") + check_invalid("0x10.1") + check_invalid("1.0x10") + check_invalid("1.0e0x10") + # Only accept decimal digits. + check_invalid("³") + check_invalid("³/2") + check_invalid("3/²") + check_invalid("³.2") + check_invalid("3.²") + check_invalid("3.2e²") + check_invalid("¼") + # Allow 3. and .3, but not . 
+ check_invalid(".") + check_invalid("_") + check_invalid("_1") + check_invalid("1__2") + check_invalid("/_") + check_invalid("1_/") + check_invalid("_1/") + check_invalid("1__2/") + check_invalid("1/_") + check_invalid("1/_1") + check_invalid("1/1__2") + check_invalid("1._111") + check_invalid("1.1__1") + check_invalid("1.1e+_1") + check_invalid("1.1e+1__1") + check_invalid("123.dd") + check_invalid("123.5_dd") + check_invalid("dd.5") + check_invalid("7_dd") + check_invalid("1/dd") + check_invalid("1/123_dd") + check_invalid("789edd") + check_invalid("789e2_dd") # Test catastrophic backtracking. val = "9"*50 + "_" - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '" + val + "'", - F, val) - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '1/" + val + "'", - F, "1/" + val) - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '1." + val + "'", - F, "1." + val) - self.assertRaisesMessage( - ValueError, "Invalid literal for Fraction: '1.1+e" + val + "'", - F, "1.1+e" + val) + check_invalid(val) + check_invalid("1/" + val) + check_invalid("1." + val) + check_invalid("." + val) + check_invalid("1.1+e" + val) + check_invalid("1.1e" + val) + + def test_limit_int(self): + maxdigits = 5000 + with adjust_int_max_str_digits(maxdigits): + msg = 'Exceeds the limit' + val = '1' * maxdigits + num = (10**maxdigits - 1)//9 + self.assertEqual((num, 1), _components(F(val))) + self.assertRaisesRegex(ValueError, msg, F, val + '1') + self.assertEqual((num, 2), _components(F(val + '/2'))) + self.assertRaisesRegex(ValueError, msg, F, val + '1/2') + self.assertEqual((1, num), _components(F('1/' + val))) + self.assertRaisesRegex(ValueError, msg, F, '1/1' + val) + self.assertEqual(((10**(maxdigits+1) - 1)//9, 10**maxdigits), + _components(F('1.' + val))) + self.assertRaisesRegex(ValueError, msg, F, '1.1' + val) + self.assertEqual((num, 10**maxdigits), _components(F('.' 
+ val))) + self.assertRaisesRegex(ValueError, msg, F, '.1' + val) + self.assertRaisesRegex(ValueError, msg, F, '1.1e1' + val) + self.assertEqual((11, 10), _components(F('1.1e' + '0' * maxdigits))) + self.assertRaisesRegex(ValueError, msg, F, '1.1e' + '0' * (maxdigits+1)) def testImmutable(self): r = F(7, 3) diff --git a/Lib/test/test_free_threading/test_functools.py b/Lib/test/test_free_threading/test_functools.py new file mode 100644 index 00000000000..a442fe056ce --- /dev/null +++ b/Lib/test/test_free_threading/test_functools.py @@ -0,0 +1,75 @@ +import random +import unittest + +from functools import lru_cache +from threading import Barrier, Thread + +from test.support import threading_helper + +@threading_helper.requires_working_threading() +class TestLRUCache(unittest.TestCase): + + def _test_concurrent_operations(self, maxsize): + num_threads = 10 + b = Barrier(num_threads) + @lru_cache(maxsize=maxsize) + def func(arg=0): + return object() + + + def thread_func(): + b.wait() + for i in range(1000): + r = random.randint(0, 1000) + if i < 800: + func(i) + elif i < 900: + func.cache_info() + else: + func.cache_clear() + + threads = [] + for i in range(num_threads): + t = Thread(target=thread_func) + threads.append(t) + + with threading_helper.start_threads(threads): + pass + + def test_concurrent_operations_unbounded(self): + self._test_concurrent_operations(maxsize=None) + + def test_concurrent_operations_bounded(self): + self._test_concurrent_operations(maxsize=128) + + def _test_reentrant_cache_clear(self, maxsize): + num_threads = 10 + b = Barrier(num_threads) + @lru_cache(maxsize=maxsize) + def func(arg=0): + func.cache_clear() + return object() + + + def thread_func(): + b.wait() + for i in range(1000): + func(random.randint(0, 10000)) + + threads = [] + for i in range(num_threads): + t = Thread(target=thread_func) + threads.append(t) + + with threading_helper.start_threads(threads): + pass + + def test_reentrant_cache_clear_unbounded(self): + self._test_reentrant_cache_clear(maxsize=None) + + def test_reentrant_cache_clear_bounded(self): + self._test_reentrant_cache_clear(maxsize=128) + + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/test/test_genericalias.py b/Lib/test/test_genericalias.py index 8d21ded4501..ea0dc241e39 100644 --- a/Lib/test/test_genericalias.py +++ b/Lib/test/test_genericalias.py @@ -61,6 +61,7 @@ try: from tkinter import Event except ImportError: Event = None +from string.templatelib import Template, Interpolation from typing import TypeVar T = TypeVar('T') @@ -139,7 +140,10 @@ class BaseTest(unittest.TestCase): DictReader, DictWriter, array, staticmethod, - classmethod] + classmethod, + Template, + Interpolation, + ] if ctypes is not None: generic_types.extend((ctypes.Array, ctypes.LibraryLoader, ctypes.py_object)) if ValueProxy is not None: diff --git a/Lib/test/test_hmac.py b/Lib/test/test_hmac.py index 70c79437722..e898644dd8a 100644 --- a/Lib/test/test_hmac.py +++ b/Lib/test/test_hmac.py @@ -1,8 +1,27 @@ +"""Test suite for HMAC. + +Python provides three different implementations of HMAC: + +- OpenSSL HMAC using OpenSSL hash functions. +- HACL* HMAC using HACL* hash functions. +- Generic Python HMAC using user-defined hash functions. + +The generic Python HMAC implementation is able to use OpenSSL +callables or names, HACL* named hash functions or arbitrary +objects implementing PEP 247 interface. + +In the two first cases, Python HMAC wraps a C HMAC object (either OpenSSL +or HACL*-based). 
As a last resort, HMAC is re-implemented in pure Python. +It is however interesting to test the pure Python implementation against +the OpenSSL and HACL* hash functions. +""" + import binascii import functools import hmac import hashlib import random +import test.support import test.support.hashlib_helper as hashlib_helper import types import unittest @@ -10,6 +29,12 @@ import unittest.mock as mock import warnings from _operator import _compare_digest as operator_compare_digest from test.support import check_disallow_instantiation +from test.support.hashlib_helper import ( + BuiltinHashFunctionsTrait, + HashFunctionsTrait, + NamedHashFunctionsTrait, + OpenSSLHashFunctionsTrait, +) from test.support.import_helper import import_fresh_module, import_module try: @@ -382,50 +407,7 @@ class BuiltinAssertersMixin(ThroughBuiltinAPIMixin, AssertersMixin): pass -class HashFunctionsTrait: - """Trait class for 'hashfunc' in hmac_new() and hmac_digest().""" - - ALGORITHMS = [ - 'md5', 'sha1', - 'sha224', 'sha256', 'sha384', 'sha512', - 'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512', - ] - - # By default, a missing algorithm skips the test that uses it. - _ = property(lambda self: self.skipTest("missing hash function")) - md5 = sha1 = _ - sha224 = sha256 = sha384 = sha512 = _ - sha3_224 = sha3_256 = sha3_384 = sha3_512 = _ - del _ - - -class WithOpenSSLHashFunctions(HashFunctionsTrait): - """Test a HMAC implementation with an OpenSSL-based callable 'hashfunc'.""" - - @classmethod - def setUpClass(cls): - super().setUpClass() - - for name in cls.ALGORITHMS: - @property - @hashlib_helper.requires_openssl_hashdigest(name) - def func(self, *, __name=name): # __name needed to bind 'name' - return getattr(_hashlib, f'openssl_{__name}') - setattr(cls, name, func) - - -class WithNamedHashFunctions(HashFunctionsTrait): - """Test a HMAC implementation with a named 'hashfunc'.""" - - @classmethod - def setUpClass(cls): - super().setUpClass() - - for name in cls.ALGORITHMS: - setattr(cls, name, name) - - -class RFCTestCaseMixin(AssertersMixin, HashFunctionsTrait): +class RFCTestCaseMixin(HashFunctionsTrait, AssertersMixin): """Test HMAC implementations against RFC 2202/4231 and NIST test vectors. - Test vectors for MD5 and SHA-1 are taken from RFC 2202. @@ -739,26 +721,83 @@ class RFCTestCaseMixin(AssertersMixin, HashFunctionsTrait): ) -class PyRFCTestCase(ThroughObjectMixin, PyAssertersMixin, - WithOpenSSLHashFunctions, RFCTestCaseMixin, - unittest.TestCase): +class PurePythonInitHMAC(PyModuleMixin, HashFunctionsTrait): + + @classmethod + def setUpClass(cls): + super().setUpClass() + for meth in ['_init_openssl_hmac', '_init_builtin_hmac']: + fn = getattr(cls.hmac.HMAC, meth) + cm = mock.patch.object(cls.hmac.HMAC, meth, autospec=True, wraps=fn) + cls.enterClassContext(cm) + + @classmethod + def tearDownClass(cls): + cls.hmac.HMAC._init_openssl_hmac.assert_not_called() + cls.hmac.HMAC._init_builtin_hmac.assert_not_called() + # Do not assert that HMAC._init_old() has been called as it's tricky + # to determine whether a test for a specific hash function has been + # executed or not. On regular builds, it will be called but if a + # hash function is not available, it's hard to detect for which + # test we should checj HMAC._init_old() or not. + super().tearDownClass() + + +class PyRFCOpenSSLTestCase(ThroughObjectMixin, + PyAssertersMixin, + OpenSSLHashFunctionsTrait, + RFCTestCaseMixin, + PurePythonInitHMAC, + unittest.TestCase): """Python implementation of HMAC using hmac.HMAC(). 
-    The underlying hash functions are OpenSSL-based.
+    The underlying hash functions are OpenSSL-based but
+    _init_old() is used instead of _init_openssl_hmac().
     """
-class PyDotNewRFCTestCase(ThroughModuleAPIMixin, PyAssertersMixin,
-                          WithOpenSSLHashFunctions, RFCTestCaseMixin,
-                          unittest.TestCase):
+class PyRFCBuiltinTestCase(ThroughObjectMixin,
+                           PyAssertersMixin,
+                           BuiltinHashFunctionsTrait,
+                           RFCTestCaseMixin,
+                           PurePythonInitHMAC,
+                           unittest.TestCase):
+    """Python implementation of HMAC using hmac.HMAC().
+
+    The underlying hash functions are HACL*-based but
+    _init_old() is used instead of _init_builtin_hmac().
+    """
+
+
+class PyDotNewOpenSSLRFCTestCase(ThroughModuleAPIMixin,
+                                 PyAssertersMixin,
+                                 OpenSSLHashFunctionsTrait,
+                                 RFCTestCaseMixin,
+                                 PurePythonInitHMAC,
+                                 unittest.TestCase):
+    """Python implementation of HMAC using hmac.new().
+
+    The underlying hash functions are OpenSSL-based but
+    _init_old() is used instead of _init_openssl_hmac().
+    """
+
+
+class PyDotNewBuiltinRFCTestCase(ThroughModuleAPIMixin,
+                                 PyAssertersMixin,
+                                 BuiltinHashFunctionsTrait,
+                                 RFCTestCaseMixin,
+                                 PurePythonInitHMAC,
+                                 unittest.TestCase):
     """Python implementation of HMAC using hmac.new().
-    The underlying hash functions are OpenSSL-based.
+    The underlying hash functions are HACL*-based but
+    _init_old() is used instead of _init_builtin_hmac().
     """
 class OpenSSLRFCTestCase(OpenSSLAssertersMixin,
-                         WithOpenSSLHashFunctions, RFCTestCaseMixin,
+                         OpenSSLHashFunctionsTrait,
+                         RFCTestCaseMixin,
                          unittest.TestCase):
     """OpenSSL implementation of HMAC.
@@ -767,7 +806,8 @@ class OpenSSLRFCTestCase(OpenSSLAssertersMixin,
 class BuiltinRFCTestCase(BuiltinAssertersMixin,
-                         WithNamedHashFunctions, RFCTestCaseMixin,
+                         NamedHashFunctionsTrait,
+                         RFCTestCaseMixin,
                          unittest.TestCase):
     """Built-in HACL* implementation of HMAC.
@@ -784,12 +824,6 @@ class BuiltinRFCTestCase(BuiltinAssertersMixin,
         self.check_hmac_hexdigest(key, msg, hexdigest, digest_size, func)
-# TODO(picnixz): once we have a HACL* HMAC, we should also test the Python
-# implementation of HMAC with a HACL*-based hash function. For now, we only
-# test it partially via the '_sha2' module, but for completeness we could
-# also test the RFC test vectors against all possible implementations.
-
-
 class DigestModTestCaseMixin(CreatorMixin, DigestMixin):
     """Tests for the 'digestmod' parameter for hmac_new() and hmac_digest()."""
diff --git a/Lib/test/test_httpservers.py b/Lib/test/test_httpservers.py
index 2cafa4e45a1..df5d2a7bedc 100644
--- a/Lib/test/test_httpservers.py
+++ b/Lib/test/test_httpservers.py
@@ -3,16 +3,15 @@
 Written by Cody A.W. Somerville <cody-somerville@ubuntu.com>,
 Josip Dzolonga, and Michael Otteneder for the 2007/08 GHOP contest.
""" -from collections import OrderedDict + from http.server import BaseHTTPRequestHandler, HTTPServer, HTTPSServer, \ - SimpleHTTPRequestHandler, CGIHTTPRequestHandler + SimpleHTTPRequestHandler from http import server, HTTPStatus import os import socket import sys import re -import base64 import ntpath import pathlib import shutil @@ -31,7 +30,7 @@ from io import BytesIO, StringIO import unittest from test import support from test.support import ( - is_apple, import_helper, os_helper, requires_subprocess, threading_helper + is_apple, import_helper, os_helper, threading_helper ) try: @@ -522,42 +521,120 @@ class SimpleHTTPServerTestCase(BaseTestCase): reader.close() return body + def check_list_dir_dirname(self, dirname, quotedname=None): + fullpath = os.path.join(self.tempdir, dirname) + try: + os.mkdir(os.path.join(self.tempdir, dirname)) + except (OSError, UnicodeEncodeError): + self.skipTest(f'Can not create directory {dirname!a} ' + f'on current file system') + + if quotedname is None: + quotedname = urllib.parse.quote(dirname, errors='surrogatepass') + response = self.request(self.base_url + '/' + quotedname + '/') + body = self.check_status_and_reason(response, HTTPStatus.OK) + displaypath = html.escape(f'{self.base_url}/{dirname}/', quote=False) + enc = sys.getfilesystemencoding() + prefix = f'listing for {displaypath}</'.encode(enc, 'surrogateescape') + self.assertIn(prefix + b'title>', body) + self.assertIn(prefix + b'h1>', body) + + def check_list_dir_filename(self, filename): + fullpath = os.path.join(self.tempdir, filename) + content = ascii(fullpath).encode() + (os_helper.TESTFN_UNDECODABLE or b'\xff') + try: + with open(fullpath, 'wb') as f: + f.write(content) + except OSError: + self.skipTest(f'Can not create file {filename!a} ' + f'on current file system') + + response = self.request(self.base_url + '/') + body = self.check_status_and_reason(response, HTTPStatus.OK) + quotedname = urllib.parse.quote(filename, errors='surrogatepass') + enc = response.headers.get_content_charset() + self.assertIsNotNone(enc) + self.assertIn((f'href="{quotedname}"').encode('ascii'), body) + displayname = html.escape(filename, quote=False) + self.assertIn(f'>{displayname}<'.encode(enc, 'surrogateescape'), body) + + response = self.request(self.base_url + '/' + quotedname) + self.check_status_and_reason(response, HTTPStatus.OK, data=content) + + @unittest.skipUnless(os_helper.TESTFN_NONASCII, + 'need os_helper.TESTFN_NONASCII') + def test_list_dir_nonascii_dirname(self): + dirname = os_helper.TESTFN_NONASCII + '.dir' + self.check_list_dir_dirname(dirname) + + @unittest.skipUnless(os_helper.TESTFN_NONASCII, + 'need os_helper.TESTFN_NONASCII') + def test_list_dir_nonascii_filename(self): + filename = os_helper.TESTFN_NONASCII + '.txt' + self.check_list_dir_filename(filename) + @unittest.skipIf(is_apple, 'undecodable name cannot always be decoded on Apple platforms') @unittest.skipIf(sys.platform == 'win32', 'undecodable name cannot be decoded on win32') @unittest.skipUnless(os_helper.TESTFN_UNDECODABLE, 'need os_helper.TESTFN_UNDECODABLE') - def test_undecodable_filename(self): - enc = sys.getfilesystemencoding() - filename = os.fsdecode(os_helper.TESTFN_UNDECODABLE) + '.txt' - with open(os.path.join(self.tempdir, filename), 'wb') as f: - f.write(os_helper.TESTFN_UNDECODABLE) - response = self.request(self.base_url + '/') - if is_apple: - # On Apple platforms the HFS+ filesystem replaces bytes that - # aren't valid UTF-8 into a percent-encoded value. 
- for name in os.listdir(self.tempdir): - if name != 'test': # Ignore a filename created in setUp(). - filename = name - break - body = self.check_status_and_reason(response, HTTPStatus.OK) - quotedname = urllib.parse.quote(filename, errors='surrogatepass') - self.assertIn(('href="%s"' % quotedname) - .encode(enc, 'surrogateescape'), body) - self.assertIn(('>%s<' % html.escape(filename, quote=False)) - .encode(enc, 'surrogateescape'), body) - response = self.request(self.base_url + '/' + quotedname) - self.check_status_and_reason(response, HTTPStatus.OK, - data=os_helper.TESTFN_UNDECODABLE) + def test_list_dir_undecodable_dirname(self): + dirname = os.fsdecode(os_helper.TESTFN_UNDECODABLE) + '.dir' + self.check_list_dir_dirname(dirname) - def test_undecodable_parameter(self): - # sanity check using a valid parameter + @unittest.skipIf(is_apple, + 'undecodable name cannot always be decoded on Apple platforms') + @unittest.skipIf(sys.platform == 'win32', + 'undecodable name cannot be decoded on win32') + @unittest.skipUnless(os_helper.TESTFN_UNDECODABLE, + 'need os_helper.TESTFN_UNDECODABLE') + def test_list_dir_undecodable_filename(self): + filename = os.fsdecode(os_helper.TESTFN_UNDECODABLE) + '.txt' + self.check_list_dir_filename(filename) + + def test_list_dir_undecodable_dirname2(self): + dirname = '\ufffd.dir' + self.check_list_dir_dirname(dirname, quotedname='%ff.dir') + + @unittest.skipUnless(os_helper.TESTFN_UNENCODABLE, + 'need os_helper.TESTFN_UNENCODABLE') + def test_list_dir_unencodable_dirname(self): + dirname = os_helper.TESTFN_UNENCODABLE + '.dir' + self.check_list_dir_dirname(dirname) + + @unittest.skipUnless(os_helper.TESTFN_UNENCODABLE, + 'need os_helper.TESTFN_UNENCODABLE') + def test_list_dir_unencodable_filename(self): + filename = os_helper.TESTFN_UNENCODABLE + '.txt' + self.check_list_dir_filename(filename) + + def test_list_dir_escape_dirname(self): + # Characters that need special treating in URL or HTML. + for name in ('q?', 'f#', '&amp;', '&', '<i>', '"dq"', "'sq'", + '%A4', '%E2%82%AC'): + with self.subTest(name=name): + dirname = name + '.dir' + self.check_list_dir_dirname(dirname, + quotedname=urllib.parse.quote(dirname, safe='&<>\'"')) + + def test_list_dir_escape_filename(self): + # Characters that need special treating in URL or HTML. + for name in ('q?', 'f#', '&amp;', '&', '<i>', '"dq"', "'sq'", + '%A4', '%E2%82%AC'): + with self.subTest(name=name): + filename = name + '.txt' + self.check_list_dir_filename(filename) + os_helper.unlink(os.path.join(self.tempdir, filename)) + + def test_list_dir_with_query_and_fragment(self): + prefix = f'listing for {self.base_url}/</'.encode('latin1') + response = self.request(self.base_url + '/#123').read() + self.assertIn(prefix + b'title>', response) + self.assertIn(prefix + b'h1>', response) response = self.request(self.base_url + '/?x=123').read() - self.assertRegex(response, rf'listing for {self.base_url}/\?x=123'.encode('latin1')) - # now the bogus encoding - response = self.request(self.base_url + '/?x=%bb').read() - self.assertRegex(response, rf'listing for {self.base_url}/\?x=\xef\xbf\xbd'.encode('latin1')) + self.assertIn(prefix + b'title>', response) + self.assertIn(prefix + b'h1>', response) def test_get_dir_redirect_location_domain_injection_bug(self): """Ensure //evil.co/..%2f../../X does not put //evil.co/ in Location. @@ -615,10 +692,19 @@ class SimpleHTTPServerTestCase(BaseTestCase): # check for trailing "/" which should return 404.
See Issue17324 response = self.request(self.base_url + '/test/') self.check_status_and_reason(response, HTTPStatus.NOT_FOUND) + response = self.request(self.base_url + '/test%2f') + self.check_status_and_reason(response, HTTPStatus.NOT_FOUND) + response = self.request(self.base_url + '/test%2F') + self.check_status_and_reason(response, HTTPStatus.NOT_FOUND) response = self.request(self.base_url + '/') self.check_status_and_reason(response, HTTPStatus.OK) + response = self.request(self.base_url + '%2f') + self.check_status_and_reason(response, HTTPStatus.OK) + response = self.request(self.base_url + '%2F') + self.check_status_and_reason(response, HTTPStatus.OK) response = self.request(self.base_url) self.check_status_and_reason(response, HTTPStatus.MOVED_PERMANENTLY) + self.assertEqual(response.getheader("Location"), self.base_url + "/") self.assertEqual(response.getheader("Content-Length"), "0") response = self.request(self.base_url + '/?hi=2') self.check_status_and_reason(response, HTTPStatus.OK) @@ -724,6 +810,8 @@ class SimpleHTTPServerTestCase(BaseTestCase): self.check_status_and_reason(response, HTTPStatus.OK) response = self.request(self.tempdir_name) self.check_status_and_reason(response, HTTPStatus.MOVED_PERMANENTLY) + self.assertEqual(response.getheader("Location"), + self.tempdir_name + "/") response = self.request(self.tempdir_name + '/?hi=2') self.check_status_and_reason(response, HTTPStatus.OK) response = self.request(self.tempdir_name + '?hi=1') @@ -731,350 +819,6 @@ class SimpleHTTPServerTestCase(BaseTestCase): self.assertEqual(response.getheader("Location"), self.tempdir_name + "/?hi=1") - def test_html_escape_filename(self): - filename = '<test&>.txt' - fullpath = os.path.join(self.tempdir, filename) - - try: - open(fullpath, 'wb').close() - except OSError: - raise unittest.SkipTest('Can not create file %s on current file ' - 'system' % filename) - - try: - response = self.request(self.base_url + '/') - body = self.check_status_and_reason(response, HTTPStatus.OK) - enc = response.headers.get_content_charset() - finally: - os.unlink(fullpath) # avoid affecting test_undecodable_filename - - self.assertIsNotNone(enc) - html_text = '>%s<' % html.escape(filename, quote=False) - self.assertIn(html_text.encode(enc), body) - - -cgi_file1 = """\ -#!%s - -print("Content-type: text/html") -print() -print("Hello World") -""" - -cgi_file2 = """\ -#!%s -import os -import sys -import urllib.parse - -print("Content-type: text/html") -print() - -content_length = int(os.environ["CONTENT_LENGTH"]) -query_string = sys.stdin.buffer.read(content_length) -params = {key.decode("utf-8"): val.decode("utf-8") - for key, val in urllib.parse.parse_qsl(query_string)} - -print("%%s, %%s, %%s" %% (params["spam"], params["eggs"], params["bacon"])) -""" - -cgi_file4 = """\ -#!%s -import os - -print("Content-type: text/html") -print() - -print(os.environ["%s"]) -""" - -cgi_file6 = """\ -#!%s -import os - -print("X-ambv: was here") -print("Content-type: text/html") -print() -print("<pre>") -for k, v in os.environ.items(): - try: - k.encode('ascii') - v.encode('ascii') - except UnicodeEncodeError: - continue # see: BPO-44647 - print(f"{k}={v}") -print("</pre>") -""" - - -@unittest.skipIf(hasattr(os, 'geteuid') and os.geteuid() == 0, - "This test can't be run reliably as root (issue #13308).") -@requires_subprocess() -class CGIHTTPServerTestCase(BaseTestCase): - class request_handler(NoLogRequestHandler, CGIHTTPRequestHandler): - _test_case_self = None # populated by each setUp() method call. 
- - def __init__(self, *args, **kwargs): - with self._test_case_self.assertWarnsRegex( - DeprecationWarning, - r'http\.server\.CGIHTTPRequestHandler'): - # This context also happens to catch and silence the - # threading DeprecationWarning from os.fork(). - super().__init__(*args, **kwargs) - - linesep = os.linesep.encode('ascii') - - def setUp(self): - self.request_handler._test_case_self = self # practical, but yuck. - BaseTestCase.setUp(self) - self.cwd = os.getcwd() - self.parent_dir = tempfile.mkdtemp() - self.cgi_dir = os.path.join(self.parent_dir, 'cgi-bin') - self.cgi_child_dir = os.path.join(self.cgi_dir, 'child-dir') - self.sub_dir_1 = os.path.join(self.parent_dir, 'sub') - self.sub_dir_2 = os.path.join(self.sub_dir_1, 'dir') - self.cgi_dir_in_sub_dir = os.path.join(self.sub_dir_2, 'cgi-bin') - os.mkdir(self.cgi_dir) - os.mkdir(self.cgi_child_dir) - os.mkdir(self.sub_dir_1) - os.mkdir(self.sub_dir_2) - os.mkdir(self.cgi_dir_in_sub_dir) - self.nocgi_path = None - self.file1_path = None - self.file2_path = None - self.file3_path = None - self.file4_path = None - self.file5_path = None - - # The shebang line should be pure ASCII: use symlink if possible. - # See issue #7668. - self._pythonexe_symlink = None - if os_helper.can_symlink(): - self.pythonexe = os.path.join(self.parent_dir, 'python') - self._pythonexe_symlink = support.PythonSymlink(self.pythonexe).__enter__() - else: - self.pythonexe = sys.executable - - try: - # The python executable path is written as the first line of the - # CGI Python script. The encoding cookie cannot be used, and so the - # path should be encodable to the default script encoding (utf-8) - self.pythonexe.encode('utf-8') - except UnicodeEncodeError: - self.tearDown() - self.skipTest("Python executable path is not encodable to utf-8") - - self.nocgi_path = os.path.join(self.parent_dir, 'nocgi.py') - with open(self.nocgi_path, 'w', encoding='utf-8') as fp: - fp.write(cgi_file1 % self.pythonexe) - os.chmod(self.nocgi_path, 0o777) - - self.file1_path = os.path.join(self.cgi_dir, 'file1.py') - with open(self.file1_path, 'w', encoding='utf-8') as file1: - file1.write(cgi_file1 % self.pythonexe) - os.chmod(self.file1_path, 0o777) - - self.file2_path = os.path.join(self.cgi_dir, 'file2.py') - with open(self.file2_path, 'w', encoding='utf-8') as file2: - file2.write(cgi_file2 % self.pythonexe) - os.chmod(self.file2_path, 0o777) - - self.file3_path = os.path.join(self.cgi_child_dir, 'file3.py') - with open(self.file3_path, 'w', encoding='utf-8') as file3: - file3.write(cgi_file1 % self.pythonexe) - os.chmod(self.file3_path, 0o777) - - self.file4_path = os.path.join(self.cgi_dir, 'file4.py') - with open(self.file4_path, 'w', encoding='utf-8') as file4: - file4.write(cgi_file4 % (self.pythonexe, 'QUERY_STRING')) - os.chmod(self.file4_path, 0o777) - - self.file5_path = os.path.join(self.cgi_dir_in_sub_dir, 'file5.py') - with open(self.file5_path, 'w', encoding='utf-8') as file5: - file5.write(cgi_file1 % self.pythonexe) - os.chmod(self.file5_path, 0o777) - - self.file6_path = os.path.join(self.cgi_dir, 'file6.py') - with open(self.file6_path, 'w', encoding='utf-8') as file6: - file6.write(cgi_file6 % self.pythonexe) - os.chmod(self.file6_path, 0o777) - - os.chdir(self.parent_dir) - - def tearDown(self): - self.request_handler._test_case_self = None - try: - os.chdir(self.cwd) - if self._pythonexe_symlink: - self._pythonexe_symlink.__exit__(None, None, None) - if self.nocgi_path: - os.remove(self.nocgi_path) - if self.file1_path: - os.remove(self.file1_path) - if 
self.file2_path: - os.remove(self.file2_path) - if self.file3_path: - os.remove(self.file3_path) - if self.file4_path: - os.remove(self.file4_path) - if self.file5_path: - os.remove(self.file5_path) - if self.file6_path: - os.remove(self.file6_path) - os.rmdir(self.cgi_child_dir) - os.rmdir(self.cgi_dir) - os.rmdir(self.cgi_dir_in_sub_dir) - os.rmdir(self.sub_dir_2) - os.rmdir(self.sub_dir_1) - # The 'gmon.out' file can be written in the current working - # directory if C-level code profiling with gprof is enabled. - os_helper.unlink(os.path.join(self.parent_dir, 'gmon.out')) - os.rmdir(self.parent_dir) - finally: - BaseTestCase.tearDown(self) - - def test_url_collapse_path(self): - # verify tail is the last portion and head is the rest on proper urls - test_vectors = { - '': '//', - '..': IndexError, - '/.//..': IndexError, - '/': '//', - '//': '//', - '/\\': '//\\', - '/.//': '//', - 'cgi-bin/file1.py': '/cgi-bin/file1.py', - '/cgi-bin/file1.py': '/cgi-bin/file1.py', - 'a': '//a', - '/a': '//a', - '//a': '//a', - './a': '//a', - './C:/': '/C:/', - '/a/b': '/a/b', - '/a/b/': '/a/b/', - '/a/b/.': '/a/b/', - '/a/b/c/..': '/a/b/', - '/a/b/c/../d': '/a/b/d', - '/a/b/c/../d/e/../f': '/a/b/d/f', - '/a/b/c/../d/e/../../f': '/a/b/f', - '/a/b/c/../d/e/.././././..//f': '/a/b/f', - '../a/b/c/../d/e/.././././..//f': IndexError, - '/a/b/c/../d/e/../../../f': '/a/f', - '/a/b/c/../d/e/../../../../f': '//f', - '/a/b/c/../d/e/../../../../../f': IndexError, - '/a/b/c/../d/e/../../../../f/..': '//', - '/a/b/c/../d/e/../../../../f/../.': '//', - } - for path, expected in test_vectors.items(): - if isinstance(expected, type) and issubclass(expected, Exception): - self.assertRaises(expected, - server._url_collapse_path, path) - else: - actual = server._url_collapse_path(path) - self.assertEqual(expected, actual, - msg='path = %r\nGot: %r\nWanted: %r' % - (path, actual, expected)) - - def test_headers_and_content(self): - res = self.request('/cgi-bin/file1.py') - self.assertEqual( - (res.read(), res.getheader('Content-type'), res.status), - (b'Hello World' + self.linesep, 'text/html', HTTPStatus.OK)) - - def test_issue19435(self): - res = self.request('///////////nocgi.py/../cgi-bin/nothere.sh') - self.assertEqual(res.status, HTTPStatus.NOT_FOUND) - - def test_post(self): - params = urllib.parse.urlencode( - {'spam' : 1, 'eggs' : 'python', 'bacon' : 123456}) - headers = {'Content-type' : 'application/x-www-form-urlencoded'} - res = self.request('/cgi-bin/file2.py', 'POST', params, headers) - - self.assertEqual(res.read(), b'1, python, 123456' + self.linesep) - - def test_invaliduri(self): - res = self.request('/cgi-bin/invalid') - res.read() - self.assertEqual(res.status, HTTPStatus.NOT_FOUND) - - def test_authorization(self): - headers = {b'Authorization' : b'Basic ' + - base64.b64encode(b'username:pass')} - res = self.request('/cgi-bin/file1.py', 'GET', headers=headers) - self.assertEqual( - (b'Hello World' + self.linesep, 'text/html', HTTPStatus.OK), - (res.read(), res.getheader('Content-type'), res.status)) - - def test_no_leading_slash(self): - # http://bugs.python.org/issue2254 - res = self.request('cgi-bin/file1.py') - self.assertEqual( - (b'Hello World' + self.linesep, 'text/html', HTTPStatus.OK), - (res.read(), res.getheader('Content-type'), res.status)) - - def test_os_environ_is_not_altered(self): - signature = "Test CGI Server" - os.environ['SERVER_SOFTWARE'] = signature - res = self.request('/cgi-bin/file1.py') - self.assertEqual( - (b'Hello World' + self.linesep, 'text/html', HTTPStatus.OK), - 
(res.read(), res.getheader('Content-type'), res.status)) - self.assertEqual(os.environ['SERVER_SOFTWARE'], signature) - - def test_urlquote_decoding_in_cgi_check(self): - res = self.request('/cgi-bin%2ffile1.py') - self.assertEqual( - (b'Hello World' + self.linesep, 'text/html', HTTPStatus.OK), - (res.read(), res.getheader('Content-type'), res.status)) - - def test_nested_cgi_path_issue21323(self): - res = self.request('/cgi-bin/child-dir/file3.py') - self.assertEqual( - (b'Hello World' + self.linesep, 'text/html', HTTPStatus.OK), - (res.read(), res.getheader('Content-type'), res.status)) - - def test_query_with_multiple_question_mark(self): - res = self.request('/cgi-bin/file4.py?a=b?c=d') - self.assertEqual( - (b'a=b?c=d' + self.linesep, 'text/html', HTTPStatus.OK), - (res.read(), res.getheader('Content-type'), res.status)) - - def test_query_with_continuous_slashes(self): - res = self.request('/cgi-bin/file4.py?k=aa%2F%2Fbb&//q//p//=//a//b//') - self.assertEqual( - (b'k=aa%2F%2Fbb&//q//p//=//a//b//' + self.linesep, - 'text/html', HTTPStatus.OK), - (res.read(), res.getheader('Content-type'), res.status)) - - def test_cgi_path_in_sub_directories(self): - try: - CGIHTTPRequestHandler.cgi_directories.append('/sub/dir/cgi-bin') - res = self.request('/sub/dir/cgi-bin/file5.py') - self.assertEqual( - (b'Hello World' + self.linesep, 'text/html', HTTPStatus.OK), - (res.read(), res.getheader('Content-type'), res.status)) - finally: - CGIHTTPRequestHandler.cgi_directories.remove('/sub/dir/cgi-bin') - - def test_accept(self): - browser_accept = \ - 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8' - tests = ( - ((('Accept', browser_accept),), browser_accept), - ((), ''), - # Hack case to get two values for the one header - ((('Accept', 'text/html'), ('ACCEPT', 'text/plain')), - 'text/html,text/plain'), - ) - for headers, expected in tests: - headers = OrderedDict(headers) - with self.subTest(headers): - res = self.request('/cgi-bin/file6.py', 'GET', headers=headers) - self.assertEqual(http.HTTPStatus.OK, res.status) - expected = f"HTTP_ACCEPT={expected}".encode('ascii') - self.assertIn(expected, res.read()) - class SocketlessRequestHandler(SimpleHTTPRequestHandler): def __init__(self, directory=None): @@ -1095,6 +839,7 @@ class SocketlessRequestHandler(SimpleHTTPRequestHandler): def log_message(self, format, *args): pass + class RejectingSocketlessRequestHandler(SocketlessRequestHandler): def handle_expect_100(self): self.send_error(HTTPStatus.EXPECTATION_FAILED) diff --git a/Lib/test/test_importlib/import_/test_relative_imports.py b/Lib/test/test_importlib/import_/test_relative_imports.py index e535d119763..1549cbe96ce 100644 --- a/Lib/test/test_importlib/import_/test_relative_imports.py +++ b/Lib/test/test_importlib/import_/test_relative_imports.py @@ -223,6 +223,21 @@ class RelativeImports: self.__import__('sys', {'__package__': '', '__spec__': None}, level=1) + def test_malicious_relative_import(self): + # https://github.com/python/cpython/issues/134100 + # Test to make sure UAF bug with error msg doesn't come back to life + import sys + loooong = "".ljust(0x23000, "b") + name = f"a.{loooong}.c" + + with util.uncache(name): + sys.modules[name] = {} + with self.assertRaisesRegex( + KeyError, + r"'a\.b+' not in sys\.modules as expected" + ): + __import__(f"{loooong}.c", {"__package__": "a"}, level=1) + (Frozen_RelativeImports, Source_RelativeImports diff --git a/Lib/test/test_interpreters/test_api.py b/Lib/test/test_interpreters/test_api.py index 66c7afce88f..1e2d572b1cb 100644 
--- a/Lib/test/test_interpreters/test_api.py +++ b/Lib/test/test_interpreters/test_api.py @@ -1452,6 +1452,14 @@ class LowLevelTests(TestBase): self.assertFalse( self.interp_exists(interpid)) + with self.subTest('basic C-API'): + interpid = _testinternalcapi.create_interpreter() + self.assertTrue( + self.interp_exists(interpid)) + _testinternalcapi.destroy_interpreter(interpid, basic=True) + self.assertFalse( + self.interp_exists(interpid)) + def test_get_config(self): # This test overlaps with # test.test_capi.test_misc.InterpreterConfigTests. diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index 1e5adcc8db1..fa5b1e43816 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -61,7 +61,7 @@ import warnings import weakref from http.server import HTTPServer, BaseHTTPRequestHandler -from unittest.mock import patch +from unittest.mock import call, Mock, patch from urllib.parse import urlparse, parse_qs from socketserver import (ThreadingUDPServer, DatagramRequestHandler, ThreadingTCPServer, StreamRequestHandler) @@ -5655,12 +5655,19 @@ class BasicConfigTest(unittest.TestCase): assertRaises = self.assertRaises handlers = [logging.StreamHandler()] stream = sys.stderr + formatter = logging.Formatter() assertRaises(ValueError, logging.basicConfig, filename='test.log', stream=stream) assertRaises(ValueError, logging.basicConfig, filename='test.log', handlers=handlers) assertRaises(ValueError, logging.basicConfig, stream=stream, handlers=handlers) + assertRaises(ValueError, logging.basicConfig, formatter=formatter, + format='%(message)s') + assertRaises(ValueError, logging.basicConfig, formatter=formatter, + datefmt='%H:%M:%S') + assertRaises(ValueError, logging.basicConfig, formatter=formatter, + style='%') # Issue 23207: test for invalid kwargs assertRaises(ValueError, logging.basicConfig, loglevel=logging.INFO) # Should pop both filename and filemode even if filename is None @@ -5795,6 +5802,20 @@ class BasicConfigTest(unittest.TestCase): # didn't write anything due to the encoding error self.assertEqual(data, r'') + def test_formatter_given(self): + mock_formatter = Mock() + mock_handler = Mock(formatter=None) + with patch("logging.Formatter") as mock_formatter_init: + logging.basicConfig(formatter=mock_formatter, handlers=[mock_handler]) + self.assertEqual(mock_handler.setFormatter.call_args_list, [call(mock_formatter)]) + self.assertEqual(mock_formatter_init.call_count, 0) + + def test_formatter_not_given(self): + mock_handler = Mock(formatter=None) + with patch("logging.Formatter") as mock_formatter_init: + logging.basicConfig(handlers=[mock_handler]) + self.assertEqual(mock_formatter_init.call_count, 1) + @support.requires_working_socket() def test_log_taskName(self): async def log_record(): diff --git a/Lib/test/test_platform.py b/Lib/test/test_platform.py index 818e807dd3a..3b673a47c8c 100644 --- a/Lib/test/test_platform.py +++ b/Lib/test/test_platform.py @@ -383,15 +383,6 @@ class PlatformTest(unittest.TestCase): finally: platform._uname_cache = None - def test_java_ver(self): - import re - msg = re.escape( - "'java_ver' is deprecated and slated for removal in Python 3.15" - ) - with self.assertWarnsRegex(DeprecationWarning, msg): - res = platform.java_ver() - self.assertEqual(len(res), 4) - @unittest.skipUnless(support.MS_WINDOWS, 'This test only makes sense on Windows') def test_win32_ver(self): release1, version1, csd1, ptype1 = 'a', 'b', 'c', 'd' diff --git a/Lib/test/test_string/test_templatelib.py b/Lib/test/test_string/test_templatelib.py index 
5b9490c2be6..85fcff486d6 100644 --- a/Lib/test/test_string/test_templatelib.py +++ b/Lib/test/test_string/test_templatelib.py @@ -148,6 +148,13 @@ class TemplateIterTests(unittest.TestCase): self.assertEqual(res[1].format_spec, '') self.assertEqual(res[2], ' yz') + def test_exhausted(self): + # See https://github.com/python/cpython/issues/134119. + template_iter = iter(t"{1}") + self.assertIsInstance(next(template_iter), Interpolation) + self.assertRaises(StopIteration, next, template_iter) + self.assertRaises(StopIteration, next, template_iter) + if __name__ == '__main__': unittest.main() diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 0eccf03a1a9..c7ac7914158 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -1431,6 +1431,23 @@ Better error message for using `except as` with not a name: Traceback (most recent call last): SyntaxError: cannot use except* statement with literal +Regression tests for gh-133999: + + >>> try: pass + ... except TypeError as name: raise from None + Traceback (most recent call last): + SyntaxError: invalid syntax + + >>> try: pass + ... except* TypeError as name: raise from None + Traceback (most recent call last): + SyntaxError: invalid syntax + + >>> match 1: + ... case 1 | 2 as abc: raise from None + Traceback (most recent call last): + SyntaxError: invalid syntax + Ensure that early = are not matched by the parser as invalid comparisons >>> f(2, 4, x=34); 1 $ 2 Traceback (most recent call last): diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index 8af2e3488b4..fb1c8492a64 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -2176,6 +2176,13 @@ raise Exception("Remote script exception") with self.assertRaises(OSError): sys.remote_exec(99999, "print('should not run')") + def test_remote_exec_invalid_script(self): + """Test remote exec with invalid script type""" + with self.assertRaises(TypeError): + sys.remote_exec(0, None) + with self.assertRaises(TypeError): + sys.remote_exec(0, 123) + def test_remote_exec_syntax_error(self): """Test remote exec with syntax error in script""" script = ''' diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py index 2d9649237a9..2018a20afd1 100644 --- a/Lib/test/test_tarfile.py +++ b/Lib/test/test_tarfile.py @@ -3490,11 +3490,12 @@ class ArchiveMaker: with t.open() as tar: ... # `tar` is now a TarFile with 'filename' in it! """ - def __init__(self): + def __init__(self, **kwargs): self.bio = io.BytesIO() + self.tar_kwargs = dict(kwargs) def __enter__(self): - self.tar_w = tarfile.TarFile(mode='w', fileobj=self.bio) + self.tar_w = tarfile.TarFile(mode='w', fileobj=self.bio, **self.tar_kwargs) return self def __exit__(self, *exc): @@ -4073,7 +4074,10 @@ class TestExtractionFilters(unittest.TestCase): # that in the test archive.) 
with tarfile.TarFile.open(tarname) as tar: for tarinfo in tar.getmembers(): - filtered = tarfile.tar_filter(tarinfo, '') + try: + filtered = tarfile.tar_filter(tarinfo, '') + except UnicodeEncodeError: + continue self.assertIs(filtered.name, tarinfo.name) self.assertIs(filtered.type, tarinfo.type) @@ -4084,11 +4088,48 @@ class TestExtractionFilters(unittest.TestCase): for tarinfo in tar.getmembers(): try: filtered = tarfile.data_filter(tarinfo, '') - except tarfile.FilterError: + except (tarfile.FilterError, UnicodeEncodeError): continue self.assertIs(filtered.name, tarinfo.name) self.assertIs(filtered.type, tarinfo.type) + @unittest.skipIf(sys.platform == 'win32', 'requires native bytes paths') + def test_filter_unencodable(self): + # Sanity check using a valid path. + tarinfo = tarfile.TarInfo(os_helper.TESTFN) + filtered = tarfile.tar_filter(tarinfo, '') + self.assertIs(filtered.name, tarinfo.name) + filtered = tarfile.data_filter(tarinfo, '') + self.assertIs(filtered.name, tarinfo.name) + + tarinfo = tarfile.TarInfo('test\x00') + self.assertRaises(ValueError, tarfile.tar_filter, tarinfo, '') + self.assertRaises(ValueError, tarfile.data_filter, tarinfo, '') + tarinfo = tarfile.TarInfo('\ud800') + self.assertRaises(UnicodeEncodeError, tarfile.tar_filter, tarinfo, '') + self.assertRaises(UnicodeEncodeError, tarfile.data_filter, tarinfo, '') + + @unittest.skipIf(sys.platform == 'win32', 'requires native bytes paths') + def test_extract_unencodable(self): + # Create a member with name \xed\xa0\x80 which is UTF-8 encoded + # lone surrogate \ud800. + with ArchiveMaker(encoding='ascii', errors='surrogateescape') as arc: + arc.add('\udced\udca0\udc80') + with os_helper.temp_cwd() as tmp: + tar = arc.open(encoding='utf-8', errors='surrogatepass', + errorlevel=1) + self.assertEqual(tar.getnames(), ['\ud800']) + with self.assertRaises(UnicodeEncodeError): + tar.extractall() + self.assertEqual(os.listdir(), []) + + tar = arc.open(encoding='utf-8', errors='surrogatepass', + errorlevel=0, debug=1) + with support.captured_stderr() as stderr: + tar.extractall() + self.assertEqual(os.listdir(), []) + self.assertIn('tarfile: UnicodeEncodeError ', stderr.getvalue()) + def test_change_default_filter_on_instance(self): tar = tarfile.TarFile(tarname, 'r') def strict_filter(tarinfo, path): diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py index 6ef633e4545..246be22a0d8 100644 --- a/Lib/test/test_typing.py +++ b/Lib/test/test_typing.py @@ -8487,6 +8487,36 @@ class TypedDictTests(BaseTestCase): self.assertEqual(Child.__required_keys__, frozenset(['a'])) self.assertEqual(Child.__optional_keys__, frozenset()) + def test_inheritance_pep563(self): + def _make_td(future, class_name, annos, base, extra_names=None): + lines = [] + if future: + lines.append('from __future__ import annotations') + lines.append('from typing import TypedDict') + lines.append(f'class {class_name}({base}):') + for name, anno in annos.items(): + lines.append(f' {name}: {anno}') + code = '\n'.join(lines) + ns = run_code(code, extra_names) + return ns[class_name] + + for base_future in (True, False): + for child_future in (True, False): + with self.subTest(base_future=base_future, child_future=child_future): + base = _make_td( + base_future, "Base", {"base": "int"}, "TypedDict" + ) + self.assertIsNotNone(base.__annotate__) + child = _make_td( + child_future, "Child", {"child": "int"}, "Base", {"Base": base} + ) + base_anno = ForwardRef("int", module="builtins") if base_future else int + child_anno = ForwardRef("int", 
module="builtins") if child_future else int + self.assertEqual(base.__annotations__, {'base': base_anno}) + self.assertEqual( + child.__annotations__, {'child': child_anno, 'base': base_anno} + ) + def test_required_notrequired_keys(self): self.assertEqual(NontotalMovie.__required_keys__, frozenset({"title"})) diff --git a/Lib/test/test_wave.py b/Lib/test/test_wave.py index 5e771c8de96..6c3362857fc 100644 --- a/Lib/test/test_wave.py +++ b/Lib/test/test_wave.py @@ -136,32 +136,6 @@ class MiscTestCase(unittest.TestCase): not_exported = {'WAVE_FORMAT_PCM', 'WAVE_FORMAT_EXTENSIBLE', 'KSDATAFORMAT_SUBTYPE_PCM'} support.check__all__(self, wave, not_exported=not_exported) - def test_read_deprecations(self): - filename = support.findfile('pluck-pcm8.wav', subdir='audiodata') - with wave.open(filename) as reader: - with self.assertWarns(DeprecationWarning): - with self.assertRaises(wave.Error): - reader.getmark('mark') - with self.assertWarns(DeprecationWarning): - self.assertIsNone(reader.getmarkers()) - - def test_write_deprecations(self): - with io.BytesIO(b'') as tmpfile: - with wave.open(tmpfile, 'wb') as writer: - writer.setnchannels(1) - writer.setsampwidth(1) - writer.setframerate(1) - writer.setcomptype('NONE', 'not compressed') - - with self.assertWarns(DeprecationWarning): - with self.assertRaises(wave.Error): - writer.setmark(0, 0, 'mark') - with self.assertWarns(DeprecationWarning): - with self.assertRaises(wave.Error): - writer.getmark('mark') - with self.assertWarns(DeprecationWarning): - self.assertIsNone(writer.getmarkers()) - class WaveLowLevelTest(unittest.TestCase): diff --git a/Lib/typing.py b/Lib/typing.py index 3d64480e143..98af61be8b0 100644 --- a/Lib/typing.py +++ b/Lib/typing.py @@ -3048,14 +3048,16 @@ class _TypedDictMeta(type): else: generic_base = () + ns_annotations = ns.pop('__annotations__', None) + tp_dict = type.__new__(_TypedDictMeta, name, (*generic_base, dict), ns) if not hasattr(tp_dict, '__orig_bases__'): tp_dict.__orig_bases__ = bases - if "__annotations__" in ns: + if ns_annotations is not None: own_annotate = None - own_annotations = ns["__annotations__"] + own_annotations = ns_annotations elif (own_annotate := _lazy_annotationlib.get_annotate_from_class_namespace(ns)) is not None: own_annotations = _lazy_annotationlib.call_annotate_function( own_annotate, _lazy_annotationlib.Format.FORWARDREF, owner=tp_dict @@ -3126,7 +3128,7 @@ class _TypedDictMeta(type): if base_annotate is None: continue base_annos = _lazy_annotationlib.call_annotate_function( - base.__annotate__, format, owner=base) + base_annotate, format, owner=base) annos.update(base_annos) if own_annotate is not None: own = _lazy_annotationlib.call_annotate_function( diff --git a/Lib/venv/__init__.py b/Lib/venv/__init__.py index 15e15b7a518..dc9c5991df7 100644 --- a/Lib/venv/__init__.py +++ b/Lib/venv/__init__.py @@ -313,11 +313,8 @@ class EnvBuilder: copier(context.executable, path) if not os.path.islink(path): os.chmod(path, 0o755) - - suffixes = ['python', 'python3', f'python3.{sys.version_info[1]}'] - if sys.version_info[:2] == (3, 14): - suffixes.append('𝜋thon') - for suffix in suffixes: + for suffix in ('python', 'python3', + f'python3.{sys.version_info[1]}'): path = os.path.join(binpath, suffix) if not os.path.exists(path): # Issue 18807: make copies if diff --git a/Lib/wave.py b/Lib/wave.py index a34af244c3e..929609fa524 100644 --- a/Lib/wave.py +++ b/Lib/wave.py @@ -20,10 +20,6 @@ This returns an instance of a class with the following public methods: compression type ('not compressed' 
linear samples) getparams() -- returns a namedtuple consisting of all of the above in the above order - getmarkers() -- returns None (for compatibility with the - old aifc module) - getmark(id) -- raises an error since the mark does not - exist (for compatibility with the old aifc module) readframes(n) -- returns at most n frames of audio rewind() -- rewind to the beginning of the audio stream setpos(pos) -- seek to the specified position @@ -341,16 +337,6 @@ class Wave_read: self.getframerate(), self.getnframes(), self.getcomptype(), self.getcompname()) - def getmarkers(self): - import warnings - warnings._deprecated("Wave_read.getmarkers", remove=(3, 15)) - return None - - def getmark(self, id): - import warnings - warnings._deprecated("Wave_read.getmark", remove=(3, 15)) - raise Error('no marks') - def setpos(self, pos): if pos < 0 or pos > self._nframes: raise Error('position not in range') @@ -551,21 +537,6 @@ class Wave_write: return _wave_params(self._nchannels, self._sampwidth, self._framerate, self._nframes, self._comptype, self._compname) - def setmark(self, id, pos, name): - import warnings - warnings._deprecated("Wave_write.setmark", remove=(3, 15)) - raise Error('setmark() not supported') - - def getmark(self, id): - import warnings - warnings._deprecated("Wave_write.getmark", remove=(3, 15)) - raise Error('no marks') - - def getmarkers(self): - import warnings - warnings._deprecated("Wave_write.getmarkers", remove=(3, 15)) - return None - def tell(self): return self._nframeswritten diff --git a/Lib/wsgiref/handlers.py b/Lib/wsgiref/handlers.py index cafe872c7aa..9353fb67862 100644 --- a/Lib/wsgiref/handlers.py +++ b/Lib/wsgiref/handlers.py @@ -69,7 +69,8 @@ def read_environ(): # Python 3's http.server.CGIHTTPRequestHandler decodes # using the urllib.unquote default of UTF-8, amongst other - # issues. + # issues. While the CGI handler is removed in 3.15, this + # is kept for legacy reasons. elif ( software.startswith('simplehttp/') and 'python/3' in software diff --git a/Makefile.pre.in b/Makefile.pre.in index 452c4ad35ac..3ab7c3d6c48 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -2512,8 +2512,7 @@ maninstall: altmaninstall XMLLIBSUBDIRS= xml xml/dom xml/etree xml/parsers xml/sax LIBSUBDIRS= asyncio \ collections \ - compression compression/bz2 compression/gzip compression/zstd \ - compression/lzma compression/zlib compression/_common \ + compression compression/_common compression/zstd \ concurrent concurrent/futures \ csv \ ctypes ctypes/macholib \ diff --git a/Misc/NEWS.d/3.13.0a1.rst b/Misc/NEWS.d/3.13.0a1.rst index 304baf6ac8e..0a93cbcea0f 100644 --- a/Misc/NEWS.d/3.13.0a1.rst +++ b/Misc/NEWS.d/3.13.0a1.rst @@ -2294,7 +2294,7 @@ superclass. Patch by James Hilton-Balfe .. nonce: VksX1D .. section: Library -:class:`http.server.CGIHTTPRequestHandler` has been deprecated for removal +:class:`!http.server.CGIHTTPRequestHandler` has been deprecated for removal in 3.15. Its design is old and the web world has long since moved beyond CGI. diff --git a/Misc/NEWS.d/next/C_API/2025-05-17-14-41-21.gh-issue-134144.xVpZik.rst b/Misc/NEWS.d/next/C_API/2025-05-17-14-41-21.gh-issue-134144.xVpZik.rst new file mode 100644 index 00000000000..11c7bd59a4d --- /dev/null +++ b/Misc/NEWS.d/next/C_API/2025-05-17-14-41-21.gh-issue-134144.xVpZik.rst @@ -0,0 +1 @@ +Fix crash when calling :c:func:`Py_EndInterpreter` with a :term:`thread state` that isn't the initial thread for the interpreter. 
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-05-15-11-38-16.gh-issue-133999.uBZ8uS.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-15-11-38-16.gh-issue-133999.uBZ8uS.rst new file mode 100644 index 00000000000..7d9c49688c3 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-15-11-38-16.gh-issue-133999.uBZ8uS.rst @@ -0,0 +1,2 @@ +Fix :exc:`SyntaxError` regression in :keyword:`except` parsing after +:gh:`123440`. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-05-16-17-25-52.gh-issue-134100.5-FbLK.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-16-17-25-52.gh-issue-134100.5-FbLK.rst new file mode 100644 index 00000000000..d672347f9ad --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-16-17-25-52.gh-issue-134100.5-FbLK.rst @@ -0,0 +1,2 @@ +Fix a use-after-free bug that occurs when an imported module isn't +in :data:`sys.modules` after its initial import. Patch by Nico-Posada. diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-05-16-20-59-12.gh-issue-134119.w8expI.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-16-20-59-12.gh-issue-134119.w8expI.rst new file mode 100644 index 00000000000..754e8166285 --- /dev/null +++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-16-20-59-12.gh-issue-134119.w8expI.rst @@ -0,0 +1,2 @@ +Fix crash when calling :func:`next` on an exhausted template string iterator. +Patch by Jelle Zijlstra. diff --git a/Misc/NEWS.d/next/Library/2025-04-21-01-05-14.gh-issue-127081.Egrpq7.rst b/Misc/NEWS.d/next/Library/2025-04-21-01-05-14.gh-issue-127081.Egrpq7.rst new file mode 100644 index 00000000000..30643673bf9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-04-21-01-05-14.gh-issue-127081.Egrpq7.rst @@ -0,0 +1,2 @@ +Fix libc thread safety issues with :mod:`dbm` by performing stateful +operations in critical sections. diff --git a/Misc/NEWS.d/next/Library/2025-05-07-14-36-30.gh-issue-133577.BggPk9.rst b/Misc/NEWS.d/next/Library/2025-05-07-14-36-30.gh-issue-133577.BggPk9.rst new file mode 100644 index 00000000000..9d056983439 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-05-07-14-36-30.gh-issue-133577.BggPk9.rst @@ -0,0 +1 @@ +Add parameter ``formatter`` to :func:`logging.basicConfig`. diff --git a/Misc/NEWS.d/next/Library/2025-05-09-08-49-03.gh-issue-133701.KI8tGz.rst b/Misc/NEWS.d/next/Library/2025-05-09-08-49-03.gh-issue-133701.KI8tGz.rst new file mode 100644 index 00000000000..163d9b331d1 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-05-09-08-49-03.gh-issue-133701.KI8tGz.rst @@ -0,0 +1,3 @@ +Fix bug where :class:`typing.TypedDict` classes defined under ``from +__future__ import annotations`` and inheriting from another ``TypedDict`` +had an incorrect ``__annotations__`` attribute. diff --git a/Misc/NEWS.d/next/Library/2025-05-09-20-59-24.gh-issue-132641.3qTw44.rst b/Misc/NEWS.d/next/Library/2025-05-09-20-59-24.gh-issue-132641.3qTw44.rst new file mode 100644 index 00000000000..419ff19d9c0 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-05-09-20-59-24.gh-issue-132641.3qTw44.rst @@ -0,0 +1 @@ +Fixed a race in :func:`functools.lru_cache` under free-threading. diff --git a/Misc/NEWS.d/next/Library/2025-05-10-11-04-47.gh-issue-133810.03WhnK.rst b/Misc/NEWS.d/next/Library/2025-05-10-11-04-47.gh-issue-133810.03WhnK.rst new file mode 100644 index 00000000000..4073974e364 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-05-10-11-04-47.gh-issue-133810.03WhnK.rst @@ -0,0 +1,3 @@ +Remove :class:`!http.server.CGIHTTPRequestHandler` and ``--cgi`` flag from the +:program:`python -m http.server` command-line interface. 
They were +deprecated in Python 3.13. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2025-05-11-10-28-11.gh-issue-133873.H03nov.rst b/Misc/NEWS.d/next/Library/2025-05-11-10-28-11.gh-issue-133873.H03nov.rst new file mode 100644 index 00000000000..79f630b2418 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-05-11-10-28-11.gh-issue-133873.H03nov.rst @@ -0,0 +1,3 @@ +Remove the deprecated ``getmark()``, ``setmark()`` and ``getmarkers()`` +methods of the :class:`~wave.Wave_read` and :class:`~wave.Wave_write` +classes, which were deprecated since Python 3.13. Patch by Bénédikt Tran. diff --git a/Misc/NEWS.d/next/Library/2025-05-11-12-56-52.gh-issue-133604.kFxhc8.rst b/Misc/NEWS.d/next/Library/2025-05-11-12-56-52.gh-issue-133604.kFxhc8.rst new file mode 100644 index 00000000000..526ac38f09b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-05-11-12-56-52.gh-issue-133604.kFxhc8.rst @@ -0,0 +1 @@ +Remove :func:`!platform.java_ver` which was deprecated since Python 3.13. diff --git a/Misc/NEWS.d/next/Library/2025-05-13-18-54-56.gh-issue-133970.6G-Oi6.rst b/Misc/NEWS.d/next/Library/2025-05-13-18-54-56.gh-issue-133970.6G-Oi6.rst new file mode 100644 index 00000000000..ddf456d3939 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-05-13-18-54-56.gh-issue-133970.6G-Oi6.rst @@ -0,0 +1,2 @@ +Make :class:`!string.templatelib.Template` and +:class:`!string.templatelib.Interpolation` generic. diff --git a/Misc/NEWS.d/next/Library/2025-05-16-20-10-25.gh-issue-134098.YyTkKr.rst b/Misc/NEWS.d/next/Library/2025-05-16-20-10-25.gh-issue-134098.YyTkKr.rst new file mode 100644 index 00000000000..32eff5371c4 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-05-16-20-10-25.gh-issue-134098.YyTkKr.rst @@ -0,0 +1,2 @@ +Fix handling of paths that end with a percent-encoded slash (``%2f`` or +``%2F``) in :class:`http.server.SimpleHTTPRequestHandler`. diff --git a/Misc/NEWS.d/next/Library/2025-05-17-12-40-12.gh-issue-133889.Eh-zO4.rst b/Misc/NEWS.d/next/Library/2025-05-17-12-40-12.gh-issue-133889.Eh-zO4.rst new file mode 100644 index 00000000000..58b213e29f9 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-05-17-12-40-12.gh-issue-133889.Eh-zO4.rst @@ -0,0 +1,3 @@ +The generated directory listing page in +:class:`http.server.SimpleHTTPRequestHandler` now only shows the decoded +path component of the requested URL, and not the query and fragment. diff --git a/Misc/NEWS.d/next/Library/2025-05-17-18-08-35.gh-issue-133890.onn9_X.rst b/Misc/NEWS.d/next/Library/2025-05-17-18-08-35.gh-issue-133890.onn9_X.rst new file mode 100644 index 00000000000..44565a5424e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-05-17-18-08-35.gh-issue-133890.onn9_X.rst @@ -0,0 +1,2 @@ +The :mod:`tarfile` module now handles :exc:`UnicodeEncodeError` in the same +way as :exc:`OSError` when it cannot extract a member. diff --git a/Misc/NEWS.d/next/Library/2025-05-18-07-25-15.gh-issue-134173.53oOoF.rst b/Misc/NEWS.d/next/Library/2025-05-18-07-25-15.gh-issue-134173.53oOoF.rst new file mode 100644 index 00000000000..57fba5e21a3 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2025-05-18-07-25-15.gh-issue-134173.53oOoF.rst @@ -0,0 +1,3 @@ +Speed up :mod:`asyncio` performance of transferring state from thread +pool :class:`concurrent.futures.Future` by up to 4.4x. Patch by J. Nick +Koston.
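A minimal usage sketch (illustrative only, not part of the patch) of the code path the gh-issue-134173 entry above refers to: ``loop.run_in_executor()`` returns an asyncio future whose result is copied over from the thread pool's ``concurrent.futures.Future`` once the callable finishes, and it is that state transfer which the entry says was sped up. The ``blocking_work`` name is a hypothetical placeholder::

    import asyncio
    import concurrent.futures

    def blocking_work(x):
        # Runs on a worker thread of the ThreadPoolExecutor.
        return x * 2

    async def main():
        loop = asyncio.get_running_loop()
        with concurrent.futures.ThreadPoolExecutor() as pool:
            # run_in_executor() wraps the pool's concurrent.futures.Future in
            # an asyncio future; the result is transferred back on the event
            # loop once blocking_work() returns.
            result = await loop.run_in_executor(pool, blocking_work, 21)
        print(result)  # 42

    asyncio.run(main())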
diff --git a/Misc/NEWS.d/next/Windows/2025-05-07-09-02-19.gh-issue-133562.lqqNW1.rst b/Misc/NEWS.d/next/Windows/2025-05-07-09-02-19.gh-issue-133562.lqqNW1.rst new file mode 100644 index 00000000000..884425b839a --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2025-05-07-09-02-19.gh-issue-133562.lqqNW1.rst @@ -0,0 +1 @@ +Disable handling of security descriptors by :func:`os.mkdir` with mode ``0o700`` on WinAPI partitions that do not support it. This only affects custom builds for specialized targets. diff --git a/Misc/NEWS.d/next/Windows/2025-05-07-11-25-29.gh-issue-133568.oYV0d8.rst b/Misc/NEWS.d/next/Windows/2025-05-07-11-25-29.gh-issue-133568.oYV0d8.rst new file mode 100644 index 00000000000..1e2a5b582cb --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2025-05-07-11-25-29.gh-issue-133568.oYV0d8.rst @@ -0,0 +1 @@ +Fix compile error when using a WinAPI partition that doesn't support the RPC runtime library. diff --git a/Misc/NEWS.d/next/Windows/2025-05-07-11-45-30.gh-issue-133572.Xc2zxH.rst b/Misc/NEWS.d/next/Windows/2025-05-07-11-45-30.gh-issue-133572.Xc2zxH.rst new file mode 100644 index 00000000000..993eceab0b7 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2025-05-07-11-45-30.gh-issue-133572.Xc2zxH.rst @@ -0,0 +1 @@ +Avoid LsaNtStatus to WinError conversion on unsupported WinAPI partitions. diff --git a/Misc/NEWS.d/next/Windows/2025-05-07-13-04-22.gh-issue-133580.jBMujJ.rst b/Misc/NEWS.d/next/Windows/2025-05-07-13-04-22.gh-issue-133580.jBMujJ.rst new file mode 100644 index 00000000000..414a6f87e65 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2025-05-07-13-04-22.gh-issue-133580.jBMujJ.rst @@ -0,0 +1 @@ +Fix :func:`sys.getwindowsversion` failing without setting an exception when called on some WinAPI partitions. diff --git a/Misc/sbom.spdx.json b/Misc/sbom.spdx.json index 4a697d047ca..10a5c646d2a 100644 --- a/Misc/sbom.spdx.json +++ b/Misc/sbom.spdx.json @@ -314,11 +314,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "4b6e7696e8d84f322fb24b1fbb08ccb9b0e7d51b" + "checksumValue": "808af7ff8a2cb2b4ef3a9ce3dbfef58d90828c9f" }, { "algorithm": "SHA256", - "checksumValue": "50a65a34a7a7569eedf7fa864a7892eeee5840a7fdf6fa8f1e87d42c65f6c877" + "checksumValue": "6a492aa586f2d10b1b300ce8ce4c72c976ff7548fee667aded2253f99969ac87" } ], "fileName": "Modules/_hacl/Hacl_Hash_Blake2b.c" @@ -342,11 +342,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "0f75e44a42775247a46acc2beaa6bae8f199a3d9" + "checksumValue": "1bb072f2be9e5d194274fdcc87825cb094e4b32e" }, { "algorithm": "SHA256", - "checksumValue": "03b612c24193464ed6848aeebbf44f9266b78ec6eed2486056211cde8992c49a" + "checksumValue": "9eb22953ce60dde9dc970fec9dfce9d94235f4b7ccd8f0151cad4707dc835d1d" } ], "fileName": "Modules/_hacl/Hacl_Hash_Blake2b_Simd256.c" @@ -384,11 +384,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "65bf44140691b046dcfed3ab1576dbf8bbf96dc5" + "checksumValue": "cdd6e9ca86dbede92d1a47b9224d2af70c599a71" }, { "algorithm": "SHA256", - "checksumValue": "0f98959dafffce039ade9d296f7a05bed151c9c512498f48e4b326a5523a240b" + "checksumValue": "f3204f3e60734d811b6630f879b69ce54eaf367f3fca5889c1026e7a1f3ee1e4" } ], "fileName": "Modules/_hacl/Hacl_Hash_Blake2s.c" @@ -412,11 +412,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "0da9782455923aede8d8dce9dfdc38f4fc1de572" + "checksumValue": "4056bb6e3ed184400d1610bdfd4260b3fd05ccbb" }, { "algorithm": "SHA256", - "checksumValue": "2d17ae768fd3d7d6decddd8b4aaf23ce02a809ee62bb98da32c8a7f54acf92d0" + "checksumValue": 
"c93746df2f219cbb1634ee6fb0ab1c4cbd381d1f36c637114c68346c9935326d" } ], "fileName": "Modules/_hacl/Hacl_Hash_Blake2s_Simd128.c" @@ -454,11 +454,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "38e8d96ef1879480780494058a93cec181f8d6d7" + "checksumValue": "61f678cd9234c6eab5d4409ae66f470b068b959b" }, { "algorithm": "SHA256", - "checksumValue": "61e77d2063cf60c96e9ce06af215efe5d42c43026833bffed5732326fe97ed1e" + "checksumValue": "5b91ed0339074e2e546119833398e2cdb7190c33a59c405bf43b2417c789547d" } ], "fileName": "Modules/_hacl/Hacl_Hash_MD5.c" @@ -482,11 +482,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "986dd5ba0b34d15f3e5e5c656979aea1b502e8aa" + "checksumValue": "c7fc5c9721caf37c5a24c9beff27b0ac2ed68cc9" }, { "algorithm": "SHA256", - "checksumValue": "38d5f1f2e67a0eb30789f81fc56c07a6e7246e2b1be6c65485bcca1dcd0e0806" + "checksumValue": "ce08721d491f3b8a9bd4cde6c27bfcc8fc01471512ccca4bd3c0b764cb551d29" } ], "fileName": "Modules/_hacl/Hacl_Hash_SHA1.c" @@ -510,11 +510,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "f732a6710fe3e13cd28130f0f20504e347d1c412" + "checksumValue": "fffe8c4f67669ac8ccd87c2e0f95db2427481df1" }, { "algorithm": "SHA256", - "checksumValue": "86cf32e4d1f3ba93a94108271923fdafe2204447792a918acf4a2250f352dbde" + "checksumValue": "f8af382de7e29a73c726f9c70770498ddd99e2c4702489ed6e634f0b68597c95" } ], "fileName": "Modules/_hacl/Hacl_Hash_SHA2.c" @@ -538,11 +538,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "50f75337b31f509b5bfcc7ebb3d066b82a0f1b33" + "checksumValue": "77d3d879dfa5949030bca0e8ee75d3d369ec54a7" }, { "algorithm": "SHA256", - "checksumValue": "c9e1442899e5b902fa39f413f1a3131f7ab5c2283d5100dc8ac675a7d5ebbdf1" + "checksumValue": "8744f5b6e054c3e5c44f413a60f9154b76dd7230135dcee26fd063755ec64be1" } ], "fileName": "Modules/_hacl/Hacl_Hash_SHA3.c" @@ -566,11 +566,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "8140310f505bb2619a749777a91487d666237bcf" + "checksumValue": "417e68ac8498cb2f93a06a19003ea1cc3f0f6753" }, { "algorithm": "SHA256", - "checksumValue": "9d95e6a651c22185d9b7c38f363d30159f810e6fcdc2208f29492837ed891e82" + "checksumValue": "843db4bba78a476d4d53dabe4eed5c9235f490ccc9fdaf19e22af488af858920" } ], "fileName": "Modules/_hacl/Hacl_Streaming_HMAC.c" @@ -608,11 +608,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "c9651ef21479c4d8a3b04c5baa1902866dbb1cdf" + "checksumValue": "0a0b7f3714167ad45ddf5a6a48d76f525a119c9c" }, { "algorithm": "SHA256", - "checksumValue": "e039c82ba670606ca111573942baad800f75da467abbc74cd7d1fe175ebcdfaf" + "checksumValue": "135d4afb4812468885c963c9c87a55ba5fae9181df4431af5fbad08588dda229" } ], "fileName": "Modules/_hacl/Lib_Memzero0.c" @@ -622,11 +622,11 @@ "checksums": [ { "algorithm": "SHA1", - "checksumValue": "eaa543c778300238dc23034aafeada0951154af1" + "checksumValue": "911c97a0f24067635b164fbca49da76055f192cd" }, { "algorithm": "SHA256", - "checksumValue": "3fd2552d527a23110d61ad2811c774810efb1eaee008f136c2a0d609daa77f5b" + "checksumValue": "972b5111ebada8e11dd60df3119da1af505fd3e0b6c782ead6cab7f1daf134f1" } ], "fileName": "Modules/_hacl/include/krml/FStar_UInt128_Verified.h" @@ -1752,14 +1752,14 @@ "checksums": [ { "algorithm": "SHA256", - "checksumValue": "02dfcf0c79d488b120d7f2c2a0f9206301c7927ed5106545e0b6f2aef88da76a" + "checksumValue": "39f6fd4f2fe98aecc995a2fd980fae0e0835cef6e563ebbf25f69d3d3102bafd" } ], - "downloadLocation": "https://github.com/hacl-star/hacl-star/archive/7720f6d4fc0468a99d5ea6120976bcc271e42727.zip", + 
"downloadLocation": "https://github.com/hacl-star/hacl-star/archive/4ef25b547b377dcef855db4289c6a00580e7221c.zip", "externalRefs": [ { "referenceCategory": "SECURITY", - "referenceLocator": "cpe:2.3:a:hacl-star:hacl-star:7720f6d4fc0468a99d5ea6120976bcc271e42727:*:*:*:*:*:*:*", + "referenceLocator": "cpe:2.3:a:hacl-star:hacl-star:4ef25b547b377dcef855db4289c6a00580e7221c:*:*:*:*:*:*:*", "referenceType": "cpe23Type" } ], @@ -1767,7 +1767,7 @@ "name": "hacl-star", "originator": "Organization: HACL* Developers", "primaryPackagePurpose": "SOURCE", - "versionInfo": "7720f6d4fc0468a99d5ea6120976bcc271e42727" + "versionInfo": "4ef25b547b377dcef855db4289c6a00580e7221c" }, { "SPDXID": "SPDXRef-PACKAGE-macholib", diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index 404178ca623..9f5ad9f57b7 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -1389,7 +1389,7 @@ static PyObject *format_error(PyObject *self, PyObject *args) code = GetLastError(); lpMsgBuf = FormatError(code); if (lpMsgBuf) { - result = PyUnicode_FromWideChar(lpMsgBuf, wcslen(lpMsgBuf)); + result = PyUnicode_FromWideChar(lpMsgBuf, -1); LocalFree(lpMsgBuf); } else { result = PyUnicode_FromString("<no description>"); diff --git a/Modules/_ctypes/cfield.c b/Modules/_ctypes/cfield.c index 86bcc805360..163b9264261 100644 --- a/Modules/_ctypes/cfield.c +++ b/Modules/_ctypes/cfield.c @@ -1253,7 +1253,7 @@ Z_get(void *ptr, Py_ssize_t size) wchar_t *p; p = *(wchar_t **)ptr; if (p) { - return PyUnicode_FromWideChar(p, wcslen(p)); + return PyUnicode_FromWideChar(p, -1); } else { Py_RETURN_NONE; } diff --git a/Modules/_dbmmodule.c b/Modules/_dbmmodule.c index cc65cbd98d7..0cd0f043de4 100644 --- a/Modules/_dbmmodule.c +++ b/Modules/_dbmmodule.c @@ -69,6 +69,7 @@ typedef struct { #include "clinic/_dbmmodule.c.h" #define check_dbmobject_open(v, err) \ + _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED((v)) \ if ((v)->di_dbm == NULL) { \ PyErr_SetString(err, "DBM object has already been closed"); \ return NULL; \ @@ -116,7 +117,7 @@ dbm_dealloc(PyObject *self) } static Py_ssize_t -dbm_length(PyObject *self) +dbm_length_lock_held(PyObject *self) { dbmobject *dp = dbmobject_CAST(self); _dbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); @@ -138,8 +139,18 @@ dbm_length(PyObject *self) return dp->di_size; } +static Py_ssize_t +dbm_length(PyObject *self) +{ + Py_ssize_t result; + Py_BEGIN_CRITICAL_SECTION(self); + result = dbm_length_lock_held(self); + Py_END_CRITICAL_SECTION(); + return result; +} + static int -dbm_bool(PyObject *self) +dbm_bool_lock_held(PyObject *self) { dbmobject *dp = dbmobject_CAST(self); _dbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); @@ -170,8 +181,18 @@ dbm_bool(PyObject *self) return 1; } +static int +dbm_bool(PyObject *self) +{ + int result; + Py_BEGIN_CRITICAL_SECTION(self); + result = dbm_bool_lock_held(self); + Py_END_CRITICAL_SECTION(); + return result; +} + static PyObject * -dbm_subscript(PyObject *self, PyObject *key) +dbm_subscript_lock_held(PyObject *self, PyObject *key) { datum drec, krec; Py_ssize_t tmp_size; @@ -197,8 +218,18 @@ dbm_subscript(PyObject *self, PyObject *key) return PyBytes_FromStringAndSize(drec.dptr, drec.dsize); } +static PyObject * +dbm_subscript(PyObject *self, PyObject *key) +{ + PyObject *result; + Py_BEGIN_CRITICAL_SECTION(self); + result = dbm_subscript_lock_held(self, key); + Py_END_CRITICAL_SECTION(); + return result; +} + static int -dbm_ass_sub(PyObject *self, PyObject *v, PyObject *w) +dbm_ass_sub_lock_held(PyObject *self, PyObject *v, 
PyObject *w) { datum krec, drec; Py_ssize_t tmp_size; @@ -252,7 +283,18 @@ dbm_ass_sub(PyObject *self, PyObject *v, PyObject *w) return 0; } +static int +dbm_ass_sub(PyObject *self, PyObject *v, PyObject *w) +{ + int result; + Py_BEGIN_CRITICAL_SECTION(self); + result = dbm_ass_sub_lock_held(self, v, w); + Py_END_CRITICAL_SECTION(); + return result; +} + /*[clinic input] +@critical_section _dbm.dbm.close Close the database. @@ -260,7 +302,7 @@ Close the database. static PyObject * _dbm_dbm_close_impl(dbmobject *self) -/*[clinic end generated code: output=c8dc5b6709600b86 input=046db72377d51be8]*/ +/*[clinic end generated code: output=c8dc5b6709600b86 input=4a94f79facbc28ca]*/ { if (self->di_dbm) { dbm_close(self->di_dbm); @@ -270,6 +312,7 @@ _dbm_dbm_close_impl(dbmobject *self) } /*[clinic input] +@critical_section _dbm.dbm.keys cls: defining_class @@ -279,7 +322,7 @@ Return a list of all keys in the database. static PyObject * _dbm_dbm_keys_impl(dbmobject *self, PyTypeObject *cls) -/*[clinic end generated code: output=f2a593b3038e5996 input=d3706a28fc051097]*/ +/*[clinic end generated code: output=f2a593b3038e5996 input=6ddefeadf2a80156]*/ { PyObject *v, *item; datum key; @@ -310,7 +353,7 @@ _dbm_dbm_keys_impl(dbmobject *self, PyTypeObject *cls) } static int -dbm_contains(PyObject *self, PyObject *arg) +dbm_contains_lock_held(PyObject *self, PyObject *arg) { dbmobject *dp = dbmobject_CAST(self); datum key, val; @@ -343,7 +386,18 @@ dbm_contains(PyObject *self, PyObject *arg) return val.dptr != NULL; } +static int +dbm_contains(PyObject *self, PyObject *arg) +{ + int result; + Py_BEGIN_CRITICAL_SECTION(self); + result = dbm_contains_lock_held(self, arg); + Py_END_CRITICAL_SECTION(); + return result; +} + /*[clinic input] +@critical_section _dbm.dbm.get cls: defining_class key: str(accept={str, robuffer}, zeroes=True) @@ -356,7 +410,7 @@ Return the value for key if present, otherwise default. static PyObject * _dbm_dbm_get_impl(dbmobject *self, PyTypeObject *cls, const char *key, Py_ssize_t key_length, PyObject *default_value) -/*[clinic end generated code: output=b4e55f8b6d482bc4 input=66b993b8349fa8c1]*/ +/*[clinic end generated code: output=b4e55f8b6d482bc4 input=1d88a22bb5e55202]*/ { datum dbm_key, val; _dbm_state *state = PyType_GetModuleState(cls); @@ -373,6 +427,7 @@ _dbm_dbm_get_impl(dbmobject *self, PyTypeObject *cls, const char *key, } /*[clinic input] +@critical_section _dbm.dbm.setdefault cls: defining_class key: str(accept={str, robuffer}, zeroes=True) @@ -387,7 +442,7 @@ If key is not in the database, it is inserted with default as the value. static PyObject * _dbm_dbm_setdefault_impl(dbmobject *self, PyTypeObject *cls, const char *key, Py_ssize_t key_length, PyObject *default_value) -/*[clinic end generated code: output=9c2f6ea6d0fb576c input=126a3ff15c5f8232]*/ +/*[clinic end generated code: output=9c2f6ea6d0fb576c input=c01510ef7571e13b]*/ { datum dbm_key, val; Py_ssize_t tmp_size; @@ -427,6 +482,7 @@ _dbm_dbm_setdefault_impl(dbmobject *self, PyTypeObject *cls, const char *key, } /*[clinic input] +@critical_section _dbm.dbm.clear cls: defining_class / @@ -436,7 +492,7 @@ Remove all items from the database. 
static PyObject * _dbm_dbm_clear_impl(dbmobject *self, PyTypeObject *cls) -/*[clinic end generated code: output=8d126b9e1d01a434 input=43aa6ca1acb7f5f5]*/ +/*[clinic end generated code: output=8d126b9e1d01a434 input=a1aa5d99adfb9656]*/ { _dbm_state *state = PyType_GetModuleState(cls); assert(state != NULL); diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index 899eef50ecc..354dbad84b5 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -1383,8 +1383,8 @@ bounded_lru_cache_update_lock_held(lru_cache_object *self, this same key, then this setitem call will update the cache dict with this new link, leaving the old link as an orphan (i.e. not having a cache dict entry that refers to it). */ - if (_PyDict_SetItem_KnownHash(self->cache, key, (PyObject *)link, - hash) < 0) { + if (_PyDict_SetItem_KnownHash_LockHeld((PyDictObject *)self->cache, key, + (PyObject *)link, hash) < 0) { Py_DECREF(link); return NULL; } @@ -1453,8 +1453,8 @@ bounded_lru_cache_update_lock_held(lru_cache_object *self, for successful insertion in the cache dict before adding the link to the linked list. Otherwise, the potentially reentrant __eq__ call could cause the then orphan link to be visited. */ - if (_PyDict_SetItem_KnownHash(self->cache, key, (PyObject *)link, - hash) < 0) { + if (_PyDict_SetItem_KnownHash_LockHeld((PyDictObject *)self->cache, key, + (PyObject *)link, hash) < 0) { /* Somehow the cache dict update failed. We no longer can restore the old link. Let the error propagate upward and leave the cache short one link. */ @@ -1689,7 +1689,13 @@ _functools__lru_cache_wrapper_cache_clear_impl(PyObject *self) lru_list_elem *list = lru_cache_unlink_list(_self); FT_ATOMIC_STORE_SSIZE_RELAXED(_self->hits, 0); FT_ATOMIC_STORE_SSIZE_RELAXED(_self->misses, 0); - PyDict_Clear(_self->cache); + if (_self->wrapper == bounded_lru_cache_wrapper) { + /* The critical section on the lru cache itself protects the dictionary + for bounded_lru_cache instances. */ + _PyDict_Clear_LockHeld(_self->cache); + } else { + PyDict_Clear(_self->cache); + } lru_cache_clear_list(list); Py_RETURN_NONE; } diff --git a/Modules/_gdbmmodule.c b/Modules/_gdbmmodule.c index ab2ebdba924..9c402e20e51 100644 --- a/Modules/_gdbmmodule.c +++ b/Modules/_gdbmmodule.c @@ -81,6 +81,7 @@ typedef struct { #include "clinic/_gdbmmodule.c.h" #define check_gdbmobject_open(v, err) \ + _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED((v)) \ if ((v)->di_dbm == NULL) { \ PyErr_SetString(err, "GDBM object has already been closed"); \ return NULL; \ @@ -142,7 +143,7 @@ gdbm_dealloc(PyObject *op) } static Py_ssize_t -gdbm_length(PyObject *op) +gdbm_length_lock_held(PyObject *op) { gdbmobject *dp = _gdbmobject_CAST(op); _gdbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); @@ -188,8 +189,18 @@ gdbm_length(PyObject *op) return dp->di_size; } +static Py_ssize_t +gdbm_length(PyObject *op) +{ + Py_ssize_t result; + Py_BEGIN_CRITICAL_SECTION(op); + result = gdbm_length_lock_held(op); + Py_END_CRITICAL_SECTION(); + return result; +} + static int -gdbm_bool(PyObject *op) +gdbm_bool_lock_held(PyObject *op) { gdbmobject *dp = _gdbmobject_CAST(op); _gdbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); @@ -218,6 +229,16 @@ gdbm_bool(PyObject *op) return 1; } +static int +gdbm_bool(PyObject *op) +{ + int result; + Py_BEGIN_CRITICAL_SECTION(op); + result = gdbm_bool_lock_held(op); + Py_END_CRITICAL_SECTION(); + return result; +} + // Wrapper function for PyArg_Parse(o, "s#", &d.dptr, &d.size). 
// This function is needed to support PY_SSIZE_T_CLEAN. // Return 1 on success, same to PyArg_Parse(). @@ -240,7 +261,7 @@ parse_datum(PyObject *o, datum *d, const char *failmsg) } static PyObject * -gdbm_subscript(PyObject *op, PyObject *key) +gdbm_subscript_lock_held(PyObject *op, PyObject *key) { PyObject *v; datum drec, krec; @@ -265,6 +286,16 @@ gdbm_subscript(PyObject *op, PyObject *key) return v; } +static PyObject * +gdbm_subscript(PyObject *op, PyObject *key) +{ + PyObject *result; + Py_BEGIN_CRITICAL_SECTION(op); + result = gdbm_subscript_lock_held(op, key); + Py_END_CRITICAL_SECTION(); + return result; +} + /*[clinic input] _gdbm.gdbm.get @@ -290,7 +321,7 @@ _gdbm_gdbm_get_impl(gdbmobject *self, PyObject *key, PyObject *default_value) } static int -gdbm_ass_sub(PyObject *op, PyObject *v, PyObject *w) +gdbm_ass_sub_lock_held(PyObject *op, PyObject *v, PyObject *w) { datum krec, drec; const char *failmsg = "gdbm mappings have bytes or string indices only"; @@ -335,7 +366,18 @@ gdbm_ass_sub(PyObject *op, PyObject *v, PyObject *w) return 0; } +static int +gdbm_ass_sub(PyObject *op, PyObject *v, PyObject *w) +{ + int result; + Py_BEGIN_CRITICAL_SECTION(op); + result = gdbm_ass_sub_lock_held(op, v, w); + Py_END_CRITICAL_SECTION(); + return result; +} + /*[clinic input] +@critical_section _gdbm.gdbm.setdefault key: object @@ -348,7 +390,7 @@ Get value for key, or set it to default and return default if not present. static PyObject * _gdbm_gdbm_setdefault_impl(gdbmobject *self, PyObject *key, PyObject *default_value) -/*[clinic end generated code: output=f3246e880509f142 input=0db46b69e9680171]*/ +/*[clinic end generated code: output=f3246e880509f142 input=854374cd81ab51b6]*/ { PyObject *res; @@ -363,6 +405,7 @@ _gdbm_gdbm_setdefault_impl(gdbmobject *self, PyObject *key, } /*[clinic input] +@critical_section _gdbm.gdbm.close Close the database. @@ -370,7 +413,7 @@ Close the database. static PyObject * _gdbm_gdbm_close_impl(gdbmobject *self) -/*[clinic end generated code: output=f5abb4d6bb9e52d5 input=0a203447379b45fd]*/ +/*[clinic end generated code: output=f5abb4d6bb9e52d5 input=56b604f4e77f533d]*/ { if (self->di_dbm) { gdbm_close(self->di_dbm); @@ -381,6 +424,7 @@ _gdbm_gdbm_close_impl(gdbmobject *self) /* XXX Should return a set or a set view */ /*[clinic input] +@critical_section _gdbm.gdbm.keys cls: defining_class @@ -390,7 +434,7 @@ Get a list of all keys in the database. static PyObject * _gdbm_gdbm_keys_impl(gdbmobject *self, PyTypeObject *cls) -/*[clinic end generated code: output=c24b824e81404755 input=1428b7c79703d7d5]*/ +/*[clinic end generated code: output=c24b824e81404755 input=785988b1ea8f77e0]*/ { PyObject *v, *item; datum key, nextkey; @@ -432,7 +476,7 @@ _gdbm_gdbm_keys_impl(gdbmobject *self, PyTypeObject *cls) } static int -gdbm_contains(PyObject *self, PyObject *arg) +gdbm_contains_lock_held(PyObject *self, PyObject *arg) { gdbmobject *dp = (gdbmobject *)self; datum key; @@ -463,7 +507,18 @@ gdbm_contains(PyObject *self, PyObject *arg) return gdbm_exists(dp->di_dbm, key); } +static int +gdbm_contains(PyObject *self, PyObject *arg) +{ + int result; + Py_BEGIN_CRITICAL_SECTION(self); + result = gdbm_contains_lock_held(self, arg); + Py_END_CRITICAL_SECTION(); + return result; +} + /*[clinic input] +@critical_section _gdbm.gdbm.firstkey cls: defining_class @@ -477,7 +532,7 @@ hash values, and won't be sorted by the key values. 
static PyObject * _gdbm_gdbm_firstkey_impl(gdbmobject *self, PyTypeObject *cls) -/*[clinic end generated code: output=139275e9c8b60827 input=ed8782a029a5d299]*/ +/*[clinic end generated code: output=139275e9c8b60827 input=aad5a7c886c542f5]*/ { PyObject *v; datum key; @@ -497,6 +552,7 @@ _gdbm_gdbm_firstkey_impl(gdbmobject *self, PyTypeObject *cls) } /*[clinic input] +@critical_section _gdbm.gdbm.nextkey cls: defining_class @@ -517,7 +573,7 @@ to create a list in memory that contains them all: static PyObject * _gdbm_gdbm_nextkey_impl(gdbmobject *self, PyTypeObject *cls, const char *key, Py_ssize_t key_length) -/*[clinic end generated code: output=c81a69300ef41766 input=365e297bc0b3db48]*/ +/*[clinic end generated code: output=c81a69300ef41766 input=181f1130d5bfeb1e]*/ { PyObject *v; datum dbm_key, nextkey; @@ -539,6 +595,7 @@ _gdbm_gdbm_nextkey_impl(gdbmobject *self, PyTypeObject *cls, const char *key, } /*[clinic input] +@critical_section _gdbm.gdbm.reorganize cls: defining_class @@ -554,7 +611,7 @@ kept and reused as new (key,value) pairs are added. static PyObject * _gdbm_gdbm_reorganize_impl(gdbmobject *self, PyTypeObject *cls) -/*[clinic end generated code: output=d77c69e8e3dd644a input=e1359faeef844e46]*/ +/*[clinic end generated code: output=d77c69e8e3dd644a input=3e3ca0d2ea787861]*/ { _gdbm_state *state = PyType_GetModuleState(cls); assert(state != NULL); @@ -573,6 +630,7 @@ _gdbm_gdbm_reorganize_impl(gdbmobject *self, PyTypeObject *cls) } /*[clinic input] +@critical_section _gdbm.gdbm.sync cls: defining_class @@ -585,7 +643,7 @@ any unwritten data to be written to the disk. static PyObject * _gdbm_gdbm_sync_impl(gdbmobject *self, PyTypeObject *cls) -/*[clinic end generated code: output=bb680a2035c3f592 input=3d749235f79b6f2a]*/ +/*[clinic end generated code: output=bb680a2035c3f592 input=6054385b071d238a]*/ { _gdbm_state *state = PyType_GetModuleState(cls); assert(state != NULL); @@ -595,6 +653,7 @@ _gdbm_gdbm_sync_impl(gdbmobject *self, PyTypeObject *cls) } /*[clinic input] +@critical_section _gdbm.gdbm.clear cls: defining_class / @@ -604,7 +663,7 @@ Remove all items from the database. 
static PyObject * _gdbm_gdbm_clear_impl(gdbmobject *self, PyTypeObject *cls) -/*[clinic end generated code: output=673577c573318661 input=34136d52fcdd4210]*/ +/*[clinic end generated code: output=673577c573318661 input=b17467adfe62f23d]*/ { _gdbm_state *state = PyType_GetModuleState(cls); assert(state != NULL); diff --git a/Modules/_hacl/Hacl_Hash_Blake2b.c b/Modules/_hacl/Hacl_Hash_Blake2b.c index 21ab2b88c79..a5b75d61798 100644 --- a/Modules/_hacl/Hacl_Hash_Blake2b.c +++ b/Modules/_hacl/Hacl_Hash_Blake2b.c @@ -544,11 +544,9 @@ void Hacl_Hash_Blake2b_init(uint64_t *hash, uint32_t kk, uint32_t nn) uint64_t x = r; os[i] = x;); tmp[0U] = - (uint64_t)nn1 - ^ - ((uint64_t)kk1 - << 8U - ^ ((uint64_t)p.fanout << 16U ^ ((uint64_t)p.depth << 24U ^ (uint64_t)p.leaf_length << 32U))); + (uint64_t)nn1 ^ + ((uint64_t)kk1 << 8U ^ + ((uint64_t)p.fanout << 16U ^ ((uint64_t)p.depth << 24U ^ (uint64_t)p.leaf_length << 32U))); tmp[1U] = p.node_offset; tmp[2U] = (uint64_t)p.node_depth ^ (uint64_t)p.inner_length << 8U; tmp[3U] = 0ULL; @@ -860,14 +858,10 @@ static Hacl_Hash_Blake2b_state_t uint64_t x = r4; os[i0] = x;); tmp[0U] = - (uint64_t)nn1 - ^ - ((uint64_t)kk2 - << 8U - ^ - ((uint64_t)pv.fanout - << 16U - ^ ((uint64_t)pv.depth << 24U ^ (uint64_t)pv.leaf_length << 32U))); + (uint64_t)nn1 ^ + ((uint64_t)kk2 << 8U ^ + ((uint64_t)pv.fanout << 16U ^ + ((uint64_t)pv.depth << 24U ^ (uint64_t)pv.leaf_length << 32U))); tmp[1U] = pv.node_offset; tmp[2U] = (uint64_t)pv.node_depth ^ (uint64_t)pv.inner_length << 8U; tmp[3U] = 0ULL; @@ -1059,11 +1053,9 @@ static void reset_raw(Hacl_Hash_Blake2b_state_t *state, Hacl_Hash_Blake2b_params uint64_t x = r; os[i0] = x;); tmp[0U] = - (uint64_t)nn1 - ^ - ((uint64_t)kk2 - << 8U - ^ ((uint64_t)pv.fanout << 16U ^ ((uint64_t)pv.depth << 24U ^ (uint64_t)pv.leaf_length << 32U))); + (uint64_t)nn1 ^ + ((uint64_t)kk2 << 8U ^ + ((uint64_t)pv.fanout << 16U ^ ((uint64_t)pv.depth << 24U ^ (uint64_t)pv.leaf_length << 32U))); tmp[1U] = pv.node_offset; tmp[2U] = (uint64_t)pv.node_depth ^ (uint64_t)pv.inner_length << 8U; tmp[3U] = 0ULL; @@ -1200,8 +1192,7 @@ Hacl_Hash_Blake2b_update(Hacl_Hash_Blake2b_state_t *state, uint8_t *chunk, uint3 uint8_t *buf2 = buf + sz1; memcpy(buf2, chunk, chunk_len * sizeof (uint8_t)); uint64_t total_len2 = total_len1 + (uint64_t)chunk_len; - *state - = + *state = ( (Hacl_Hash_Blake2b_state_t){ .block_state = block_state1, @@ -1265,8 +1256,7 @@ Hacl_Hash_Blake2b_update(Hacl_Hash_Blake2b_state_t *state, uint8_t *chunk, uint3 nb); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Hash_Blake2b_state_t){ .block_state = block_state1, @@ -1296,8 +1286,7 @@ Hacl_Hash_Blake2b_update(Hacl_Hash_Blake2b_state_t *state, uint8_t *chunk, uint3 uint8_t *buf2 = buf0 + sz10; memcpy(buf2, chunk1, diff * sizeof (uint8_t)); uint64_t total_len2 = total_len10 + (uint64_t)diff; - *state - = + *state = ( (Hacl_Hash_Blake2b_state_t){ .block_state = block_state10, @@ -1359,8 +1348,7 @@ Hacl_Hash_Blake2b_update(Hacl_Hash_Blake2b_state_t *state, uint8_t *chunk, uint3 nb); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Hash_Blake2b_state_t){ .block_state = block_state1, @@ -1690,14 +1678,10 @@ Hacl_Hash_Blake2b_hash_with_key_and_params( uint64_t x = r; os[i] = x;); tmp[0U] = - (uint64_t)nn - ^ - ((uint64_t)kk - << 8U - ^ - ((uint64_t)params.fanout - << 16U - ^ ((uint64_t)params.depth << 24U ^ (uint64_t)params.leaf_length << 32U))); + (uint64_t)nn ^ + ((uint64_t)kk << 8U ^ + 
((uint64_t)params.fanout << 16U ^ + ((uint64_t)params.depth << 24U ^ (uint64_t)params.leaf_length << 32U))); tmp[1U] = params.node_offset; tmp[2U] = (uint64_t)params.node_depth ^ (uint64_t)params.inner_length << 8U; tmp[3U] = 0ULL; diff --git a/Modules/_hacl/Hacl_Hash_Blake2b_Simd256.c b/Modules/_hacl/Hacl_Hash_Blake2b_Simd256.c index c4d9b4a689d..f955f1c6115 100644 --- a/Modules/_hacl/Hacl_Hash_Blake2b_Simd256.c +++ b/Modules/_hacl/Hacl_Hash_Blake2b_Simd256.c @@ -274,11 +274,9 @@ Hacl_Hash_Blake2b_Simd256_init(Lib_IntVector_Intrinsics_vec256 *hash, uint32_t k uint64_t x = r; os[i] = x;); tmp[0U] = - (uint64_t)nn1 - ^ - ((uint64_t)kk1 - << 8U - ^ ((uint64_t)p.fanout << 16U ^ ((uint64_t)p.depth << 24U ^ (uint64_t)p.leaf_length << 32U))); + (uint64_t)nn1 ^ + ((uint64_t)kk1 << 8U ^ + ((uint64_t)p.fanout << 16U ^ ((uint64_t)p.depth << 24U ^ (uint64_t)p.leaf_length << 32U))); tmp[1U] = p.node_offset; tmp[2U] = (uint64_t)p.node_depth ^ (uint64_t)p.inner_length << 8U; tmp[3U] = 0ULL; @@ -746,14 +744,10 @@ static Hacl_Hash_Blake2b_Simd256_state_t uint64_t x = r4; os[i0] = x;); tmp[0U] = - (uint64_t)nn1 - ^ - ((uint64_t)kk2 - << 8U - ^ - ((uint64_t)pv.fanout - << 16U - ^ ((uint64_t)pv.depth << 24U ^ (uint64_t)pv.leaf_length << 32U))); + (uint64_t)nn1 ^ + ((uint64_t)kk2 << 8U ^ + ((uint64_t)pv.fanout << 16U ^ + ((uint64_t)pv.depth << 24U ^ (uint64_t)pv.leaf_length << 32U))); tmp[1U] = pv.node_offset; tmp[2U] = (uint64_t)pv.node_depth ^ (uint64_t)pv.inner_length << 8U; tmp[3U] = 0ULL; @@ -936,11 +930,9 @@ reset_raw(Hacl_Hash_Blake2b_Simd256_state_t *state, Hacl_Hash_Blake2b_params_and uint64_t x = r; os[i0] = x;); tmp[0U] = - (uint64_t)nn1 - ^ - ((uint64_t)kk2 - << 8U - ^ ((uint64_t)pv.fanout << 16U ^ ((uint64_t)pv.depth << 24U ^ (uint64_t)pv.leaf_length << 32U))); + (uint64_t)nn1 ^ + ((uint64_t)kk2 << 8U ^ + ((uint64_t)pv.fanout << 16U ^ ((uint64_t)pv.depth << 24U ^ (uint64_t)pv.leaf_length << 32U))); tmp[1U] = pv.node_offset; tmp[2U] = (uint64_t)pv.node_depth ^ (uint64_t)pv.inner_length << 8U; tmp[3U] = 0ULL; @@ -1075,8 +1067,7 @@ Hacl_Hash_Blake2b_Simd256_update( uint8_t *buf2 = buf + sz1; memcpy(buf2, chunk, chunk_len * sizeof (uint8_t)); uint64_t total_len2 = total_len1 + (uint64_t)chunk_len; - *state - = + *state = ( (Hacl_Hash_Blake2b_Simd256_state_t){ .block_state = block_state1, @@ -1140,8 +1131,7 @@ Hacl_Hash_Blake2b_Simd256_update( nb); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Hash_Blake2b_Simd256_state_t){ .block_state = block_state1, @@ -1171,8 +1161,7 @@ Hacl_Hash_Blake2b_Simd256_update( uint8_t *buf2 = buf0 + sz10; memcpy(buf2, chunk1, diff * sizeof (uint8_t)); uint64_t total_len2 = total_len10 + (uint64_t)diff; - *state - = + *state = ( (Hacl_Hash_Blake2b_Simd256_state_t){ .block_state = block_state10, @@ -1234,8 +1223,7 @@ Hacl_Hash_Blake2b_Simd256_update( nb); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Hash_Blake2b_Simd256_state_t){ .block_state = block_state1, @@ -1578,14 +1566,10 @@ Hacl_Hash_Blake2b_Simd256_hash_with_key_and_params( uint64_t x = r; os[i] = x;); tmp[0U] = - (uint64_t)nn - ^ - ((uint64_t)kk - << 8U - ^ - ((uint64_t)params.fanout - << 16U - ^ ((uint64_t)params.depth << 24U ^ (uint64_t)params.leaf_length << 32U))); + (uint64_t)nn ^ + ((uint64_t)kk << 8U ^ + ((uint64_t)params.fanout << 16U ^ + ((uint64_t)params.depth << 24U ^ (uint64_t)params.leaf_length << 32U))); tmp[1U] = params.node_offset; tmp[2U] = (uint64_t)params.node_depth ^ 
(uint64_t)params.inner_length << 8U; tmp[3U] = 0ULL; diff --git a/Modules/_hacl/Hacl_Hash_Blake2s.c b/Modules/_hacl/Hacl_Hash_Blake2s.c index 730ba135afb..0d4fcc7f395 100644 --- a/Modules/_hacl/Hacl_Hash_Blake2s.c +++ b/Modules/_hacl/Hacl_Hash_Blake2s.c @@ -543,13 +543,13 @@ void Hacl_Hash_Blake2s_init(uint32_t *hash, uint32_t kk, uint32_t nn) uint32_t x = r; os[i] = x;); tmp[0U] = - (uint32_t)(uint8_t)nn - ^ ((uint32_t)(uint8_t)kk << 8U ^ ((uint32_t)p.fanout << 16U ^ (uint32_t)p.depth << 24U)); + (uint32_t)(uint8_t)nn ^ + ((uint32_t)(uint8_t)kk << 8U ^ ((uint32_t)p.fanout << 16U ^ (uint32_t)p.depth << 24U)); tmp[1U] = p.leaf_length; tmp[2U] = (uint32_t)p.node_offset; tmp[3U] = - (uint32_t)(p.node_offset >> 32U) - ^ ((uint32_t)p.node_depth << 16U ^ (uint32_t)p.inner_length << 24U); + (uint32_t)(p.node_offset >> 32U) ^ + ((uint32_t)p.node_depth << 16U ^ (uint32_t)p.inner_length << 24U); uint32_t tmp0 = tmp[0U]; uint32_t tmp1 = tmp[1U]; uint32_t tmp2 = tmp[2U]; @@ -846,16 +846,14 @@ static Hacl_Hash_Blake2s_state_t uint32_t x = r4; os[i0] = x;); tmp[0U] = - (uint32_t)pv.digest_length - ^ - ((uint32_t)pv.key_length - << 8U - ^ ((uint32_t)pv.fanout << 16U ^ (uint32_t)pv.depth << 24U)); + (uint32_t)pv.digest_length ^ + ((uint32_t)pv.key_length << 8U ^ + ((uint32_t)pv.fanout << 16U ^ (uint32_t)pv.depth << 24U)); tmp[1U] = pv.leaf_length; tmp[2U] = (uint32_t)pv.node_offset; tmp[3U] = - (uint32_t)(pv.node_offset >> 32U) - ^ ((uint32_t)pv.node_depth << 16U ^ (uint32_t)pv.inner_length << 24U); + (uint32_t)(pv.node_offset >> 32U) ^ + ((uint32_t)pv.node_depth << 16U ^ (uint32_t)pv.inner_length << 24U); uint32_t tmp0 = tmp[0U]; uint32_t tmp1 = tmp[1U]; uint32_t tmp2 = tmp[2U]; @@ -1042,13 +1040,13 @@ static void reset_raw(Hacl_Hash_Blake2s_state_t *state, Hacl_Hash_Blake2b_params uint32_t x = r; os[i0] = x;); tmp[0U] = - (uint32_t)pv.digest_length - ^ ((uint32_t)pv.key_length << 8U ^ ((uint32_t)pv.fanout << 16U ^ (uint32_t)pv.depth << 24U)); + (uint32_t)pv.digest_length ^ + ((uint32_t)pv.key_length << 8U ^ ((uint32_t)pv.fanout << 16U ^ (uint32_t)pv.depth << 24U)); tmp[1U] = pv.leaf_length; tmp[2U] = (uint32_t)pv.node_offset; tmp[3U] = - (uint32_t)(pv.node_offset >> 32U) - ^ ((uint32_t)pv.node_depth << 16U ^ (uint32_t)pv.inner_length << 24U); + (uint32_t)(pv.node_offset >> 32U) ^ + ((uint32_t)pv.node_depth << 16U ^ (uint32_t)pv.inner_length << 24U); uint32_t tmp0 = tmp[0U]; uint32_t tmp1 = tmp[1U]; uint32_t tmp2 = tmp[2U]; @@ -1182,8 +1180,7 @@ Hacl_Hash_Blake2s_update(Hacl_Hash_Blake2s_state_t *state, uint8_t *chunk, uint3 uint8_t *buf2 = buf + sz1; memcpy(buf2, chunk, chunk_len * sizeof (uint8_t)); uint64_t total_len2 = total_len1 + (uint64_t)chunk_len; - *state - = + *state = ( (Hacl_Hash_Blake2s_state_t){ .block_state = block_state1, @@ -1237,8 +1234,7 @@ Hacl_Hash_Blake2s_update(Hacl_Hash_Blake2s_state_t *state, uint8_t *chunk, uint3 Hacl_Hash_Blake2s_update_multi(data1_len, wv, hash, total_len1, data1, nb); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Hash_Blake2s_state_t){ .block_state = block_state1, @@ -1268,8 +1264,7 @@ Hacl_Hash_Blake2s_update(Hacl_Hash_Blake2s_state_t *state, uint8_t *chunk, uint3 uint8_t *buf2 = buf0 + sz10; memcpy(buf2, chunk1, diff * sizeof (uint8_t)); uint64_t total_len2 = total_len10 + (uint64_t)diff; - *state - = + *state = ( (Hacl_Hash_Blake2s_state_t){ .block_state = block_state10, @@ -1321,8 +1316,7 @@ Hacl_Hash_Blake2s_update(Hacl_Hash_Blake2s_state_t *state, uint8_t *chunk, uint3 
Hacl_Hash_Blake2s_update_multi(data1_len, wv, hash, total_len1, data1, nb); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Hash_Blake2s_state_t){ .block_state = block_state1, @@ -1639,16 +1633,14 @@ Hacl_Hash_Blake2s_hash_with_key_and_params( uint32_t x = r; os[i] = x;); tmp[0U] = - (uint32_t)params.digest_length - ^ - ((uint32_t)params.key_length - << 8U - ^ ((uint32_t)params.fanout << 16U ^ (uint32_t)params.depth << 24U)); + (uint32_t)params.digest_length ^ + ((uint32_t)params.key_length << 8U ^ + ((uint32_t)params.fanout << 16U ^ (uint32_t)params.depth << 24U)); tmp[1U] = params.leaf_length; tmp[2U] = (uint32_t)params.node_offset; tmp[3U] = - (uint32_t)(params.node_offset >> 32U) - ^ ((uint32_t)params.node_depth << 16U ^ (uint32_t)params.inner_length << 24U); + (uint32_t)(params.node_offset >> 32U) ^ + ((uint32_t)params.node_depth << 16U ^ (uint32_t)params.inner_length << 24U); uint32_t tmp0 = tmp[0U]; uint32_t tmp1 = tmp[1U]; uint32_t tmp2 = tmp[2U]; diff --git a/Modules/_hacl/Hacl_Hash_Blake2s_Simd128.c b/Modules/_hacl/Hacl_Hash_Blake2s_Simd128.c index 7e9cd79544f..fa46be045f3 100644 --- a/Modules/_hacl/Hacl_Hash_Blake2s_Simd128.c +++ b/Modules/_hacl/Hacl_Hash_Blake2s_Simd128.c @@ -271,13 +271,13 @@ Hacl_Hash_Blake2s_Simd128_init(Lib_IntVector_Intrinsics_vec128 *hash, uint32_t k uint32_t x = r; os[i] = x;); tmp[0U] = - (uint32_t)(uint8_t)nn - ^ ((uint32_t)(uint8_t)kk << 8U ^ ((uint32_t)p.fanout << 16U ^ (uint32_t)p.depth << 24U)); + (uint32_t)(uint8_t)nn ^ + ((uint32_t)(uint8_t)kk << 8U ^ ((uint32_t)p.fanout << 16U ^ (uint32_t)p.depth << 24U)); tmp[1U] = p.leaf_length; tmp[2U] = (uint32_t)p.node_offset; tmp[3U] = - (uint32_t)(p.node_offset >> 32U) - ^ ((uint32_t)p.node_depth << 16U ^ (uint32_t)p.inner_length << 24U); + (uint32_t)(p.node_offset >> 32U) ^ + ((uint32_t)p.node_depth << 16U ^ (uint32_t)p.inner_length << 24U); uint32_t tmp0 = tmp[0U]; uint32_t tmp1 = tmp[1U]; uint32_t tmp2 = tmp[2U]; @@ -736,16 +736,14 @@ static Hacl_Hash_Blake2s_Simd128_state_t uint32_t x = r4; os[i0] = x;); tmp[0U] = - (uint32_t)pv.digest_length - ^ - ((uint32_t)pv.key_length - << 8U - ^ ((uint32_t)pv.fanout << 16U ^ (uint32_t)pv.depth << 24U)); + (uint32_t)pv.digest_length ^ + ((uint32_t)pv.key_length << 8U ^ + ((uint32_t)pv.fanout << 16U ^ (uint32_t)pv.depth << 24U)); tmp[1U] = pv.leaf_length; tmp[2U] = (uint32_t)pv.node_offset; tmp[3U] = - (uint32_t)(pv.node_offset >> 32U) - ^ ((uint32_t)pv.node_depth << 16U ^ (uint32_t)pv.inner_length << 24U); + (uint32_t)(pv.node_offset >> 32U) ^ + ((uint32_t)pv.node_depth << 16U ^ (uint32_t)pv.inner_length << 24U); uint32_t tmp0 = tmp[0U]; uint32_t tmp1 = tmp[1U]; uint32_t tmp2 = tmp[2U]; @@ -923,13 +921,13 @@ reset_raw(Hacl_Hash_Blake2s_Simd128_state_t *state, Hacl_Hash_Blake2b_params_and uint32_t x = r; os[i0] = x;); tmp[0U] = - (uint32_t)pv.digest_length - ^ ((uint32_t)pv.key_length << 8U ^ ((uint32_t)pv.fanout << 16U ^ (uint32_t)pv.depth << 24U)); + (uint32_t)pv.digest_length ^ + ((uint32_t)pv.key_length << 8U ^ ((uint32_t)pv.fanout << 16U ^ (uint32_t)pv.depth << 24U)); tmp[1U] = pv.leaf_length; tmp[2U] = (uint32_t)pv.node_offset; tmp[3U] = - (uint32_t)(pv.node_offset >> 32U) - ^ ((uint32_t)pv.node_depth << 16U ^ (uint32_t)pv.inner_length << 24U); + (uint32_t)(pv.node_offset >> 32U) ^ + ((uint32_t)pv.node_depth << 16U ^ (uint32_t)pv.inner_length << 24U); uint32_t tmp0 = tmp[0U]; uint32_t tmp1 = tmp[1U]; uint32_t tmp2 = tmp[2U]; @@ -1061,8 +1059,7 @@ Hacl_Hash_Blake2s_Simd128_update( uint8_t *buf2 = buf + 
sz1; memcpy(buf2, chunk, chunk_len * sizeof (uint8_t)); uint64_t total_len2 = total_len1 + (uint64_t)chunk_len; - *state - = + *state = ( (Hacl_Hash_Blake2s_Simd128_state_t){ .block_state = block_state1, @@ -1116,8 +1113,7 @@ Hacl_Hash_Blake2s_Simd128_update( Hacl_Hash_Blake2s_Simd128_update_multi(data1_len, wv, hash, total_len1, data1, nb); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Hash_Blake2s_Simd128_state_t){ .block_state = block_state1, @@ -1147,8 +1143,7 @@ Hacl_Hash_Blake2s_Simd128_update( uint8_t *buf2 = buf0 + sz10; memcpy(buf2, chunk1, diff * sizeof (uint8_t)); uint64_t total_len2 = total_len10 + (uint64_t)diff; - *state - = + *state = ( (Hacl_Hash_Blake2s_Simd128_state_t){ .block_state = block_state10, @@ -1200,8 +1195,7 @@ Hacl_Hash_Blake2s_Simd128_update( Hacl_Hash_Blake2s_Simd128_update_multi(data1_len, wv, hash, total_len1, data1, nb); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Hash_Blake2s_Simd128_state_t){ .block_state = block_state1, @@ -1531,16 +1525,14 @@ Hacl_Hash_Blake2s_Simd128_hash_with_key_and_params( uint32_t x = r; os[i] = x;); tmp[0U] = - (uint32_t)params.digest_length - ^ - ((uint32_t)params.key_length - << 8U - ^ ((uint32_t)params.fanout << 16U ^ (uint32_t)params.depth << 24U)); + (uint32_t)params.digest_length ^ + ((uint32_t)params.key_length << 8U ^ + ((uint32_t)params.fanout << 16U ^ (uint32_t)params.depth << 24U)); tmp[1U] = params.leaf_length; tmp[2U] = (uint32_t)params.node_offset; tmp[3U] = - (uint32_t)(params.node_offset >> 32U) - ^ ((uint32_t)params.node_depth << 16U ^ (uint32_t)params.inner_length << 24U); + (uint32_t)(params.node_offset >> 32U) ^ + ((uint32_t)params.node_depth << 16U ^ (uint32_t)params.inner_length << 24U); uint32_t tmp0 = tmp[0U]; uint32_t tmp1 = tmp[1U]; uint32_t tmp2 = tmp[2U]; diff --git a/Modules/_hacl/Hacl_Hash_MD5.c b/Modules/_hacl/Hacl_Hash_MD5.c index 75ce8d2926e..305c75483c0 100644 --- a/Modules/_hacl/Hacl_Hash_MD5.c +++ b/Modules/_hacl/Hacl_Hash_MD5.c @@ -66,11 +66,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti0 = _t[0U]; uint32_t v = - vb0 - + - ((va + ((vb0 & vc0) | (~vb0 & vd0)) + xk + ti0) - << 7U - | (va + ((vb0 & vc0) | (~vb0 & vd0)) + xk + ti0) >> 25U); + vb0 + + ((va + ((vb0 & vc0) | (~vb0 & vd0)) + xk + ti0) << 7U | + (va + ((vb0 & vc0) | (~vb0 & vd0)) + xk + ti0) >> 25U); abcd[0U] = v; uint32_t va0 = abcd[3U]; uint32_t vb1 = abcd[0U]; @@ -82,11 +80,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti1 = _t[1U]; uint32_t v0 = - vb1 - + - ((va0 + ((vb1 & vc1) | (~vb1 & vd1)) + xk0 + ti1) - << 12U - | (va0 + ((vb1 & vc1) | (~vb1 & vd1)) + xk0 + ti1) >> 20U); + vb1 + + ((va0 + ((vb1 & vc1) | (~vb1 & vd1)) + xk0 + ti1) << 12U | + (va0 + ((vb1 & vc1) | (~vb1 & vd1)) + xk0 + ti1) >> 20U); abcd[3U] = v0; uint32_t va1 = abcd[2U]; uint32_t vb2 = abcd[3U]; @@ -98,11 +94,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti2 = _t[2U]; uint32_t v1 = - vb2 - + - ((va1 + ((vb2 & vc2) | (~vb2 & vd2)) + xk1 + ti2) - << 17U - | (va1 + ((vb2 & vc2) | (~vb2 & vd2)) + xk1 + ti2) >> 15U); + vb2 + + ((va1 + ((vb2 & vc2) | (~vb2 & vd2)) + xk1 + ti2) << 17U | + (va1 + ((vb2 & vc2) | (~vb2 & vd2)) + xk1 + ti2) >> 15U); abcd[2U] = v1; uint32_t va2 = abcd[1U]; uint32_t vb3 = abcd[2U]; @@ -114,11 +108,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti3 = _t[3U]; uint32_t v2 = - vb3 - + - ((va2 + ((vb3 & vc3) | (~vb3 & vd3)) + xk2 + ti3) - << 22U - | (va2 + ((vb3 & vc3) | 
(~vb3 & vd3)) + xk2 + ti3) >> 10U); + vb3 + + ((va2 + ((vb3 & vc3) | (~vb3 & vd3)) + xk2 + ti3) << 22U | + (va2 + ((vb3 & vc3) | (~vb3 & vd3)) + xk2 + ti3) >> 10U); abcd[1U] = v2; uint32_t va3 = abcd[0U]; uint32_t vb4 = abcd[1U]; @@ -130,11 +122,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti4 = _t[4U]; uint32_t v3 = - vb4 - + - ((va3 + ((vb4 & vc4) | (~vb4 & vd4)) + xk3 + ti4) - << 7U - | (va3 + ((vb4 & vc4) | (~vb4 & vd4)) + xk3 + ti4) >> 25U); + vb4 + + ((va3 + ((vb4 & vc4) | (~vb4 & vd4)) + xk3 + ti4) << 7U | + (va3 + ((vb4 & vc4) | (~vb4 & vd4)) + xk3 + ti4) >> 25U); abcd[0U] = v3; uint32_t va4 = abcd[3U]; uint32_t vb5 = abcd[0U]; @@ -146,11 +136,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti5 = _t[5U]; uint32_t v4 = - vb5 - + - ((va4 + ((vb5 & vc5) | (~vb5 & vd5)) + xk4 + ti5) - << 12U - | (va4 + ((vb5 & vc5) | (~vb5 & vd5)) + xk4 + ti5) >> 20U); + vb5 + + ((va4 + ((vb5 & vc5) | (~vb5 & vd5)) + xk4 + ti5) << 12U | + (va4 + ((vb5 & vc5) | (~vb5 & vd5)) + xk4 + ti5) >> 20U); abcd[3U] = v4; uint32_t va5 = abcd[2U]; uint32_t vb6 = abcd[3U]; @@ -162,11 +150,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti6 = _t[6U]; uint32_t v5 = - vb6 - + - ((va5 + ((vb6 & vc6) | (~vb6 & vd6)) + xk5 + ti6) - << 17U - | (va5 + ((vb6 & vc6) | (~vb6 & vd6)) + xk5 + ti6) >> 15U); + vb6 + + ((va5 + ((vb6 & vc6) | (~vb6 & vd6)) + xk5 + ti6) << 17U | + (va5 + ((vb6 & vc6) | (~vb6 & vd6)) + xk5 + ti6) >> 15U); abcd[2U] = v5; uint32_t va6 = abcd[1U]; uint32_t vb7 = abcd[2U]; @@ -178,11 +164,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti7 = _t[7U]; uint32_t v6 = - vb7 - + - ((va6 + ((vb7 & vc7) | (~vb7 & vd7)) + xk6 + ti7) - << 22U - | (va6 + ((vb7 & vc7) | (~vb7 & vd7)) + xk6 + ti7) >> 10U); + vb7 + + ((va6 + ((vb7 & vc7) | (~vb7 & vd7)) + xk6 + ti7) << 22U | + (va6 + ((vb7 & vc7) | (~vb7 & vd7)) + xk6 + ti7) >> 10U); abcd[1U] = v6; uint32_t va7 = abcd[0U]; uint32_t vb8 = abcd[1U]; @@ -194,11 +178,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti8 = _t[8U]; uint32_t v7 = - vb8 - + - ((va7 + ((vb8 & vc8) | (~vb8 & vd8)) + xk7 + ti8) - << 7U - | (va7 + ((vb8 & vc8) | (~vb8 & vd8)) + xk7 + ti8) >> 25U); + vb8 + + ((va7 + ((vb8 & vc8) | (~vb8 & vd8)) + xk7 + ti8) << 7U | + (va7 + ((vb8 & vc8) | (~vb8 & vd8)) + xk7 + ti8) >> 25U); abcd[0U] = v7; uint32_t va8 = abcd[3U]; uint32_t vb9 = abcd[0U]; @@ -210,11 +192,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti9 = _t[9U]; uint32_t v8 = - vb9 - + - ((va8 + ((vb9 & vc9) | (~vb9 & vd9)) + xk8 + ti9) - << 12U - | (va8 + ((vb9 & vc9) | (~vb9 & vd9)) + xk8 + ti9) >> 20U); + vb9 + + ((va8 + ((vb9 & vc9) | (~vb9 & vd9)) + xk8 + ti9) << 12U | + (va8 + ((vb9 & vc9) | (~vb9 & vd9)) + xk8 + ti9) >> 20U); abcd[3U] = v8; uint32_t va9 = abcd[2U]; uint32_t vb10 = abcd[3U]; @@ -226,11 +206,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti10 = _t[10U]; uint32_t v9 = - vb10 - + - ((va9 + ((vb10 & vc10) | (~vb10 & vd10)) + xk9 + ti10) - << 17U - | (va9 + ((vb10 & vc10) | (~vb10 & vd10)) + xk9 + ti10) >> 15U); + vb10 + + ((va9 + ((vb10 & vc10) | (~vb10 & vd10)) + xk9 + ti10) << 17U | + (va9 + ((vb10 & vc10) | (~vb10 & vd10)) + xk9 + ti10) >> 15U); abcd[2U] = v9; uint32_t va10 = abcd[1U]; uint32_t vb11 = abcd[2U]; @@ -242,11 +220,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti11 = _t[11U]; uint32_t v10 = - vb11 - + - ((va10 + ((vb11 & vc11) | (~vb11 & vd11)) + xk10 + ti11) - << 22U - | (va10 + ((vb11 & vc11) | (~vb11 & vd11)) + xk10 + ti11) >> 10U); + vb11 + + ((va10 + 
((vb11 & vc11) | (~vb11 & vd11)) + xk10 + ti11) << 22U | + (va10 + ((vb11 & vc11) | (~vb11 & vd11)) + xk10 + ti11) >> 10U); abcd[1U] = v10; uint32_t va11 = abcd[0U]; uint32_t vb12 = abcd[1U]; @@ -258,11 +234,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti12 = _t[12U]; uint32_t v11 = - vb12 - + - ((va11 + ((vb12 & vc12) | (~vb12 & vd12)) + xk11 + ti12) - << 7U - | (va11 + ((vb12 & vc12) | (~vb12 & vd12)) + xk11 + ti12) >> 25U); + vb12 + + ((va11 + ((vb12 & vc12) | (~vb12 & vd12)) + xk11 + ti12) << 7U | + (va11 + ((vb12 & vc12) | (~vb12 & vd12)) + xk11 + ti12) >> 25U); abcd[0U] = v11; uint32_t va12 = abcd[3U]; uint32_t vb13 = abcd[0U]; @@ -274,11 +248,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti13 = _t[13U]; uint32_t v12 = - vb13 - + - ((va12 + ((vb13 & vc13) | (~vb13 & vd13)) + xk12 + ti13) - << 12U - | (va12 + ((vb13 & vc13) | (~vb13 & vd13)) + xk12 + ti13) >> 20U); + vb13 + + ((va12 + ((vb13 & vc13) | (~vb13 & vd13)) + xk12 + ti13) << 12U | + (va12 + ((vb13 & vc13) | (~vb13 & vd13)) + xk12 + ti13) >> 20U); abcd[3U] = v12; uint32_t va13 = abcd[2U]; uint32_t vb14 = abcd[3U]; @@ -290,11 +262,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti14 = _t[14U]; uint32_t v13 = - vb14 - + - ((va13 + ((vb14 & vc14) | (~vb14 & vd14)) + xk13 + ti14) - << 17U - | (va13 + ((vb14 & vc14) | (~vb14 & vd14)) + xk13 + ti14) >> 15U); + vb14 + + ((va13 + ((vb14 & vc14) | (~vb14 & vd14)) + xk13 + ti14) << 17U | + (va13 + ((vb14 & vc14) | (~vb14 & vd14)) + xk13 + ti14) >> 15U); abcd[2U] = v13; uint32_t va14 = abcd[1U]; uint32_t vb15 = abcd[2U]; @@ -306,11 +276,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti15 = _t[15U]; uint32_t v14 = - vb15 - + - ((va14 + ((vb15 & vc15) | (~vb15 & vd15)) + xk14 + ti15) - << 22U - | (va14 + ((vb15 & vc15) | (~vb15 & vd15)) + xk14 + ti15) >> 10U); + vb15 + + ((va14 + ((vb15 & vc15) | (~vb15 & vd15)) + xk14 + ti15) << 22U | + (va14 + ((vb15 & vc15) | (~vb15 & vd15)) + xk14 + ti15) >> 10U); abcd[1U] = v14; uint32_t va15 = abcd[0U]; uint32_t vb16 = abcd[1U]; @@ -322,11 +290,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti16 = _t[16U]; uint32_t v15 = - vb16 - + - ((va15 + ((vb16 & vd16) | (vc16 & ~vd16)) + xk15 + ti16) - << 5U - | (va15 + ((vb16 & vd16) | (vc16 & ~vd16)) + xk15 + ti16) >> 27U); + vb16 + + ((va15 + ((vb16 & vd16) | (vc16 & ~vd16)) + xk15 + ti16) << 5U | + (va15 + ((vb16 & vd16) | (vc16 & ~vd16)) + xk15 + ti16) >> 27U); abcd[0U] = v15; uint32_t va16 = abcd[3U]; uint32_t vb17 = abcd[0U]; @@ -338,11 +304,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti17 = _t[17U]; uint32_t v16 = - vb17 - + - ((va16 + ((vb17 & vd17) | (vc17 & ~vd17)) + xk16 + ti17) - << 9U - | (va16 + ((vb17 & vd17) | (vc17 & ~vd17)) + xk16 + ti17) >> 23U); + vb17 + + ((va16 + ((vb17 & vd17) | (vc17 & ~vd17)) + xk16 + ti17) << 9U | + (va16 + ((vb17 & vd17) | (vc17 & ~vd17)) + xk16 + ti17) >> 23U); abcd[3U] = v16; uint32_t va17 = abcd[2U]; uint32_t vb18 = abcd[3U]; @@ -354,11 +318,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti18 = _t[18U]; uint32_t v17 = - vb18 - + - ((va17 + ((vb18 & vd18) | (vc18 & ~vd18)) + xk17 + ti18) - << 14U - | (va17 + ((vb18 & vd18) | (vc18 & ~vd18)) + xk17 + ti18) >> 18U); + vb18 + + ((va17 + ((vb18 & vd18) | (vc18 & ~vd18)) + xk17 + ti18) << 14U | + (va17 + ((vb18 & vd18) | (vc18 & ~vd18)) + xk17 + ti18) >> 18U); abcd[2U] = v17; uint32_t va18 = abcd[1U]; uint32_t vb19 = abcd[2U]; @@ -370,11 +332,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti19 = _t[19U]; 
uint32_t v18 = - vb19 - + - ((va18 + ((vb19 & vd19) | (vc19 & ~vd19)) + xk18 + ti19) - << 20U - | (va18 + ((vb19 & vd19) | (vc19 & ~vd19)) + xk18 + ti19) >> 12U); + vb19 + + ((va18 + ((vb19 & vd19) | (vc19 & ~vd19)) + xk18 + ti19) << 20U | + (va18 + ((vb19 & vd19) | (vc19 & ~vd19)) + xk18 + ti19) >> 12U); abcd[1U] = v18; uint32_t va19 = abcd[0U]; uint32_t vb20 = abcd[1U]; @@ -386,11 +346,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti20 = _t[20U]; uint32_t v19 = - vb20 - + - ((va19 + ((vb20 & vd20) | (vc20 & ~vd20)) + xk19 + ti20) - << 5U - | (va19 + ((vb20 & vd20) | (vc20 & ~vd20)) + xk19 + ti20) >> 27U); + vb20 + + ((va19 + ((vb20 & vd20) | (vc20 & ~vd20)) + xk19 + ti20) << 5U | + (va19 + ((vb20 & vd20) | (vc20 & ~vd20)) + xk19 + ti20) >> 27U); abcd[0U] = v19; uint32_t va20 = abcd[3U]; uint32_t vb21 = abcd[0U]; @@ -402,11 +360,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti21 = _t[21U]; uint32_t v20 = - vb21 - + - ((va20 + ((vb21 & vd21) | (vc21 & ~vd21)) + xk20 + ti21) - << 9U - | (va20 + ((vb21 & vd21) | (vc21 & ~vd21)) + xk20 + ti21) >> 23U); + vb21 + + ((va20 + ((vb21 & vd21) | (vc21 & ~vd21)) + xk20 + ti21) << 9U | + (va20 + ((vb21 & vd21) | (vc21 & ~vd21)) + xk20 + ti21) >> 23U); abcd[3U] = v20; uint32_t va21 = abcd[2U]; uint32_t vb22 = abcd[3U]; @@ -418,11 +374,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti22 = _t[22U]; uint32_t v21 = - vb22 - + - ((va21 + ((vb22 & vd22) | (vc22 & ~vd22)) + xk21 + ti22) - << 14U - | (va21 + ((vb22 & vd22) | (vc22 & ~vd22)) + xk21 + ti22) >> 18U); + vb22 + + ((va21 + ((vb22 & vd22) | (vc22 & ~vd22)) + xk21 + ti22) << 14U | + (va21 + ((vb22 & vd22) | (vc22 & ~vd22)) + xk21 + ti22) >> 18U); abcd[2U] = v21; uint32_t va22 = abcd[1U]; uint32_t vb23 = abcd[2U]; @@ -434,11 +388,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti23 = _t[23U]; uint32_t v22 = - vb23 - + - ((va22 + ((vb23 & vd23) | (vc23 & ~vd23)) + xk22 + ti23) - << 20U - | (va22 + ((vb23 & vd23) | (vc23 & ~vd23)) + xk22 + ti23) >> 12U); + vb23 + + ((va22 + ((vb23 & vd23) | (vc23 & ~vd23)) + xk22 + ti23) << 20U | + (va22 + ((vb23 & vd23) | (vc23 & ~vd23)) + xk22 + ti23) >> 12U); abcd[1U] = v22; uint32_t va23 = abcd[0U]; uint32_t vb24 = abcd[1U]; @@ -450,11 +402,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti24 = _t[24U]; uint32_t v23 = - vb24 - + - ((va23 + ((vb24 & vd24) | (vc24 & ~vd24)) + xk23 + ti24) - << 5U - | (va23 + ((vb24 & vd24) | (vc24 & ~vd24)) + xk23 + ti24) >> 27U); + vb24 + + ((va23 + ((vb24 & vd24) | (vc24 & ~vd24)) + xk23 + ti24) << 5U | + (va23 + ((vb24 & vd24) | (vc24 & ~vd24)) + xk23 + ti24) >> 27U); abcd[0U] = v23; uint32_t va24 = abcd[3U]; uint32_t vb25 = abcd[0U]; @@ -466,11 +416,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti25 = _t[25U]; uint32_t v24 = - vb25 - + - ((va24 + ((vb25 & vd25) | (vc25 & ~vd25)) + xk24 + ti25) - << 9U - | (va24 + ((vb25 & vd25) | (vc25 & ~vd25)) + xk24 + ti25) >> 23U); + vb25 + + ((va24 + ((vb25 & vd25) | (vc25 & ~vd25)) + xk24 + ti25) << 9U | + (va24 + ((vb25 & vd25) | (vc25 & ~vd25)) + xk24 + ti25) >> 23U); abcd[3U] = v24; uint32_t va25 = abcd[2U]; uint32_t vb26 = abcd[3U]; @@ -482,11 +430,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti26 = _t[26U]; uint32_t v25 = - vb26 - + - ((va25 + ((vb26 & vd26) | (vc26 & ~vd26)) + xk25 + ti26) - << 14U - | (va25 + ((vb26 & vd26) | (vc26 & ~vd26)) + xk25 + ti26) >> 18U); + vb26 + + ((va25 + ((vb26 & vd26) | (vc26 & ~vd26)) + xk25 + ti26) << 14U | + (va25 + ((vb26 & vd26) | (vc26 & ~vd26)) + 
xk25 + ti26) >> 18U); abcd[2U] = v25; uint32_t va26 = abcd[1U]; uint32_t vb27 = abcd[2U]; @@ -498,11 +444,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti27 = _t[27U]; uint32_t v26 = - vb27 - + - ((va26 + ((vb27 & vd27) | (vc27 & ~vd27)) + xk26 + ti27) - << 20U - | (va26 + ((vb27 & vd27) | (vc27 & ~vd27)) + xk26 + ti27) >> 12U); + vb27 + + ((va26 + ((vb27 & vd27) | (vc27 & ~vd27)) + xk26 + ti27) << 20U | + (va26 + ((vb27 & vd27) | (vc27 & ~vd27)) + xk26 + ti27) >> 12U); abcd[1U] = v26; uint32_t va27 = abcd[0U]; uint32_t vb28 = abcd[1U]; @@ -514,11 +458,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti28 = _t[28U]; uint32_t v27 = - vb28 - + - ((va27 + ((vb28 & vd28) | (vc28 & ~vd28)) + xk27 + ti28) - << 5U - | (va27 + ((vb28 & vd28) | (vc28 & ~vd28)) + xk27 + ti28) >> 27U); + vb28 + + ((va27 + ((vb28 & vd28) | (vc28 & ~vd28)) + xk27 + ti28) << 5U | + (va27 + ((vb28 & vd28) | (vc28 & ~vd28)) + xk27 + ti28) >> 27U); abcd[0U] = v27; uint32_t va28 = abcd[3U]; uint32_t vb29 = abcd[0U]; @@ -530,11 +472,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti29 = _t[29U]; uint32_t v28 = - vb29 - + - ((va28 + ((vb29 & vd29) | (vc29 & ~vd29)) + xk28 + ti29) - << 9U - | (va28 + ((vb29 & vd29) | (vc29 & ~vd29)) + xk28 + ti29) >> 23U); + vb29 + + ((va28 + ((vb29 & vd29) | (vc29 & ~vd29)) + xk28 + ti29) << 9U | + (va28 + ((vb29 & vd29) | (vc29 & ~vd29)) + xk28 + ti29) >> 23U); abcd[3U] = v28; uint32_t va29 = abcd[2U]; uint32_t vb30 = abcd[3U]; @@ -546,11 +486,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti30 = _t[30U]; uint32_t v29 = - vb30 - + - ((va29 + ((vb30 & vd30) | (vc30 & ~vd30)) + xk29 + ti30) - << 14U - | (va29 + ((vb30 & vd30) | (vc30 & ~vd30)) + xk29 + ti30) >> 18U); + vb30 + + ((va29 + ((vb30 & vd30) | (vc30 & ~vd30)) + xk29 + ti30) << 14U | + (va29 + ((vb30 & vd30) | (vc30 & ~vd30)) + xk29 + ti30) >> 18U); abcd[2U] = v29; uint32_t va30 = abcd[1U]; uint32_t vb31 = abcd[2U]; @@ -562,11 +500,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti31 = _t[31U]; uint32_t v30 = - vb31 - + - ((va30 + ((vb31 & vd31) | (vc31 & ~vd31)) + xk30 + ti31) - << 20U - | (va30 + ((vb31 & vd31) | (vc31 & ~vd31)) + xk30 + ti31) >> 12U); + vb31 + + ((va30 + ((vb31 & vd31) | (vc31 & ~vd31)) + xk30 + ti31) << 20U | + (va30 + ((vb31 & vd31) | (vc31 & ~vd31)) + xk30 + ti31) >> 12U); abcd[1U] = v30; uint32_t va31 = abcd[0U]; uint32_t vb32 = abcd[1U]; @@ -578,11 +514,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti32 = _t[32U]; uint32_t v31 = - vb32 - + - ((va31 + (vb32 ^ (vc32 ^ vd32)) + xk31 + ti32) - << 4U - | (va31 + (vb32 ^ (vc32 ^ vd32)) + xk31 + ti32) >> 28U); + vb32 + + ((va31 + (vb32 ^ (vc32 ^ vd32)) + xk31 + ti32) << 4U | + (va31 + (vb32 ^ (vc32 ^ vd32)) + xk31 + ti32) >> 28U); abcd[0U] = v31; uint32_t va32 = abcd[3U]; uint32_t vb33 = abcd[0U]; @@ -594,11 +528,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti33 = _t[33U]; uint32_t v32 = - vb33 - + - ((va32 + (vb33 ^ (vc33 ^ vd33)) + xk32 + ti33) - << 11U - | (va32 + (vb33 ^ (vc33 ^ vd33)) + xk32 + ti33) >> 21U); + vb33 + + ((va32 + (vb33 ^ (vc33 ^ vd33)) + xk32 + ti33) << 11U | + (va32 + (vb33 ^ (vc33 ^ vd33)) + xk32 + ti33) >> 21U); abcd[3U] = v32; uint32_t va33 = abcd[2U]; uint32_t vb34 = abcd[3U]; @@ -610,11 +542,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti34 = _t[34U]; uint32_t v33 = - vb34 - + - ((va33 + (vb34 ^ (vc34 ^ vd34)) + xk33 + ti34) - << 16U - | (va33 + (vb34 ^ (vc34 ^ vd34)) + xk33 + ti34) >> 16U); + vb34 + + ((va33 + (vb34 ^ (vc34 ^ 
vd34)) + xk33 + ti34) << 16U | + (va33 + (vb34 ^ (vc34 ^ vd34)) + xk33 + ti34) >> 16U); abcd[2U] = v33; uint32_t va34 = abcd[1U]; uint32_t vb35 = abcd[2U]; @@ -626,11 +556,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti35 = _t[35U]; uint32_t v34 = - vb35 - + - ((va34 + (vb35 ^ (vc35 ^ vd35)) + xk34 + ti35) - << 23U - | (va34 + (vb35 ^ (vc35 ^ vd35)) + xk34 + ti35) >> 9U); + vb35 + + ((va34 + (vb35 ^ (vc35 ^ vd35)) + xk34 + ti35) << 23U | + (va34 + (vb35 ^ (vc35 ^ vd35)) + xk34 + ti35) >> 9U); abcd[1U] = v34; uint32_t va35 = abcd[0U]; uint32_t vb36 = abcd[1U]; @@ -642,11 +570,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti36 = _t[36U]; uint32_t v35 = - vb36 - + - ((va35 + (vb36 ^ (vc36 ^ vd36)) + xk35 + ti36) - << 4U - | (va35 + (vb36 ^ (vc36 ^ vd36)) + xk35 + ti36) >> 28U); + vb36 + + ((va35 + (vb36 ^ (vc36 ^ vd36)) + xk35 + ti36) << 4U | + (va35 + (vb36 ^ (vc36 ^ vd36)) + xk35 + ti36) >> 28U); abcd[0U] = v35; uint32_t va36 = abcd[3U]; uint32_t vb37 = abcd[0U]; @@ -658,11 +584,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti37 = _t[37U]; uint32_t v36 = - vb37 - + - ((va36 + (vb37 ^ (vc37 ^ vd37)) + xk36 + ti37) - << 11U - | (va36 + (vb37 ^ (vc37 ^ vd37)) + xk36 + ti37) >> 21U); + vb37 + + ((va36 + (vb37 ^ (vc37 ^ vd37)) + xk36 + ti37) << 11U | + (va36 + (vb37 ^ (vc37 ^ vd37)) + xk36 + ti37) >> 21U); abcd[3U] = v36; uint32_t va37 = abcd[2U]; uint32_t vb38 = abcd[3U]; @@ -674,11 +598,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti38 = _t[38U]; uint32_t v37 = - vb38 - + - ((va37 + (vb38 ^ (vc38 ^ vd38)) + xk37 + ti38) - << 16U - | (va37 + (vb38 ^ (vc38 ^ vd38)) + xk37 + ti38) >> 16U); + vb38 + + ((va37 + (vb38 ^ (vc38 ^ vd38)) + xk37 + ti38) << 16U | + (va37 + (vb38 ^ (vc38 ^ vd38)) + xk37 + ti38) >> 16U); abcd[2U] = v37; uint32_t va38 = abcd[1U]; uint32_t vb39 = abcd[2U]; @@ -690,11 +612,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti39 = _t[39U]; uint32_t v38 = - vb39 - + - ((va38 + (vb39 ^ (vc39 ^ vd39)) + xk38 + ti39) - << 23U - | (va38 + (vb39 ^ (vc39 ^ vd39)) + xk38 + ti39) >> 9U); + vb39 + + ((va38 + (vb39 ^ (vc39 ^ vd39)) + xk38 + ti39) << 23U | + (va38 + (vb39 ^ (vc39 ^ vd39)) + xk38 + ti39) >> 9U); abcd[1U] = v38; uint32_t va39 = abcd[0U]; uint32_t vb40 = abcd[1U]; @@ -706,11 +626,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti40 = _t[40U]; uint32_t v39 = - vb40 - + - ((va39 + (vb40 ^ (vc40 ^ vd40)) + xk39 + ti40) - << 4U - | (va39 + (vb40 ^ (vc40 ^ vd40)) + xk39 + ti40) >> 28U); + vb40 + + ((va39 + (vb40 ^ (vc40 ^ vd40)) + xk39 + ti40) << 4U | + (va39 + (vb40 ^ (vc40 ^ vd40)) + xk39 + ti40) >> 28U); abcd[0U] = v39; uint32_t va40 = abcd[3U]; uint32_t vb41 = abcd[0U]; @@ -722,11 +640,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti41 = _t[41U]; uint32_t v40 = - vb41 - + - ((va40 + (vb41 ^ (vc41 ^ vd41)) + xk40 + ti41) - << 11U - | (va40 + (vb41 ^ (vc41 ^ vd41)) + xk40 + ti41) >> 21U); + vb41 + + ((va40 + (vb41 ^ (vc41 ^ vd41)) + xk40 + ti41) << 11U | + (va40 + (vb41 ^ (vc41 ^ vd41)) + xk40 + ti41) >> 21U); abcd[3U] = v40; uint32_t va41 = abcd[2U]; uint32_t vb42 = abcd[3U]; @@ -738,11 +654,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti42 = _t[42U]; uint32_t v41 = - vb42 - + - ((va41 + (vb42 ^ (vc42 ^ vd42)) + xk41 + ti42) - << 16U - | (va41 + (vb42 ^ (vc42 ^ vd42)) + xk41 + ti42) >> 16U); + vb42 + + ((va41 + (vb42 ^ (vc42 ^ vd42)) + xk41 + ti42) << 16U | + (va41 + (vb42 ^ (vc42 ^ vd42)) + xk41 + ti42) >> 16U); abcd[2U] = v41; uint32_t va42 = abcd[1U]; uint32_t 
vb43 = abcd[2U]; @@ -754,11 +668,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti43 = _t[43U]; uint32_t v42 = - vb43 - + - ((va42 + (vb43 ^ (vc43 ^ vd43)) + xk42 + ti43) - << 23U - | (va42 + (vb43 ^ (vc43 ^ vd43)) + xk42 + ti43) >> 9U); + vb43 + + ((va42 + (vb43 ^ (vc43 ^ vd43)) + xk42 + ti43) << 23U | + (va42 + (vb43 ^ (vc43 ^ vd43)) + xk42 + ti43) >> 9U); abcd[1U] = v42; uint32_t va43 = abcd[0U]; uint32_t vb44 = abcd[1U]; @@ -770,11 +682,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti44 = _t[44U]; uint32_t v43 = - vb44 - + - ((va43 + (vb44 ^ (vc44 ^ vd44)) + xk43 + ti44) - << 4U - | (va43 + (vb44 ^ (vc44 ^ vd44)) + xk43 + ti44) >> 28U); + vb44 + + ((va43 + (vb44 ^ (vc44 ^ vd44)) + xk43 + ti44) << 4U | + (va43 + (vb44 ^ (vc44 ^ vd44)) + xk43 + ti44) >> 28U); abcd[0U] = v43; uint32_t va44 = abcd[3U]; uint32_t vb45 = abcd[0U]; @@ -786,11 +696,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti45 = _t[45U]; uint32_t v44 = - vb45 - + - ((va44 + (vb45 ^ (vc45 ^ vd45)) + xk44 + ti45) - << 11U - | (va44 + (vb45 ^ (vc45 ^ vd45)) + xk44 + ti45) >> 21U); + vb45 + + ((va44 + (vb45 ^ (vc45 ^ vd45)) + xk44 + ti45) << 11U | + (va44 + (vb45 ^ (vc45 ^ vd45)) + xk44 + ti45) >> 21U); abcd[3U] = v44; uint32_t va45 = abcd[2U]; uint32_t vb46 = abcd[3U]; @@ -802,11 +710,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti46 = _t[46U]; uint32_t v45 = - vb46 - + - ((va45 + (vb46 ^ (vc46 ^ vd46)) + xk45 + ti46) - << 16U - | (va45 + (vb46 ^ (vc46 ^ vd46)) + xk45 + ti46) >> 16U); + vb46 + + ((va45 + (vb46 ^ (vc46 ^ vd46)) + xk45 + ti46) << 16U | + (va45 + (vb46 ^ (vc46 ^ vd46)) + xk45 + ti46) >> 16U); abcd[2U] = v45; uint32_t va46 = abcd[1U]; uint32_t vb47 = abcd[2U]; @@ -818,11 +724,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti47 = _t[47U]; uint32_t v46 = - vb47 - + - ((va46 + (vb47 ^ (vc47 ^ vd47)) + xk46 + ti47) - << 23U - | (va46 + (vb47 ^ (vc47 ^ vd47)) + xk46 + ti47) >> 9U); + vb47 + + ((va46 + (vb47 ^ (vc47 ^ vd47)) + xk46 + ti47) << 23U | + (va46 + (vb47 ^ (vc47 ^ vd47)) + xk46 + ti47) >> 9U); abcd[1U] = v46; uint32_t va47 = abcd[0U]; uint32_t vb48 = abcd[1U]; @@ -834,11 +738,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti48 = _t[48U]; uint32_t v47 = - vb48 - + - ((va47 + (vc48 ^ (vb48 | ~vd48)) + xk47 + ti48) - << 6U - | (va47 + (vc48 ^ (vb48 | ~vd48)) + xk47 + ti48) >> 26U); + vb48 + + ((va47 + (vc48 ^ (vb48 | ~vd48)) + xk47 + ti48) << 6U | + (va47 + (vc48 ^ (vb48 | ~vd48)) + xk47 + ti48) >> 26U); abcd[0U] = v47; uint32_t va48 = abcd[3U]; uint32_t vb49 = abcd[0U]; @@ -850,11 +752,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti49 = _t[49U]; uint32_t v48 = - vb49 - + - ((va48 + (vc49 ^ (vb49 | ~vd49)) + xk48 + ti49) - << 10U - | (va48 + (vc49 ^ (vb49 | ~vd49)) + xk48 + ti49) >> 22U); + vb49 + + ((va48 + (vc49 ^ (vb49 | ~vd49)) + xk48 + ti49) << 10U | + (va48 + (vc49 ^ (vb49 | ~vd49)) + xk48 + ti49) >> 22U); abcd[3U] = v48; uint32_t va49 = abcd[2U]; uint32_t vb50 = abcd[3U]; @@ -866,11 +766,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti50 = _t[50U]; uint32_t v49 = - vb50 - + - ((va49 + (vc50 ^ (vb50 | ~vd50)) + xk49 + ti50) - << 15U - | (va49 + (vc50 ^ (vb50 | ~vd50)) + xk49 + ti50) >> 17U); + vb50 + + ((va49 + (vc50 ^ (vb50 | ~vd50)) + xk49 + ti50) << 15U | + (va49 + (vc50 ^ (vb50 | ~vd50)) + xk49 + ti50) >> 17U); abcd[2U] = v49; uint32_t va50 = abcd[1U]; uint32_t vb51 = abcd[2U]; @@ -882,11 +780,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti51 = _t[51U]; uint32_t v50 = - 
vb51 - + - ((va50 + (vc51 ^ (vb51 | ~vd51)) + xk50 + ti51) - << 21U - | (va50 + (vc51 ^ (vb51 | ~vd51)) + xk50 + ti51) >> 11U); + vb51 + + ((va50 + (vc51 ^ (vb51 | ~vd51)) + xk50 + ti51) << 21U | + (va50 + (vc51 ^ (vb51 | ~vd51)) + xk50 + ti51) >> 11U); abcd[1U] = v50; uint32_t va51 = abcd[0U]; uint32_t vb52 = abcd[1U]; @@ -898,11 +794,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti52 = _t[52U]; uint32_t v51 = - vb52 - + - ((va51 + (vc52 ^ (vb52 | ~vd52)) + xk51 + ti52) - << 6U - | (va51 + (vc52 ^ (vb52 | ~vd52)) + xk51 + ti52) >> 26U); + vb52 + + ((va51 + (vc52 ^ (vb52 | ~vd52)) + xk51 + ti52) << 6U | + (va51 + (vc52 ^ (vb52 | ~vd52)) + xk51 + ti52) >> 26U); abcd[0U] = v51; uint32_t va52 = abcd[3U]; uint32_t vb53 = abcd[0U]; @@ -914,11 +808,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti53 = _t[53U]; uint32_t v52 = - vb53 - + - ((va52 + (vc53 ^ (vb53 | ~vd53)) + xk52 + ti53) - << 10U - | (va52 + (vc53 ^ (vb53 | ~vd53)) + xk52 + ti53) >> 22U); + vb53 + + ((va52 + (vc53 ^ (vb53 | ~vd53)) + xk52 + ti53) << 10U | + (va52 + (vc53 ^ (vb53 | ~vd53)) + xk52 + ti53) >> 22U); abcd[3U] = v52; uint32_t va53 = abcd[2U]; uint32_t vb54 = abcd[3U]; @@ -930,11 +822,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti54 = _t[54U]; uint32_t v53 = - vb54 - + - ((va53 + (vc54 ^ (vb54 | ~vd54)) + xk53 + ti54) - << 15U - | (va53 + (vc54 ^ (vb54 | ~vd54)) + xk53 + ti54) >> 17U); + vb54 + + ((va53 + (vc54 ^ (vb54 | ~vd54)) + xk53 + ti54) << 15U | + (va53 + (vc54 ^ (vb54 | ~vd54)) + xk53 + ti54) >> 17U); abcd[2U] = v53; uint32_t va54 = abcd[1U]; uint32_t vb55 = abcd[2U]; @@ -946,11 +836,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti55 = _t[55U]; uint32_t v54 = - vb55 - + - ((va54 + (vc55 ^ (vb55 | ~vd55)) + xk54 + ti55) - << 21U - | (va54 + (vc55 ^ (vb55 | ~vd55)) + xk54 + ti55) >> 11U); + vb55 + + ((va54 + (vc55 ^ (vb55 | ~vd55)) + xk54 + ti55) << 21U | + (va54 + (vc55 ^ (vb55 | ~vd55)) + xk54 + ti55) >> 11U); abcd[1U] = v54; uint32_t va55 = abcd[0U]; uint32_t vb56 = abcd[1U]; @@ -962,11 +850,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti56 = _t[56U]; uint32_t v55 = - vb56 - + - ((va55 + (vc56 ^ (vb56 | ~vd56)) + xk55 + ti56) - << 6U - | (va55 + (vc56 ^ (vb56 | ~vd56)) + xk55 + ti56) >> 26U); + vb56 + + ((va55 + (vc56 ^ (vb56 | ~vd56)) + xk55 + ti56) << 6U | + (va55 + (vc56 ^ (vb56 | ~vd56)) + xk55 + ti56) >> 26U); abcd[0U] = v55; uint32_t va56 = abcd[3U]; uint32_t vb57 = abcd[0U]; @@ -978,11 +864,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti57 = _t[57U]; uint32_t v56 = - vb57 - + - ((va56 + (vc57 ^ (vb57 | ~vd57)) + xk56 + ti57) - << 10U - | (va56 + (vc57 ^ (vb57 | ~vd57)) + xk56 + ti57) >> 22U); + vb57 + + ((va56 + (vc57 ^ (vb57 | ~vd57)) + xk56 + ti57) << 10U | + (va56 + (vc57 ^ (vb57 | ~vd57)) + xk56 + ti57) >> 22U); abcd[3U] = v56; uint32_t va57 = abcd[2U]; uint32_t vb58 = abcd[3U]; @@ -994,11 +878,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti58 = _t[58U]; uint32_t v57 = - vb58 - + - ((va57 + (vc58 ^ (vb58 | ~vd58)) + xk57 + ti58) - << 15U - | (va57 + (vc58 ^ (vb58 | ~vd58)) + xk57 + ti58) >> 17U); + vb58 + + ((va57 + (vc58 ^ (vb58 | ~vd58)) + xk57 + ti58) << 15U | + (va57 + (vc58 ^ (vb58 | ~vd58)) + xk57 + ti58) >> 17U); abcd[2U] = v57; uint32_t va58 = abcd[1U]; uint32_t vb59 = abcd[2U]; @@ -1010,11 +892,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti59 = _t[59U]; uint32_t v58 = - vb59 - + - ((va58 + (vc59 ^ (vb59 | ~vd59)) + xk58 + ti59) - << 21U - | (va58 + (vc59 ^ (vb59 | 
~vd59)) + xk58 + ti59) >> 11U); + vb59 + + ((va58 + (vc59 ^ (vb59 | ~vd59)) + xk58 + ti59) << 21U | + (va58 + (vc59 ^ (vb59 | ~vd59)) + xk58 + ti59) >> 11U); abcd[1U] = v58; uint32_t va59 = abcd[0U]; uint32_t vb60 = abcd[1U]; @@ -1026,11 +906,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti60 = _t[60U]; uint32_t v59 = - vb60 - + - ((va59 + (vc60 ^ (vb60 | ~vd60)) + xk59 + ti60) - << 6U - | (va59 + (vc60 ^ (vb60 | ~vd60)) + xk59 + ti60) >> 26U); + vb60 + + ((va59 + (vc60 ^ (vb60 | ~vd60)) + xk59 + ti60) << 6U | + (va59 + (vc60 ^ (vb60 | ~vd60)) + xk59 + ti60) >> 26U); abcd[0U] = v59; uint32_t va60 = abcd[3U]; uint32_t vb61 = abcd[0U]; @@ -1042,11 +920,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti61 = _t[61U]; uint32_t v60 = - vb61 - + - ((va60 + (vc61 ^ (vb61 | ~vd61)) + xk60 + ti61) - << 10U - | (va60 + (vc61 ^ (vb61 | ~vd61)) + xk60 + ti61) >> 22U); + vb61 + + ((va60 + (vc61 ^ (vb61 | ~vd61)) + xk60 + ti61) << 10U | + (va60 + (vc61 ^ (vb61 | ~vd61)) + xk60 + ti61) >> 22U); abcd[3U] = v60; uint32_t va61 = abcd[2U]; uint32_t vb62 = abcd[3U]; @@ -1058,11 +934,9 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti62 = _t[62U]; uint32_t v61 = - vb62 - + - ((va61 + (vc62 ^ (vb62 | ~vd62)) + xk61 + ti62) - << 15U - | (va61 + (vc62 ^ (vb62 | ~vd62)) + xk61 + ti62) >> 17U); + vb62 + + ((va61 + (vc62 ^ (vb62 | ~vd62)) + xk61 + ti62) << 15U | + (va61 + (vc62 ^ (vb62 | ~vd62)) + xk61 + ti62) >> 17U); abcd[2U] = v61; uint32_t va62 = abcd[1U]; uint32_t vb = abcd[2U]; @@ -1074,11 +948,8 @@ static void update(uint32_t *abcd, uint8_t *x) uint32_t ti = _t[63U]; uint32_t v62 = - vb - + - ((va62 + (vc ^ (vb | ~vd)) + xk62 + ti) - << 21U - | (va62 + (vc ^ (vb | ~vd)) + xk62 + ti) >> 11U); + vb + + ((va62 + (vc ^ (vb | ~vd)) + xk62 + ti) << 21U | (va62 + (vc ^ (vb | ~vd)) + xk62 + ti) >> 11U); abcd[1U] = v62; uint32_t a = abcd[0U]; uint32_t b = abcd[1U]; @@ -1282,8 +1153,7 @@ Hacl_Hash_MD5_update(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_t uint8_t *buf2 = buf + sz1; memcpy(buf2, chunk, chunk_len * sizeof (uint8_t)); uint64_t total_len2 = total_len1 + (uint64_t)chunk_len; - *state - = + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state1, @@ -1328,8 +1198,7 @@ Hacl_Hash_MD5_update(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_t Hacl_Hash_MD5_update_multi(block_state1, data1, data1_len / 64U); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state1, @@ -1359,8 +1228,7 @@ Hacl_Hash_MD5_update(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_t uint8_t *buf2 = buf0 + sz10; memcpy(buf2, chunk1, diff * sizeof (uint8_t)); uint64_t total_len2 = total_len10 + (uint64_t)diff; - *state - = + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state10, @@ -1403,8 +1271,7 @@ Hacl_Hash_MD5_update(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_t Hacl_Hash_MD5_update_multi(block_state1, data1, data1_len / 64U); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state1, diff --git a/Modules/_hacl/Hacl_Hash_SHA1.c b/Modules/_hacl/Hacl_Hash_SHA1.c index 508e447bf27..97bd4f2204c 100644 --- a/Modules/_hacl/Hacl_Hash_SHA1.c +++ b/Modules/_hacl/Hacl_Hash_SHA1.c @@ -315,8 +315,7 @@ Hacl_Hash_SHA1_update(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_ uint8_t *buf2 = buf + sz1; memcpy(buf2, chunk, chunk_len * sizeof 
(uint8_t)); uint64_t total_len2 = total_len1 + (uint64_t)chunk_len; - *state - = + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state1, @@ -361,8 +360,7 @@ Hacl_Hash_SHA1_update(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_ Hacl_Hash_SHA1_update_multi(block_state1, data1, data1_len / 64U); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state1, @@ -392,8 +390,7 @@ Hacl_Hash_SHA1_update(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_ uint8_t *buf2 = buf0 + sz10; memcpy(buf2, chunk1, diff * sizeof (uint8_t)); uint64_t total_len2 = total_len10 + (uint64_t)diff; - *state - = + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state10, @@ -436,8 +433,7 @@ Hacl_Hash_SHA1_update(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_ Hacl_Hash_SHA1_update_multi(block_state1, data1, data1_len / 64U); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state1, diff --git a/Modules/_hacl/Hacl_Hash_SHA2.c b/Modules/_hacl/Hacl_Hash_SHA2.c index d612bafa72c..d2ee0c9ef51 100644 --- a/Modules/_hacl/Hacl_Hash_SHA2.c +++ b/Modules/_hacl/Hacl_Hash_SHA2.c @@ -100,15 +100,14 @@ static inline void sha256_update(uint8_t *b, uint32_t *hash) uint32_t k_e_t = k_t; uint32_t t1 = - h02 - + ((e0 << 26U | e0 >> 6U) ^ ((e0 << 21U | e0 >> 11U) ^ (e0 << 7U | e0 >> 25U))) - + ((e0 & f0) ^ (~e0 & g0)) + h02 + ((e0 << 26U | e0 >> 6U) ^ ((e0 << 21U | e0 >> 11U) ^ (e0 << 7U | e0 >> 25U))) + + ((e0 & f0) ^ (~e0 & g0)) + k_e_t + ws_t; uint32_t t2 = - ((a0 << 30U | a0 >> 2U) ^ ((a0 << 19U | a0 >> 13U) ^ (a0 << 10U | a0 >> 22U))) - + ((a0 & b0) ^ ((a0 & c0) ^ (b0 & c0))); + ((a0 << 30U | a0 >> 2U) ^ ((a0 << 19U | a0 >> 13U) ^ (a0 << 10U | a0 >> 22U))) + + ((a0 & b0) ^ ((a0 & c0) ^ (b0 & c0))); uint32_t a1 = t1 + t2; uint32_t b1 = a0; uint32_t c1 = b0; @@ -301,15 +300,14 @@ static inline void sha512_update(uint8_t *b, uint64_t *hash) uint64_t k_e_t = k_t; uint64_t t1 = - h02 - + ((e0 << 50U | e0 >> 14U) ^ ((e0 << 46U | e0 >> 18U) ^ (e0 << 23U | e0 >> 41U))) - + ((e0 & f0) ^ (~e0 & g0)) + h02 + ((e0 << 50U | e0 >> 14U) ^ ((e0 << 46U | e0 >> 18U) ^ (e0 << 23U | e0 >> 41U))) + + ((e0 & f0) ^ (~e0 & g0)) + k_e_t + ws_t; uint64_t t2 = - ((a0 << 36U | a0 >> 28U) ^ ((a0 << 30U | a0 >> 34U) ^ (a0 << 25U | a0 >> 39U))) - + ((a0 & b0) ^ ((a0 & c0) ^ (b0 & c0))); + ((a0 << 36U | a0 >> 28U) ^ ((a0 << 30U | a0 >> 34U) ^ (a0 << 25U | a0 >> 39U))) + + ((a0 & b0) ^ ((a0 & c0) ^ (b0 & c0))); uint64_t a1 = t1 + t2; uint64_t b1 = a0; uint64_t c1 = b0; @@ -639,8 +637,7 @@ update_224_256(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_t chunk uint8_t *buf2 = buf + sz1; memcpy(buf2, chunk, chunk_len * sizeof (uint8_t)); uint64_t total_len2 = total_len1 + (uint64_t)chunk_len; - *state - = + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state1, @@ -685,8 +682,7 @@ update_224_256(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_t chunk Hacl_Hash_SHA2_sha256_update_nblocks(data1_len / 64U * 64U, data1, block_state1); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state1, @@ -716,8 +712,7 @@ update_224_256(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_t chunk uint8_t *buf2 = buf0 + sz10; memcpy(buf2, chunk1, diff * sizeof (uint8_t)); uint64_t total_len2 = total_len10 
+ (uint64_t)diff; - *state - = + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state10, @@ -760,8 +755,7 @@ update_224_256(Hacl_Streaming_MD_state_32 *state, uint8_t *chunk, uint32_t chunk Hacl_Hash_SHA2_sha256_update_nblocks(data1_len / 64U * 64U, data1, block_state1); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Streaming_MD_state_32){ .block_state = block_state1, @@ -1205,8 +1199,7 @@ update_384_512(Hacl_Streaming_MD_state_64 *state, uint8_t *chunk, uint32_t chunk uint8_t *buf2 = buf + sz1; memcpy(buf2, chunk, chunk_len * sizeof (uint8_t)); uint64_t total_len2 = total_len1 + (uint64_t)chunk_len; - *state - = + *state = ( (Hacl_Streaming_MD_state_64){ .block_state = block_state1, @@ -1251,8 +1244,7 @@ update_384_512(Hacl_Streaming_MD_state_64 *state, uint8_t *chunk, uint32_t chunk Hacl_Hash_SHA2_sha512_update_nblocks(data1_len / 128U * 128U, data1, block_state1); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Streaming_MD_state_64){ .block_state = block_state1, @@ -1282,8 +1274,7 @@ update_384_512(Hacl_Streaming_MD_state_64 *state, uint8_t *chunk, uint32_t chunk uint8_t *buf2 = buf0 + sz10; memcpy(buf2, chunk1, diff * sizeof (uint8_t)); uint64_t total_len2 = total_len10 + (uint64_t)diff; - *state - = + *state = ( (Hacl_Streaming_MD_state_64){ .block_state = block_state10, @@ -1326,8 +1317,7 @@ update_384_512(Hacl_Streaming_MD_state_64 *state, uint8_t *chunk, uint32_t chunk Hacl_Hash_SHA2_sha512_update_nblocks(data1_len / 128U * 128U, data1, block_state1); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Streaming_MD_state_64){ .block_state = block_state1, diff --git a/Modules/_hacl/Hacl_Hash_SHA3.c b/Modules/_hacl/Hacl_Hash_SHA3.c index 87638df9549..466d2b96c0c 100644 --- a/Modules/_hacl/Hacl_Hash_SHA3.c +++ b/Modules/_hacl/Hacl_Hash_SHA3.c @@ -866,8 +866,7 @@ Hacl_Hash_SHA3_update(Hacl_Hash_SHA3_state_t *state, uint8_t *chunk, uint32_t ch uint8_t *buf2 = buf + sz1; memcpy(buf2, chunk, chunk_len * sizeof (uint8_t)); uint64_t total_len2 = total_len1 + (uint64_t)chunk_len; - *state - = + *state = ((Hacl_Hash_SHA3_state_t){ .block_state = block_state1, .buf = buf, .total_len = total_len2 }); } else if (sz == 0U) @@ -910,8 +909,7 @@ Hacl_Hash_SHA3_update(Hacl_Hash_SHA3_state_t *state, uint8_t *chunk, uint32_t ch Hacl_Hash_SHA3_update_multi_sha3(a1, s2, data1, data1_len / block_len(a1)); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Hash_SHA3_state_t){ .block_state = block_state1, @@ -941,8 +939,7 @@ Hacl_Hash_SHA3_update(Hacl_Hash_SHA3_state_t *state, uint8_t *chunk, uint32_t ch uint8_t *buf2 = buf0 + sz10; memcpy(buf2, chunk1, diff * sizeof (uint8_t)); uint64_t total_len2 = total_len10 + (uint64_t)diff; - *state - = + *state = ( (Hacl_Hash_SHA3_state_t){ .block_state = block_state10, @@ -972,10 +969,8 @@ Hacl_Hash_SHA3_update(Hacl_Hash_SHA3_state_t *state, uint8_t *chunk, uint32_t ch uint32_t ite; if ( - (uint64_t)(chunk_len - diff) - % (uint64_t)block_len(i) - == 0ULL - && (uint64_t)(chunk_len - diff) > 0ULL + (uint64_t)(chunk_len - diff) % (uint64_t)block_len(i) == 0ULL && + (uint64_t)(chunk_len - diff) > 0ULL ) { ite = block_len(i); @@ -994,8 +989,7 @@ Hacl_Hash_SHA3_update(Hacl_Hash_SHA3_state_t *state, uint8_t *chunk, uint32_t ch Hacl_Hash_SHA3_update_multi_sha3(a1, s2, data1, data1_len / block_len(a1)); uint8_t *dst = buf; memcpy(dst, data2, data2_len 
* sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Hash_SHA3_state_t){ .block_state = block_state1, @@ -2422,9 +2416,7 @@ Hacl_Hash_SHA3_shake128_squeeze_nblocks( 5U, 1U, _C[i] = - state[i - + 0U] - ^ (state[i + 5U] ^ (state[i + 10U] ^ (state[i + 15U] ^ state[i + 20U])));); + state[i + 0U] ^ (state[i + 5U] ^ (state[i + 10U] ^ (state[i + 15U] ^ state[i + 20U])));); KRML_MAYBE_FOR5(i2, 0U, 5U, diff --git a/Modules/_hacl/Hacl_Streaming_HMAC.c b/Modules/_hacl/Hacl_Streaming_HMAC.c index d28b39792af..8dd7e2c0bf3 100644 --- a/Modules/_hacl/Hacl_Streaming_HMAC.c +++ b/Modules/_hacl/Hacl_Streaming_HMAC.c @@ -198,8 +198,7 @@ static Hacl_Agile_Hash_state_s *malloc_(Hacl_Agile_Hash_impl a) *st = (Hacl_Agile_Hash_state_s *)KRML_HOST_MALLOC(sizeof (Hacl_Agile_Hash_state_s)); if (st != NULL) { - st[0U] - = ((Hacl_Agile_Hash_state_s){ .tag = Hacl_Agile_Hash_MD5_a, { .case_MD5_a = s1 } }); + st[0U] = ((Hacl_Agile_Hash_state_s){ .tag = Hacl_Agile_Hash_MD5_a, { .case_MD5_a = s1 } }); } if (st == NULL) { @@ -220,8 +219,8 @@ static Hacl_Agile_Hash_state_s *malloc_(Hacl_Agile_Hash_impl a) *st = (Hacl_Agile_Hash_state_s *)KRML_HOST_MALLOC(sizeof (Hacl_Agile_Hash_state_s)); if (st != NULL) { - st[0U] - = ((Hacl_Agile_Hash_state_s){ .tag = Hacl_Agile_Hash_SHA1_a, { .case_SHA1_a = s1 } }); + st[0U] = + ((Hacl_Agile_Hash_state_s){ .tag = Hacl_Agile_Hash_SHA1_a, { .case_SHA1_a = s1 } }); } if (st == NULL) { @@ -242,8 +241,7 @@ static Hacl_Agile_Hash_state_s *malloc_(Hacl_Agile_Hash_impl a) *st = (Hacl_Agile_Hash_state_s *)KRML_HOST_MALLOC(sizeof (Hacl_Agile_Hash_state_s)); if (st != NULL) { - st[0U] - = + st[0U] = ( (Hacl_Agile_Hash_state_s){ .tag = Hacl_Agile_Hash_SHA2_224_a, @@ -270,8 +268,7 @@ static Hacl_Agile_Hash_state_s *malloc_(Hacl_Agile_Hash_impl a) *st = (Hacl_Agile_Hash_state_s *)KRML_HOST_MALLOC(sizeof (Hacl_Agile_Hash_state_s)); if (st != NULL) { - st[0U] - = + st[0U] = ( (Hacl_Agile_Hash_state_s){ .tag = Hacl_Agile_Hash_SHA2_256_a, @@ -298,8 +295,7 @@ static Hacl_Agile_Hash_state_s *malloc_(Hacl_Agile_Hash_impl a) *st = (Hacl_Agile_Hash_state_s *)KRML_HOST_MALLOC(sizeof (Hacl_Agile_Hash_state_s)); if (st != NULL) { - st[0U] - = + st[0U] = ( (Hacl_Agile_Hash_state_s){ .tag = Hacl_Agile_Hash_SHA2_384_a, @@ -326,8 +322,7 @@ static Hacl_Agile_Hash_state_s *malloc_(Hacl_Agile_Hash_impl a) *st = (Hacl_Agile_Hash_state_s *)KRML_HOST_MALLOC(sizeof (Hacl_Agile_Hash_state_s)); if (st != NULL) { - st[0U] - = + st[0U] = ( (Hacl_Agile_Hash_state_s){ .tag = Hacl_Agile_Hash_SHA2_512_a, @@ -354,8 +349,7 @@ static Hacl_Agile_Hash_state_s *malloc_(Hacl_Agile_Hash_impl a) *st = (Hacl_Agile_Hash_state_s *)KRML_HOST_MALLOC(sizeof (Hacl_Agile_Hash_state_s)); if (st != NULL) { - st[0U] - = + st[0U] = ( (Hacl_Agile_Hash_state_s){ .tag = Hacl_Agile_Hash_SHA3_224_a, @@ -382,8 +376,7 @@ static Hacl_Agile_Hash_state_s *malloc_(Hacl_Agile_Hash_impl a) *st = (Hacl_Agile_Hash_state_s *)KRML_HOST_MALLOC(sizeof (Hacl_Agile_Hash_state_s)); if (st != NULL) { - st[0U] - = + st[0U] = ( (Hacl_Agile_Hash_state_s){ .tag = Hacl_Agile_Hash_SHA3_256_a, @@ -410,8 +403,7 @@ static Hacl_Agile_Hash_state_s *malloc_(Hacl_Agile_Hash_impl a) *st = (Hacl_Agile_Hash_state_s *)KRML_HOST_MALLOC(sizeof (Hacl_Agile_Hash_state_s)); if (st != NULL) { - st[0U] - = + st[0U] = ( (Hacl_Agile_Hash_state_s){ .tag = Hacl_Agile_Hash_SHA3_384_a, @@ -438,8 +430,7 @@ static Hacl_Agile_Hash_state_s *malloc_(Hacl_Agile_Hash_impl a) *st = (Hacl_Agile_Hash_state_s *)KRML_HOST_MALLOC(sizeof (Hacl_Agile_Hash_state_s)); if (st != NULL) { - st[0U] - = + st[0U] = ( 
(Hacl_Agile_Hash_state_s){ .tag = Hacl_Agile_Hash_SHA3_512_a, @@ -466,8 +457,7 @@ static Hacl_Agile_Hash_state_s *malloc_(Hacl_Agile_Hash_impl a) *st = (Hacl_Agile_Hash_state_s *)KRML_HOST_MALLOC(sizeof (Hacl_Agile_Hash_state_s)); if (st != NULL) { - st[0U] - = + st[0U] = ( (Hacl_Agile_Hash_state_s){ .tag = Hacl_Agile_Hash_Blake2S_a, @@ -495,8 +485,7 @@ static Hacl_Agile_Hash_state_s *malloc_(Hacl_Agile_Hash_impl a) *st = (Hacl_Agile_Hash_state_s *)KRML_HOST_MALLOC(sizeof (Hacl_Agile_Hash_state_s)); if (st != NULL) { - st[0U] - = + st[0U] = ( (Hacl_Agile_Hash_state_s){ .tag = Hacl_Agile_Hash_Blake2S_128_a, @@ -531,8 +520,7 @@ static Hacl_Agile_Hash_state_s *malloc_(Hacl_Agile_Hash_impl a) *st = (Hacl_Agile_Hash_state_s *)KRML_HOST_MALLOC(sizeof (Hacl_Agile_Hash_state_s)); if (st != NULL) { - st[0U] - = + st[0U] = ( (Hacl_Agile_Hash_state_s){ .tag = Hacl_Agile_Hash_Blake2B_a, @@ -560,8 +548,7 @@ static Hacl_Agile_Hash_state_s *malloc_(Hacl_Agile_Hash_impl a) *st = (Hacl_Agile_Hash_state_s *)KRML_HOST_MALLOC(sizeof (Hacl_Agile_Hash_state_s)); if (st != NULL) { - st[0U] - = + st[0U] = ( (Hacl_Agile_Hash_state_s){ .tag = Hacl_Agile_Hash_Blake2B_256_a, @@ -2059,8 +2046,8 @@ Hacl_Streaming_HMAC_update( Hacl_Streaming_HMAC_Definitions_index i1 = Hacl_Streaming_HMAC_index_of_state(block_state); if ( - (uint64_t)chunk_len - > max_input_len64(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) - total_len + (uint64_t)chunk_len > + max_input_len64(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) - total_len ) { return Hacl_Streaming_Types_MaximumLengthExceeded; @@ -2068,9 +2055,7 @@ Hacl_Streaming_HMAC_update( uint32_t sz; if ( - total_len - % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) - == 0ULL + total_len % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) == 0ULL && total_len > 0ULL ) { @@ -2079,8 +2064,8 @@ Hacl_Streaming_HMAC_update( else { sz = - (uint32_t)(total_len - % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)))); + (uint32_t)(total_len % + (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)))); } if (chunk_len <= block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) - sz) { @@ -2091,9 +2076,7 @@ Hacl_Streaming_HMAC_update( uint32_t sz1; if ( - total_len1 - % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) - == 0ULL + total_len1 % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) == 0ULL && total_len1 > 0ULL ) { @@ -2102,14 +2085,13 @@ Hacl_Streaming_HMAC_update( else { sz1 = - (uint32_t)(total_len1 - % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)))); + (uint32_t)(total_len1 % + (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)))); } uint8_t *buf2 = buf + sz1; memcpy(buf2, chunk, chunk_len * sizeof (uint8_t)); uint64_t total_len2 = total_len1 + (uint64_t)chunk_len; - *state - = + *state = ( (Hacl_Streaming_HMAC_agile_state){ .block_state = block_state1, @@ -2127,9 +2109,7 @@ Hacl_Streaming_HMAC_update( uint32_t sz1; if ( - total_len1 - % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) - == 0ULL + total_len1 % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) == 0ULL && total_len1 > 0ULL ) { @@ -2138,8 +2118,8 @@ Hacl_Streaming_HMAC_update( else { sz1 = - (uint32_t)(total_len1 - % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)))); + (uint32_t)(total_len1 % + 
(uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)))); } if (!(sz1 == 0U)) { @@ -2153,8 +2133,8 @@ Hacl_Streaming_HMAC_update( uint32_t ite; if ( - (uint64_t)chunk_len - % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) + (uint64_t)chunk_len % + (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) == 0ULL && (uint64_t)chunk_len > 0ULL ) @@ -2164,8 +2144,8 @@ Hacl_Streaming_HMAC_update( else { ite = - (uint32_t)((uint64_t)chunk_len - % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)))); + (uint32_t)((uint64_t)chunk_len % + (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)))); } uint32_t n_blocks = (chunk_len - ite) / block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))); @@ -2178,8 +2158,7 @@ Hacl_Streaming_HMAC_update( update_multi(s11, total_len1, data1, data1_len); uint8_t *dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Streaming_HMAC_agile_state){ .block_state = block_state1, @@ -2200,9 +2179,8 @@ Hacl_Streaming_HMAC_update( uint32_t sz10; if ( - total_len10 - % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) - == 0ULL + total_len10 % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) == + 0ULL && total_len10 > 0ULL ) { @@ -2211,14 +2189,13 @@ Hacl_Streaming_HMAC_update( else { sz10 = - (uint32_t)(total_len10 - % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)))); + (uint32_t)(total_len10 % + (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)))); } uint8_t *buf2 = buf0 + sz10; memcpy(buf2, chunk1, diff * sizeof (uint8_t)); uint64_t total_len2 = total_len10 + (uint64_t)diff; - *state - = + *state = ( (Hacl_Streaming_HMAC_agile_state){ .block_state = block_state10, @@ -2233,9 +2210,7 @@ Hacl_Streaming_HMAC_update( uint32_t sz1; if ( - total_len1 - % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) - == 0ULL + total_len1 % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) == 0ULL && total_len1 > 0ULL ) { @@ -2244,8 +2219,8 @@ Hacl_Streaming_HMAC_update( else { sz1 = - (uint32_t)(total_len1 - % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)))); + (uint32_t)(total_len1 % + (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)))); } if (!(sz1 == 0U)) { @@ -2259,8 +2234,8 @@ Hacl_Streaming_HMAC_update( uint32_t ite; if ( - (uint64_t)(chunk_len - diff) - % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) + (uint64_t)(chunk_len - diff) % + (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) == 0ULL && (uint64_t)(chunk_len - diff) > 0ULL ) @@ -2270,13 +2245,12 @@ Hacl_Streaming_HMAC_update( else { ite = - (uint32_t)((uint64_t)(chunk_len - diff) - % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)))); + (uint32_t)((uint64_t)(chunk_len - diff) % + (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)))); } uint32_t n_blocks = - (chunk_len - diff - ite) - / block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))); + (chunk_len - diff - ite) / block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))); uint32_t data1_len = n_blocks * block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))); uint32_t data2_len = chunk_len - diff - data1_len; @@ -2286,8 +2260,7 @@ Hacl_Streaming_HMAC_update( update_multi(s11, total_len1, data1, data1_len); uint8_t 
*dst = buf; memcpy(dst, data2, data2_len * sizeof (uint8_t)); - *state - = + *state = ( (Hacl_Streaming_HMAC_agile_state){ .block_state = block_state1, @@ -2324,9 +2297,7 @@ Hacl_Streaming_HMAC_digest( uint32_t r; if ( - total_len - % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) - == 0ULL + total_len % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1))) == 0ULL && total_len > 0ULL ) { @@ -2335,8 +2306,8 @@ Hacl_Streaming_HMAC_digest( else { r = - (uint32_t)(total_len - % (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)))); + (uint32_t)(total_len % + (uint64_t)block_len(alg_of_impl(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)))); } uint8_t *buf_1 = buf_; Hacl_Agile_Hash_state_s *s110 = malloc_(dfst__Hacl_Agile_Hash_impl_uint32_t(i1)); diff --git a/Modules/_hacl/Lib_Memzero0.c b/Modules/_hacl/Lib_Memzero0.c index 28abd1aa4e2..f94e0e2254a 100644 --- a/Modules/_hacl/Lib_Memzero0.c +++ b/Modules/_hacl/Lib_Memzero0.c @@ -11,18 +11,18 @@ #if defined(__APPLE__) && defined(__MACH__) #include <AvailabilityMacros.h> // memset_s is available from macOS 10.9, iOS 7, watchOS 2, and on all tvOS and visionOS versions. -# if (defined(MAC_OS_X_VERSION_MIN_REQUIRED) && (MAC_OS_X_VERSION_MIN_REQUIRED >= __MAC_10_9)) -# define APPLE_HAS_MEMSET_S 1 -# elif (defined(__IPHONE_OS_VERSION_MIN_REQUIRED) && (__IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_7_0)) -# define APPLE_HAS_MEMSET_S 1 +# if (defined(MAC_OS_X_VERSION_MIN_REQUIRED) && defined(MAC_OS_X_VERSION_10_9) && (MAC_OS_X_VERSION_MIN_REQUIRED >= MAC_OS_X_VERSION_10_9)) +# define APPLE_HAS_MEMSET_S +# elif (defined(__IPHONE_OS_VERSION_MIN_REQUIRED) && defined(__IPHONE_7_0) && (__IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_7_0)) +# define APPLE_HAS_MEMSET_S # elif (defined(TARGET_OS_TV) && TARGET_OS_TV) -# define APPLE_HAS_MEMSET_S 1 -# elif (defined(__WATCH_OS_VERSION_MIN_REQUIRED) && (__WATCH_OS_VERSION_MIN_REQUIRED >= __WATCHOS_2_0)) -# define APPLE_HAS_MEMSET_S 1 +# define APPLE_HAS_MEMSET_S +# elif (defined(__WATCH_OS_VERSION_MIN_REQUIRED) && defined(__WATCHOS_2_0) && (__WATCH_OS_VERSION_MIN_REQUIRED >= __WATCHOS_2_0)) +# define APPLE_HAS_MEMSET_S # elif (defined(TARGET_OS_VISION) && TARGET_OS_VISION) -# define APPLE_HAS_MEMSET_S 1 +# define APPLE_HAS_MEMSET_S # else -# define APPLE_HAS_MEMSET_S 0 +# undef APPLE_HAS_MEMSET_S # endif #endif @@ -55,7 +55,7 @@ void Lib_Memzero0_memzero0(void *dst, uint64_t len) { #ifdef _WIN32 SecureZeroMemory(dst, len_); - #elif defined(__APPLE__) && defined(__MACH__) && APPLE_HAS_MEMSET_S + #elif defined(__APPLE__) && defined(__MACH__) && defined(APPLE_HAS_MEMSET_S) memset_s(dst, len_, 0, len_); #elif (defined(__linux__) && !defined(LINUX_NO_EXPLICIT_BZERO)) || defined(__FreeBSD__) || defined(__OpenBSD__) explicit_bzero(dst, len_); diff --git a/Modules/_hacl/include/krml/FStar_UInt128_Verified.h b/Modules/_hacl/include/krml/FStar_UInt128_Verified.h index d4a90220bea..f85982f3373 100644 --- a/Modules/_hacl/include/krml/FStar_UInt128_Verified.h +++ b/Modules/_hacl/include/krml/FStar_UInt128_Verified.h @@ -257,11 +257,11 @@ FStar_UInt128_gte_mask(FStar_UInt128_uint128 a, FStar_UInt128_uint128 b) { FStar_UInt128_uint128 lit; lit.low = - (FStar_UInt64_gte_mask(a.high, b.high) & ~FStar_UInt64_eq_mask(a.high, b.high)) - | (FStar_UInt64_eq_mask(a.high, b.high) & FStar_UInt64_gte_mask(a.low, b.low)); + (FStar_UInt64_gte_mask(a.high, b.high) & ~FStar_UInt64_eq_mask(a.high, b.high)) | + (FStar_UInt64_eq_mask(a.high, b.high) & FStar_UInt64_gte_mask(a.low, b.low)); 
lit.high = - (FStar_UInt64_gte_mask(a.high, b.high) & ~FStar_UInt64_eq_mask(a.high, b.high)) - | (FStar_UInt64_eq_mask(a.high, b.high) & FStar_UInt64_gte_mask(a.low, b.low)); + (FStar_UInt64_gte_mask(a.high, b.high) & ~FStar_UInt64_eq_mask(a.high, b.high)) | + (FStar_UInt64_eq_mask(a.high, b.high) & FStar_UInt64_gte_mask(a.low, b.low)); return lit; } @@ -294,14 +294,12 @@ static inline FStar_UInt128_uint128 FStar_UInt128_mul32(uint64_t x, uint32_t y) { FStar_UInt128_uint128 lit; lit.low = - FStar_UInt128_u32_combine((x >> FStar_UInt128_u32_32) - * (uint64_t)y - + (FStar_UInt128_u64_mod_32(x) * (uint64_t)y >> FStar_UInt128_u32_32), + FStar_UInt128_u32_combine((x >> FStar_UInt128_u32_32) * (uint64_t)y + + (FStar_UInt128_u64_mod_32(x) * (uint64_t)y >> FStar_UInt128_u32_32), FStar_UInt128_u64_mod_32(FStar_UInt128_u64_mod_32(x) * (uint64_t)y)); lit.high = - ((x >> FStar_UInt128_u32_32) - * (uint64_t)y - + (FStar_UInt128_u64_mod_32(x) * (uint64_t)y >> FStar_UInt128_u32_32)) + ((x >> FStar_UInt128_u32_32) * (uint64_t)y + + (FStar_UInt128_u64_mod_32(x) * (uint64_t)y >> FStar_UInt128_u32_32)) >> FStar_UInt128_u32_32; return lit; } @@ -315,28 +313,19 @@ static inline FStar_UInt128_uint128 FStar_UInt128_mul_wide(uint64_t x, uint64_t { FStar_UInt128_uint128 lit; lit.low = - FStar_UInt128_u32_combine_(FStar_UInt128_u64_mod_32(x) - * (y >> FStar_UInt128_u32_32) - + - FStar_UInt128_u64_mod_32((x >> FStar_UInt128_u32_32) - * FStar_UInt128_u64_mod_32(y) - + (FStar_UInt128_u64_mod_32(x) * FStar_UInt128_u64_mod_32(y) >> FStar_UInt128_u32_32)), + FStar_UInt128_u32_combine_(FStar_UInt128_u64_mod_32(x) * (y >> FStar_UInt128_u32_32) + + FStar_UInt128_u64_mod_32((x >> FStar_UInt128_u32_32) * FStar_UInt128_u64_mod_32(y) + + (FStar_UInt128_u64_mod_32(x) * FStar_UInt128_u64_mod_32(y) >> FStar_UInt128_u32_32)), FStar_UInt128_u64_mod_32(FStar_UInt128_u64_mod_32(x) * FStar_UInt128_u64_mod_32(y))); lit.high = - (x >> FStar_UInt128_u32_32) - * (y >> FStar_UInt128_u32_32) - + - (((x >> FStar_UInt128_u32_32) - * FStar_UInt128_u64_mod_32(y) - + (FStar_UInt128_u64_mod_32(x) * FStar_UInt128_u64_mod_32(y) >> FStar_UInt128_u32_32)) + (x >> FStar_UInt128_u32_32) * (y >> FStar_UInt128_u32_32) + + (((x >> FStar_UInt128_u32_32) * FStar_UInt128_u64_mod_32(y) + + (FStar_UInt128_u64_mod_32(x) * FStar_UInt128_u64_mod_32(y) >> FStar_UInt128_u32_32)) >> FStar_UInt128_u32_32) + - ((FStar_UInt128_u64_mod_32(x) - * (y >> FStar_UInt128_u32_32) - + - FStar_UInt128_u64_mod_32((x >> FStar_UInt128_u32_32) - * FStar_UInt128_u64_mod_32(y) - + (FStar_UInt128_u64_mod_32(x) * FStar_UInt128_u64_mod_32(y) >> FStar_UInt128_u32_32))) + ((FStar_UInt128_u64_mod_32(x) * (y >> FStar_UInt128_u32_32) + + FStar_UInt128_u64_mod_32((x >> FStar_UInt128_u32_32) * FStar_UInt128_u64_mod_32(y) + + (FStar_UInt128_u64_mod_32(x) * FStar_UInt128_u64_mod_32(y) >> FStar_UInt128_u32_32))) >> FStar_UInt128_u32_32); return lit; } diff --git a/Modules/_hacl/refresh.sh b/Modules/_hacl/refresh.sh index d91650b44bb..a6776282423 100755 --- a/Modules/_hacl/refresh.sh +++ b/Modules/_hacl/refresh.sh @@ -22,7 +22,7 @@ fi # Update this when updating to a new version after verifying that the changes # the update brings in are good. 
-expected_hacl_star_rev=7720f6d4fc0468a99d5ea6120976bcc271e42727 +expected_hacl_star_rev=4ef25b547b377dcef855db4289c6a00580e7221c hacl_dir="$(realpath "$1")" cd "$(dirname "$0")" diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 92f744c5a5f..76bd76cc6b2 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -1690,11 +1690,12 @@ create_interpreter(PyObject *self, PyObject *args, PyObject *kwargs) static PyObject * destroy_interpreter(PyObject *self, PyObject *args, PyObject *kwargs) { - static char *kwlist[] = {"id", NULL}; + static char *kwlist[] = {"id", "basic", NULL}; PyObject *idobj = NULL; + int basic = 0; if (!PyArg_ParseTupleAndKeywords(args, kwargs, - "O:destroy_interpreter", kwlist, - &idobj)) + "O|p:destroy_interpreter", kwlist, + &idobj, &basic)) { return NULL; } @@ -1704,7 +1705,27 @@ destroy_interpreter(PyObject *self, PyObject *args, PyObject *kwargs) return NULL; } - _PyXI_EndInterpreter(interp, NULL, NULL); + if (basic) + { + // Test the basic Py_EndInterpreter with weird out of order thread states + PyThreadState *t1, *t2; + PyThreadState *prev; + t1 = interp->threads.head; + if (t1 == NULL) { + t1 = PyThreadState_New(interp); + } + t2 = PyThreadState_New(interp); + prev = PyThreadState_Swap(t2); + PyThreadState_Clear(t1); + PyThreadState_Delete(t1); + Py_EndInterpreter(t2); + PyThreadState_Swap(prev); + } + else + { + // use the cross interpreter _PyXI_EndInterpreter normally + _PyXI_EndInterpreter(interp, NULL, NULL); + } Py_RETURN_NONE; } diff --git a/Modules/_winapi.c b/Modules/_winapi.c index 02817e09b93..044505fab62 100644 --- a/Modules/_winapi.c +++ b/Modules/_winapi.c @@ -1573,6 +1573,7 @@ static PyObject * _winapi_GetLongPathName_impl(PyObject *module, LPCWSTR path) /*[clinic end generated code: output=c4774b080275a2d0 input=9872e211e3a4a88f]*/ { +#if defined(MS_WINDOWS_APP) || defined(MS_WINDOWS_SYSTEM) DWORD cchBuffer; PyObject *result = NULL; @@ -1596,6 +1597,9 @@ _winapi_GetLongPathName_impl(PyObject *module, LPCWSTR path) PyErr_SetFromWindowsErr(0); } return result; +#else + return PyUnicode_FromWideChar(path, -1); +#endif } /*[clinic input] @@ -1629,9 +1633,11 @@ _winapi_GetModuleFileName_impl(PyObject *module, HMODULE module_handle) if (! 
result) return PyErr_SetFromWindowsErr(GetLastError()); - return PyUnicode_FromWideChar(filename, wcslen(filename)); + return PyUnicode_FromWideChar(filename, -1); } +#if defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM) + /*[clinic input] _winapi.GetShortPathName @@ -1674,6 +1680,8 @@ _winapi_GetShortPathName_impl(PyObject *module, LPCWSTR path) return result; } +#endif /* MS_WINDOWS_DESKTOP || MS_WINDOWS_SYSTEM */ + /*[clinic input] _winapi.GetStdHandle -> HANDLE @@ -2883,6 +2891,7 @@ _winapi_NeedCurrentDirectoryForExePath_impl(PyObject *module, LPCWSTR exe_name) /*[clinic end generated code: output=a65ec879502b58fc input=972aac88a1ec2f00]*/ { +#if defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM) BOOL result; Py_BEGIN_ALLOW_THREADS @@ -2890,6 +2899,9 @@ _winapi_NeedCurrentDirectoryForExePath_impl(PyObject *module, Py_END_ALLOW_THREADS return result; +#else + return TRUE; +#endif } diff --git a/Modules/clinic/_dbmmodule.c.h b/Modules/clinic/_dbmmodule.c.h index 5e503194408..091ce9edc43 100644 --- a/Modules/clinic/_dbmmodule.c.h +++ b/Modules/clinic/_dbmmodule.c.h @@ -5,6 +5,7 @@ preserve #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) # include "pycore_runtime.h" // _Py_SINGLETON() #endif +#include "pycore_critical_section.h"// Py_BEGIN_CRITICAL_SECTION() #include "pycore_modsupport.h" // _PyArg_UnpackKeywords() PyDoc_STRVAR(_dbm_dbm_close__doc__, @@ -22,7 +23,13 @@ _dbm_dbm_close_impl(dbmobject *self); static PyObject * _dbm_dbm_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _dbm_dbm_close_impl((dbmobject *)self); + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = _dbm_dbm_close_impl((dbmobject *)self); + Py_END_CRITICAL_SECTION(); + + return return_value; } PyDoc_STRVAR(_dbm_dbm_keys__doc__, @@ -40,11 +47,18 @@ _dbm_dbm_keys_impl(dbmobject *self, PyTypeObject *cls); static PyObject * _dbm_dbm_keys(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { + PyObject *return_value = NULL; + if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "keys() takes no arguments"); - return NULL; + goto exit; } - return _dbm_dbm_keys_impl((dbmobject *)self, cls); + Py_BEGIN_CRITICAL_SECTION(self); + return_value = _dbm_dbm_keys_impl((dbmobject *)self, cls); + Py_END_CRITICAL_SECTION(); + +exit: + return return_value; } PyDoc_STRVAR(_dbm_dbm_get__doc__, @@ -85,7 +99,9 @@ _dbm_dbm_get(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_ &key, &key_length, &default_value)) { goto exit; } + Py_BEGIN_CRITICAL_SECTION(self); return_value = _dbm_dbm_get_impl((dbmobject *)self, cls, key, key_length, default_value); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -131,7 +147,9 @@ _dbm_dbm_setdefault(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py &key, &key_length, &default_value)) { goto exit; } + Py_BEGIN_CRITICAL_SECTION(self); return_value = _dbm_dbm_setdefault_impl((dbmobject *)self, cls, key, key_length, default_value); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -152,11 +170,18 @@ _dbm_dbm_clear_impl(dbmobject *self, PyTypeObject *cls); static PyObject * _dbm_dbm_clear(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { + PyObject *return_value = NULL; + if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "clear() takes no arguments"); - return NULL; + goto exit; } - return _dbm_dbm_clear_impl((dbmobject *)self, 
cls); + Py_BEGIN_CRITICAL_SECTION(self); + return_value = _dbm_dbm_clear_impl((dbmobject *)self, cls); + Py_END_CRITICAL_SECTION(); + +exit: + return return_value; } PyDoc_STRVAR(dbmopen__doc__, @@ -221,4 +246,4 @@ skip_optional: exit: return return_value; } -/*[clinic end generated code: output=3b456118f231b160 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=279511ea7cda38dd input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_gdbmmodule.c.h b/Modules/clinic/_gdbmmodule.c.h index 00950f18e53..6fd6aa3da50 100644 --- a/Modules/clinic/_gdbmmodule.c.h +++ b/Modules/clinic/_gdbmmodule.c.h @@ -5,6 +5,7 @@ preserve #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE) # include "pycore_runtime.h" // _Py_SINGLETON() #endif +#include "pycore_critical_section.h"// Py_BEGIN_CRITICAL_SECTION() #include "pycore_modsupport.h" // _PyArg_CheckPositional() PyDoc_STRVAR(_gdbm_gdbm_get__doc__, @@ -70,7 +71,9 @@ _gdbm_gdbm_setdefault(PyObject *self, PyObject *const *args, Py_ssize_t nargs) } default_value = args[1]; skip_optional: + Py_BEGIN_CRITICAL_SECTION(self); return_value = _gdbm_gdbm_setdefault_impl((gdbmobject *)self, key, default_value); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -91,7 +94,13 @@ _gdbm_gdbm_close_impl(gdbmobject *self); static PyObject * _gdbm_gdbm_close(PyObject *self, PyObject *Py_UNUSED(ignored)) { - return _gdbm_gdbm_close_impl((gdbmobject *)self); + PyObject *return_value = NULL; + + Py_BEGIN_CRITICAL_SECTION(self); + return_value = _gdbm_gdbm_close_impl((gdbmobject *)self); + Py_END_CRITICAL_SECTION(); + + return return_value; } PyDoc_STRVAR(_gdbm_gdbm_keys__doc__, @@ -109,11 +118,18 @@ _gdbm_gdbm_keys_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * _gdbm_gdbm_keys(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { + PyObject *return_value = NULL; + if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "keys() takes no arguments"); - return NULL; + goto exit; } - return _gdbm_gdbm_keys_impl((gdbmobject *)self, cls); + Py_BEGIN_CRITICAL_SECTION(self); + return_value = _gdbm_gdbm_keys_impl((gdbmobject *)self, cls); + Py_END_CRITICAL_SECTION(); + +exit: + return return_value; } PyDoc_STRVAR(_gdbm_gdbm_firstkey__doc__, @@ -135,11 +151,18 @@ _gdbm_gdbm_firstkey_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * _gdbm_gdbm_firstkey(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { + PyObject *return_value = NULL; + if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "firstkey() takes no arguments"); - return NULL; + goto exit; } - return _gdbm_gdbm_firstkey_impl((gdbmobject *)self, cls); + Py_BEGIN_CRITICAL_SECTION(self); + return_value = _gdbm_gdbm_firstkey_impl((gdbmobject *)self, cls); + Py_END_CRITICAL_SECTION(); + +exit: + return return_value; } PyDoc_STRVAR(_gdbm_gdbm_nextkey__doc__, @@ -187,7 +210,9 @@ _gdbm_gdbm_nextkey(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ &key, &key_length)) { goto exit; } + Py_BEGIN_CRITICAL_SECTION(self); return_value = _gdbm_gdbm_nextkey_impl((gdbmobject *)self, cls, key, key_length); + Py_END_CRITICAL_SECTION(); exit: return return_value; @@ -214,11 +239,18 @@ _gdbm_gdbm_reorganize_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * _gdbm_gdbm_reorganize(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { + PyObject *return_value = NULL; + if (nargs || 
(kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "reorganize() takes no arguments"); - return NULL; + goto exit; } - return _gdbm_gdbm_reorganize_impl((gdbmobject *)self, cls); + Py_BEGIN_CRITICAL_SECTION(self); + return_value = _gdbm_gdbm_reorganize_impl((gdbmobject *)self, cls); + Py_END_CRITICAL_SECTION(); + +exit: + return return_value; } PyDoc_STRVAR(_gdbm_gdbm_sync__doc__, @@ -239,11 +271,18 @@ _gdbm_gdbm_sync_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * _gdbm_gdbm_sync(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { + PyObject *return_value = NULL; + if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "sync() takes no arguments"); - return NULL; + goto exit; } - return _gdbm_gdbm_sync_impl((gdbmobject *)self, cls); + Py_BEGIN_CRITICAL_SECTION(self); + return_value = _gdbm_gdbm_sync_impl((gdbmobject *)self, cls); + Py_END_CRITICAL_SECTION(); + +exit: + return return_value; } PyDoc_STRVAR(_gdbm_gdbm_clear__doc__, @@ -261,11 +300,18 @@ _gdbm_gdbm_clear_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * _gdbm_gdbm_clear(PyObject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { + PyObject *return_value = NULL; + if (nargs || (kwnames && PyTuple_GET_SIZE(kwnames))) { PyErr_SetString(PyExc_TypeError, "clear() takes no arguments"); - return NULL; + goto exit; } - return _gdbm_gdbm_clear_impl((gdbmobject *)self, cls); + Py_BEGIN_CRITICAL_SECTION(self); + return_value = _gdbm_gdbm_clear_impl((gdbmobject *)self, cls); + Py_END_CRITICAL_SECTION(); + +exit: + return return_value; } PyDoc_STRVAR(dbmopen__doc__, @@ -343,4 +389,4 @@ skip_optional: exit: return return_value; } -/*[clinic end generated code: output=d974cb39e4ee5d67 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=8bca34ce9d4493dd input=a9049054013a1b77]*/ diff --git a/Modules/clinic/_winapi.c.h b/Modules/clinic/_winapi.c.h index f8b623fca08..b0fc1f1a89b 100644 --- a/Modules/clinic/_winapi.c.h +++ b/Modules/clinic/_winapi.c.h @@ -857,6 +857,8 @@ exit: return return_value; } +#if (defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM)) + PyDoc_STRVAR(_winapi_GetShortPathName__doc__, "GetShortPathName($module, /, path)\n" "--\n" @@ -930,6 +932,8 @@ exit: return return_value; } +#endif /* (defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM)) */ + PyDoc_STRVAR(_winapi_GetStdHandle__doc__, "GetStdHandle($module, std_handle, /)\n" "--\n" @@ -2161,4 +2165,8 @@ exit: return return_value; } -/*[clinic end generated code: output=6cd07628af447d0a input=a9049054013a1b77]*/ + +#ifndef _WINAPI_GETSHORTPATHNAME_METHODDEF + #define _WINAPI_GETSHORTPATHNAME_METHODDEF +#endif /* !defined(_WINAPI_GETSHORTPATHNAME_METHODDEF) */ +/*[clinic end generated code: output=ede63eaaf63aa7e6 input=a9049054013a1b77]*/ diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c index d49ce794d88..c94f4f66366 100644 --- a/Modules/faulthandler.c +++ b/Modules/faulthandler.c @@ -1069,18 +1069,6 @@ faulthandler_suppress_crash_report(void) #endif } -static PyObject* _Py_NO_SANITIZE_UNDEFINED -faulthandler_read_null(PyObject *self, PyObject *args) -{ - volatile int *x; - volatile int y; - - faulthandler_suppress_crash_report(); - x = NULL; - y = *x; - return PyLong_FromLong(y); - -} static void faulthandler_raise_sigsegv(void) @@ -1158,23 +1146,12 @@ faulthandler_fatal_error_c_thread(PyObject *self, PyObject *args) Py_RETURN_NONE; } -static PyObject* 
_Py_NO_SANITIZE_UNDEFINED +static PyObject* faulthandler_sigfpe(PyObject *self, PyObject *Py_UNUSED(dummy)) { faulthandler_suppress_crash_report(); - - /* Do an integer division by zero: raise a SIGFPE on Intel CPU, but not on - PowerPC. Use volatile to disable compile-time optimizations. */ - volatile int x = 1, y = 0, z; - z = x / y; - - /* If the division by zero didn't raise a SIGFPE (e.g. on PowerPC), - raise it manually. */ raise(SIGFPE); - - /* This line is never reached, but we pretend to make something with z - to silence a compiler warning. */ - return PyLong_FromLong(z); + Py_UNREACHABLE(); } static PyObject * @@ -1316,10 +1293,6 @@ static PyMethodDef module_methods[] = { "Unregister the handler of the signal " "'signum' registered by register().")}, #endif - {"_read_null", faulthandler_read_null, METH_NOARGS, - PyDoc_STR("_read_null($module, /)\n--\n\n" - "Read from NULL, raise " - "a SIGSEGV or SIGBUS signal depending on the platform.")}, {"_sigsegv", faulthandler_sigsegv, METH_VARARGS, PyDoc_STR("_sigsegv($module, release_gil=False, /)\n--\n\n" "Raise a SIGSEGV signal.")}, diff --git a/Modules/main.c b/Modules/main.c index ea1239ecc57..2be194bdadf 100644 --- a/Modules/main.c +++ b/Modules/main.c @@ -128,7 +128,7 @@ pymain_get_importer(const wchar_t *filename, PyObject **importer_p, int *exitcod { PyObject *sys_path0 = NULL, *importer; - sys_path0 = PyUnicode_FromWideChar(filename, wcslen(filename)); + sys_path0 = PyUnicode_FromWideChar(filename, -1); if (sys_path0 == NULL) { goto error; } @@ -328,7 +328,7 @@ pymain_run_module(const wchar_t *modname, int set_argv0) fprintf(stderr, "Could not import runpy._run_module_as_main\n"); return pymain_exit_err_print(); } - module = PyUnicode_FromWideChar(modname, wcslen(modname)); + module = PyUnicode_FromWideChar(modname, -1); if (module == NULL) { fprintf(stderr, "Could not convert module name to unicode\n"); Py_DECREF(runmodule); @@ -439,7 +439,7 @@ pymain_run_startup(PyConfig *config, int *exitcode) if (env == NULL || env[0] == L'\0') { return 0; } - startup = PyUnicode_FromWideChar(env, wcslen(env)); + startup = PyUnicode_FromWideChar(env, -1); if (startup == NULL) { goto error; } diff --git a/Modules/mmapmodule.c b/Modules/mmapmodule.c index 6a385562845..7c4eb05488e 100644 --- a/Modules/mmapmodule.c +++ b/Modules/mmapmodule.c @@ -290,6 +290,24 @@ filter_page_exception_method(mmap_object *self, EXCEPTION_POINTERS *ptrs, } return EXCEPTION_CONTINUE_SEARCH; } + +static void +_PyErr_SetFromNTSTATUS(ULONG status) +{ +#if defined(MS_WINDOWS_DESKTOP) || defined(MS_WINDOWS_SYSTEM) + PyErr_SetFromWindowsErr(LsaNtStatusToWinError((NTSTATUS)status)); +#else + if (status & 0x80000000) { + // HRESULT-shaped codes are supported by PyErr_SetFromWindowsErr + PyErr_SetFromWindowsErr((int)status); + } + else { + // No mapping for NTSTATUS values, so just return it for diagnostic purposes + // If we provide it as winerror it could incorrectly change the type of the exception. 
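Because the flattened hunk above is hard to follow, here is a consolidated sketch of the fallback path that the mmapmodule.c change introduces (the hunk continues immediately below with the PyErr_Format branch and the macro call sites). This is a readability aid only, not the exact CPython helper: the function name is illustrative, and it assumes only the documented PyErr_SetFromWindowsErr() and PyErr_Format() APIs.

    #include <Python.h>   /* PyErr_SetFromWindowsErr, PyErr_Format, PyExc_OSError */

    /* Sketch: mirror the branch taken when LsaNtStatusToWinError() is not
       available on the current Windows API partition. */
    static void
    set_oserror_from_ntstatus(unsigned long status)
    {
        if (status & 0x80000000UL) {
            /* HRESULT-shaped codes are accepted by PyErr_SetFromWindowsErr(). */
            PyErr_SetFromWindowsErr((int)status);
        }
        else {
            /* No reliable winerror mapping: report the raw NTSTATUS instead of
               guessing one, which could change the exception subtype. */
            PyErr_Format(PyExc_OSError,
                         "Operating system error NTSTATUS=0x%08lX", status);
        }
    }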
+ PyErr_Format(PyExc_OSError, "Operating system error NTSTATUS=0x%08lX", status); + } +#endif +} #endif #if defined(MS_WINDOWS) && !defined(DONT_USE_SEH) @@ -303,9 +321,7 @@ do { \ assert(record.ExceptionCode == EXCEPTION_IN_PAGE_ERROR || \ record.ExceptionCode == EXCEPTION_ACCESS_VIOLATION); \ if (record.ExceptionCode == EXCEPTION_IN_PAGE_ERROR) { \ - NTSTATUS status = (NTSTATUS) record.ExceptionInformation[2]; \ - ULONG code = LsaNtStatusToWinError(status); \ - PyErr_SetFromWindowsErr(code); \ + _PyErr_SetFromNTSTATUS((ULONG)record.ExceptionInformation[2]); \ } \ else if (record.ExceptionCode == EXCEPTION_ACCESS_VIOLATION) { \ PyErr_SetFromWindowsErr(ERROR_NOACCESS); \ @@ -332,9 +348,7 @@ do { \ assert(record.ExceptionCode == EXCEPTION_IN_PAGE_ERROR || \ record.ExceptionCode == EXCEPTION_ACCESS_VIOLATION); \ if (record.ExceptionCode == EXCEPTION_IN_PAGE_ERROR) { \ - NTSTATUS status = (NTSTATUS) record.ExceptionInformation[2]; \ - ULONG code = LsaNtStatusToWinError(status); \ - PyErr_SetFromWindowsErr(code); \ + _PyErr_SetFromNTSTATUS((ULONG)record.ExceptionInformation[2]); \ } \ else if (record.ExceptionCode == EXCEPTION_ACCESS_VIOLATION) { \ PyErr_SetFromWindowsErr(ERROR_NOACCESS); \ diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 9cc0533f0dc..588894adeac 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -1782,7 +1782,7 @@ convertenviron(void) return NULL; } #ifdef MS_WINDOWS - v = PyUnicode_FromWideChar(p+1, wcslen(p+1)); + v = PyUnicode_FromWideChar(p+1, -1); #else v = PyBytes_FromStringAndSize(p+1, strlen(p+1)); #endif @@ -5052,7 +5052,7 @@ os__getfullpathname_impl(PyObject *module, path_t *path) return PyErr_NoMemory(); } - PyObject *str = PyUnicode_FromWideChar(abspath, wcslen(abspath)); + PyObject *str = PyUnicode_FromWideChar(abspath, -1); PyMem_RawFree(abspath); if (str == NULL) { return NULL; @@ -5168,7 +5168,7 @@ os__findfirstfile_impl(PyObject *module, path_t *path) } wRealFileName = wFileData.cFileName; - result = PyUnicode_FromWideChar(wRealFileName, wcslen(wRealFileName)); + result = PyUnicode_FromWideChar(wRealFileName, -1); FindClose(hFindFile); return result; } @@ -5212,7 +5212,7 @@ os__getvolumepathname_impl(PyObject *module, path_t *path) result = win32_error_object("_getvolumepathname", path->object); goto exit; } - result = PyUnicode_FromWideChar(mountpath, wcslen(mountpath)); + result = PyUnicode_FromWideChar(mountpath, -1); if (PyBytes_Check(path->object)) Py_SETREF(result, PyUnicode_EncodeFSDefault(result)); @@ -5736,6 +5736,9 @@ os_mkdir_impl(PyObject *module, path_t *path, int mode, int dir_fd) #ifdef MS_WINDOWS Py_BEGIN_ALLOW_THREADS + // For API sets that don't support these APIs, we have no choice + // but to silently create a directory with default ACL. 
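The os_mkdir_impl() change that follows wraps the mode=0o700 security-descriptor setup in an API-partition guard, so partitions without those APIs fall back to creating the directory with the default ACL. A stripped-down sketch of that shape, assuming the MS_WINDOWS_APP/MS_WINDOWS_SYSTEM macros from CPython's build configuration; the function name is illustrative and the descriptor construction is elided:

    #include <windows.h>

    static BOOL
    mkdir_with_optional_acl(const wchar_t *path, int mode)
    {
        LPSECURITY_ATTRIBUTES pSecAttr = NULL;
    #if defined(MS_WINDOWS_APP) || defined(MS_WINDOWS_SYSTEM)
        SECURITY_ATTRIBUTES secAttr = { sizeof(SECURITY_ATTRIBUTES) };
        if (mode == 0700) {
            /* Build a restrictive security descriptor and attach it (elided). */
            pSecAttr = &secAttr;
        }
    #else
        (void)mode;  /* no descriptor APIs here: default ACL only */
    #endif
        /* With pSecAttr == NULL the directory gets the default ACL. */
        return CreateDirectoryW(path, pSecAttr);
    }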
+#if defined(MS_WINDOWS_APP) || defined(MS_WINDOWS_SYSTEM) if (mode == 0700 /* 0o700 */) { ULONG sdSize; pSecAttr = &secAttr; @@ -5751,6 +5754,7 @@ os_mkdir_impl(PyObject *module, path_t *path, int mode, int dir_fd) error = GetLastError(); } } +#endif if (!error) { result = CreateDirectoryW(path->wide, pSecAttr); if (secAttr.lpSecurityDescriptor && diff --git a/Modules/socketmodule.h b/Modules/socketmodule.h index 63624d511c3..200b2b8c7d8 100644 --- a/Modules/socketmodule.h +++ b/Modules/socketmodule.h @@ -259,7 +259,7 @@ typedef int SOCKET_T; #endif // AF_HYPERV is only supported on Windows -#if defined(AF_HYPERV) && defined(MS_WINDOWS) +#if defined(AF_HYPERV) && (defined(MS_WINDOWS_APP) || defined(MS_WINDOWS_SYSTEM)) # define HAVE_AF_HYPERV #endif diff --git a/Objects/dictobject.c b/Objects/dictobject.c index ce27e47dabf..fd8ccf56324 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -2916,6 +2916,11 @@ clear_lock_held(PyObject *op) } void +_PyDict_Clear_LockHeld(PyObject *op) { + clear_lock_held(op); +} + +void PyDict_Clear(PyObject *op) { Py_BEGIN_CRITICAL_SECTION(op); diff --git a/Objects/interpolationobject.c b/Objects/interpolationobject.c index aaea3b8c067..a5d407a7b0e 100644 --- a/Objects/interpolationobject.c +++ b/Objects/interpolationobject.c @@ -137,6 +137,8 @@ interpolation_reduce(PyObject *op, PyObject *Py_UNUSED(dummy)) static PyMethodDef interpolation_methods[] = { {"__reduce__", interpolation_reduce, METH_NOARGS, PyDoc_STR("__reduce__() -> (cls, state)")}, + {"__class_getitem__", Py_GenericAlias, + METH_O|METH_CLASS, PyDoc_STR("See PEP 585")}, {NULL, NULL}, }; diff --git a/Objects/templateobject.c b/Objects/templateobject.c index 7d356980b56..4293a311c44 100644 --- a/Objects/templateobject.c +++ b/Objects/templateobject.c @@ -23,6 +23,9 @@ templateiter_next(PyObject *op) if (self->from_strings) { item = PyIter_Next(self->stringsiter); self->from_strings = 0; + if (item == NULL) { + return NULL; + } if (PyUnicode_GET_LENGTH(item) == 0) { Py_SETREF(item, PyIter_Next(self->interpolationsiter)); self->from_strings = 1; @@ -444,6 +447,8 @@ template_reduce(PyObject *op, PyObject *Py_UNUSED(dummy)) static PyMethodDef template_methods[] = { {"__reduce__", template_reduce, METH_NOARGS, NULL}, + {"__class_getitem__", Py_GenericAlias, + METH_O|METH_CLASS, PyDoc_STR("See PEP 585")}, {NULL, NULL}, }; diff --git a/PC/winreg.c b/PC/winreg.c index c0de5c1353a..d1a1c3d1c97 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -426,7 +426,9 @@ PyHKEY_Close(winreg_state *st, PyObject *ob_handle) if (PyHKEY_Check(st, ob_handle)) { ((PyHKEYObject*)ob_handle)->hkey = 0; } + Py_BEGIN_ALLOW_THREADS rc = key ? 
RegCloseKey(key) : ERROR_SUCCESS; + Py_END_ALLOW_THREADS if (rc != ERROR_SUCCESS) PyErr_SetFromWindowsErrWithFunction(rc, "RegCloseKey"); return rc == ERROR_SUCCESS; @@ -499,14 +501,21 @@ PyWinObject_CloseHKEY(winreg_state *st, PyObject *obHandle) } #if SIZEOF_LONG >= SIZEOF_HKEY else if (PyLong_Check(obHandle)) { - long rc = RegCloseKey((HKEY)PyLong_AsLong(obHandle)); + long rc; + Py_BEGIN_ALLOW_THREADS + rc = RegCloseKey((HKEY)PyLong_AsLong(obHandle)); + Py_END_ALLOW_THREADS ok = (rc == ERROR_SUCCESS); if (!ok) PyErr_SetFromWindowsErrWithFunction(rc, "RegCloseKey"); } #else else if (PyLong_Check(obHandle)) { - long rc = RegCloseKey((HKEY)PyLong_AsVoidPtr(obHandle)); + long rc; + HKEY hkey = (HKEY)PyLong_AsVoidPtr(obHandle); + Py_BEGIN_ALLOW_THREADS + rc = RegCloseKey(hkey); + Py_END_ALLOW_THREADS ok = (rc == ERROR_SUCCESS); if (!ok) PyErr_SetFromWindowsErrWithFunction(rc, "RegCloseKey"); @@ -924,7 +933,9 @@ winreg_CreateKey_impl(PyObject *module, HKEY key, const wchar_t *sub_key) (Py_ssize_t)KEY_WRITE) < 0) { return NULL; } + Py_BEGIN_ALLOW_THREADS rc = RegCreateKeyW(key, sub_key, &retKey); + Py_END_ALLOW_THREADS if (rc != ERROR_SUCCESS) { PyErr_SetFromWindowsErrWithFunction(rc, "CreateKey"); return NULL; @@ -973,8 +984,10 @@ winreg_CreateKeyEx_impl(PyObject *module, HKEY key, const wchar_t *sub_key, (Py_ssize_t)access) < 0) { return NULL; } + Py_BEGIN_ALLOW_THREADS rc = RegCreateKeyExW(key, sub_key, reserved, NULL, 0, access, NULL, &retKey, NULL); + Py_END_ALLOW_THREADS if (rc != ERROR_SUCCESS) { PyErr_SetFromWindowsErrWithFunction(rc, "CreateKeyEx"); return NULL; @@ -1187,10 +1200,12 @@ winreg_EnumValue_impl(PyObject *module, HKEY key, int index) (Py_ssize_t)key, index) < 0) { return NULL; } - if ((rc = RegQueryInfoKeyW(key, NULL, NULL, NULL, NULL, NULL, NULL, - NULL, - &retValueSize, &retDataSize, NULL, NULL)) - != ERROR_SUCCESS) + + Py_BEGIN_ALLOW_THREADS + rc = RegQueryInfoKeyW(key, NULL, NULL, NULL, NULL, NULL, NULL, NULL, + &retValueSize, &retDataSize, NULL, NULL); + Py_END_ALLOW_THREADS + if (rc != ERROR_SUCCESS) return PyErr_SetFromWindowsErrWithFunction(rc, "RegQueryInfoKey"); ++retValueSize; /* include null terminators */ @@ -1290,7 +1305,7 @@ winreg_ExpandEnvironmentStrings_impl(PyObject *module, const wchar_t *string) return PyErr_SetFromWindowsErrWithFunction(retValueSize, "ExpandEnvironmentStrings"); } - o = PyUnicode_FromWideChar(retValue, wcslen(retValue)); + o = PyUnicode_FromWideChar(retValue, -1); PyMem_Free(retValue); return o; } @@ -1477,9 +1492,11 @@ winreg_QueryInfoKey_impl(PyObject *module, HKEY key) if (PySys_Audit("winreg.QueryInfoKey", "n", (Py_ssize_t)key) < 0) { return NULL; } - if ((rc = RegQueryInfoKeyW(key, NULL, NULL, 0, &nSubKeys, NULL, NULL, - &nValues, NULL, NULL, NULL, &ft)) - != ERROR_SUCCESS) { + Py_BEGIN_ALLOW_THREADS + rc = RegQueryInfoKeyW(key, NULL, NULL, 0, &nSubKeys, NULL, NULL, + &nValues, NULL, NULL, NULL, &ft); + Py_END_ALLOW_THREADS + if (rc != ERROR_SUCCESS) { return PyErr_SetFromWindowsErrWithFunction(rc, "RegQueryInfoKey"); } li.LowPart = ft.dwLowDateTime; @@ -1587,7 +1604,9 @@ exit: PyMem_Free(pbuf); } if (childKey != key) { + Py_BEGIN_ALLOW_THREADS RegCloseKey(childKey); + Py_END_ALLOW_THREADS } return result; } @@ -1625,7 +1644,9 @@ winreg_QueryValueEx_impl(PyObject *module, HKEY key, const wchar_t *name) (Py_ssize_t)key, NULL, name) < 0) { return NULL; } + Py_BEGIN_ALLOW_THREADS rc = RegQueryValueExW(key, name, NULL, NULL, NULL, &bufSize); + Py_END_ALLOW_THREADS if (rc == ERROR_MORE_DATA) bufSize = 256; else if (rc != 
ERROR_SUCCESS) @@ -1637,8 +1658,10 @@ winreg_QueryValueEx_impl(PyObject *module, HKEY key, const wchar_t *name) while (1) { retSize = bufSize; + Py_BEGIN_ALLOW_THREADS rc = RegQueryValueExW(key, name, NULL, &typ, (BYTE *)retBuf, &retSize); + Py_END_ALLOW_THREADS if (rc != ERROR_MORE_DATA) break; diff --git a/Parser/parser.c b/Parser/parser.c index b48b1b20ee4..84a293cddff 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -24202,7 +24202,7 @@ invalid_try_stmt_rule(Parser *p) // | 'except' expression ',' expressions 'as' NAME ':' // | 'except' expression ['as' NAME] NEWLINE // | 'except' NEWLINE -// | 'except' expression 'as' expression +// | 'except' expression 'as' expression ':' block static void * invalid_except_stmt_rule(Parser *p) { @@ -24318,15 +24318,17 @@ invalid_except_stmt_rule(Parser *p) D(fprintf(stderr, "%*c%s invalid_except_stmt[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' NEWLINE")); } - { // 'except' expression 'as' expression + { // 'except' expression 'as' expression ':' block if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> invalid_except_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' expression 'as' expression")); + D(fprintf(stderr, "%*c> invalid_except_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' expression 'as' expression ':' block")); Token * _keyword; Token * _keyword_1; + Token * _literal; expr_ty a; + asdl_stmt_seq* block_var; expr_ty expression_var; if ( (_keyword = _PyPegen_expect_token(p, 677)) // token='except' @@ -24336,9 +24338,13 @@ invalid_except_stmt_rule(Parser *p) (_keyword_1 = _PyPegen_expect_token(p, 680)) // token='as' && (a = expression_rule(p)) // expression + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (block_var = block_rule(p)) // block ) { - D(fprintf(stderr, "%*c+ invalid_except_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' expression 'as' expression")); + D(fprintf(stderr, "%*c+ invalid_except_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' expression 'as' expression ':' block")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot use except statement with %s" , _PyPegen_get_expr_name ( a ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24349,7 +24355,7 @@ invalid_except_stmt_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_except_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' expression 'as' expression")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' expression 'as' expression ':' block")); } _res = NULL; done: @@ -24361,7 +24367,7 @@ invalid_except_stmt_rule(Parser *p) // | 'except' '*' expression ',' expressions 'as' NAME ':' // | 'except' '*' expression ['as' NAME] NEWLINE // | 'except' '*' (NEWLINE | ':') -// | 'except' '*' expression 'as' expression +// | 'except' '*' expression 'as' expression ':' block static void * invalid_except_star_stmt_rule(Parser *p) { @@ -24486,16 +24492,18 @@ invalid_except_star_stmt_rule(Parser *p) D(fprintf(stderr, "%*c%s invalid_except_star_stmt[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'except' '*' (NEWLINE | ':')")); } - { // 'except' '*' expression 'as' expression + { // 'except' '*' expression 'as' expression ':' block if (p->error_indicator) { p->level--; return NULL; } - D(fprintf(stderr, "%*c> invalid_except_star_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' '*' expression 'as' expression")); + D(fprintf(stderr, "%*c> invalid_except_star_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' '*' expression 'as' expression ':' block")); Token * _keyword; Token * _keyword_1; Token * _literal; + Token * _literal_1; expr_ty a; + asdl_stmt_seq* block_var; expr_ty expression_var; if ( (_keyword = _PyPegen_expect_token(p, 677)) // token='except' @@ -24507,9 +24515,13 @@ invalid_except_star_stmt_rule(Parser *p) (_keyword_1 = _PyPegen_expect_token(p, 680)) // token='as' && (a = expression_rule(p)) // expression + && + (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' + && + (block_var = block_rule(p)) // block ) { - D(fprintf(stderr, "%*c+ invalid_except_star_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' '*' expression 'as' expression")); + D(fprintf(stderr, "%*c+ invalid_except_star_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' '*' expression 'as' expression ':' block")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot use except* statement with %s" , _PyPegen_get_expr_name ( a ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24520,7 +24532,7 @@ invalid_except_star_stmt_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_except_star_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' '*' expression 'as' expression")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'except' '*' expression 'as' expression ':' block")); } _res = NULL; done: diff --git a/Python/fileutils.c b/Python/fileutils.c index 78603d40704..2a3f12d4e87 100644 --- a/Python/fileutils.c +++ b/Python/fileutils.c @@ -2784,6 +2784,43 @@ error: return -1; } #else /* MS_WINDOWS */ + +// The Windows Games API family doesn't expose GetNamedPipeHandleStateW so attempt +// to load it directly from the Kernel32.dll +#if !defined(MS_WINDOWS_APP) && !defined(MS_WINDOWS_SYSTEM) +BOOL +GetNamedPipeHandleStateW(HANDLE hNamedPipe, LPDWORD lpState, LPDWORD lpCurInstances, LPDWORD lpMaxCollectionCount, + LPDWORD lpCollectDataTimeout, LPWSTR lpUserName, DWORD nMaxUserNameSize) +{ + static int initialized = 0; + typedef BOOL(__stdcall* PGetNamedPipeHandleStateW) ( + HANDLE hNamedPipe, LPDWORD lpState, LPDWORD lpCurInstances, LPDWORD lpMaxCollectionCount, + LPDWORD lpCollectDataTimeout, LPWSTR lpUserName, DWORD nMaxUserNameSize); + static PGetNamedPipeHandleStateW _GetNamedPipeHandleStateW; + + if (initialized == 0) { + HMODULE api = LoadLibraryExW(L"Kernel32.dll", NULL, LOAD_LIBRARY_SEARCH_SYSTEM32); + if (api) { + _GetNamedPipeHandleStateW = (PGetNamedPipeHandleStateW)GetProcAddress( + api, "GetNamedPipeHandleStateW"); + } + else { + _GetNamedPipeHandleStateW = NULL; + } + initialized = 1; + } + + if (!_GetNamedPipeHandleStateW) { + SetLastError(E_NOINTERFACE); + return FALSE; + } + + return _GetNamedPipeHandleStateW( + hNamedPipe, lpState, lpCurInstances, lpMaxCollectionCount, lpCollectDataTimeout, lpUserName, nMaxUserNameSize + ); +} +#endif /* !MS_WINDOWS_APP && !MS_WINDOWS_SYSTEM */ + int _Py_get_blocking(int fd) { diff --git a/Python/import.c b/Python/import.c index 9dec0f488a3..e7be1b90751 100644 --- a/Python/import.c +++ b/Python/import.c @@ -3854,15 +3854,17 @@ PyImport_ImportModuleLevelObject(PyObject *name, PyObject *globals, } final_mod = import_get_module(tstate, to_return); - Py_DECREF(to_return); if (final_mod == NULL) { if (!_PyErr_Occurred(tstate)) { _PyErr_Format(tstate, PyExc_KeyError, "%R not in sys.modules as expected", to_return); } + Py_DECREF(to_return); goto error; } + + Py_DECREF(to_return); } } else { diff --git a/Python/pystate.c b/Python/pystate.c index 1ac13440085..14ae2748b0b 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -1908,9 +1908,14 @@ tstate_delete_common(PyThreadState *tstate, int release_gil) static void zapthreads(PyInterpreterState *interp) { + PyThreadState *tstate; /* No need to lock the mutex here because this should only happen - when the threads are all really dead (XXX famous last words). */ - _Py_FOR_EACH_TSTATE_UNLOCKED(interp, tstate) { + when the threads are all really dead (XXX famous last words). + + Cannot use _Py_FOR_EACH_TSTATE_UNLOCKED because we are freeing + the thread states here. 
+ */ + while ((tstate = interp->threads.head) != NULL) { tstate_verify_not_active(tstate); tstate_delete_common(tstate, 0); free_threadstate((_PyThreadStateImpl *)tstate); diff --git a/Python/sysmodule.c b/Python/sysmodule.c index b46c2ab7c44..4ed045e3297 100644 --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -1643,6 +1643,7 @@ static PyObject * _sys_getwindowsversion_from_kernel32(void) { #ifndef MS_WINDOWS_DESKTOP + PyErr_SetString(PyExc_OSError, "cannot read version info on this platform"); return NULL; #else HANDLE hKernel32; @@ -2484,7 +2485,7 @@ sys_remote_exec_impl(PyObject *module, int pid, PyObject *script) PyObject *path; const char *debugger_script_path; - if (PyUnicode_FSConverter(script, &path) < 0) { + if (PyUnicode_FSConverter(script, &path) == 0) { return NULL; } debugger_script_path = PyBytes_AS_STRING(path); diff --git a/Tools/build/.ruff.toml b/Tools/build/.ruff.toml index fa7689d45db..dcbf2936290 100644 --- a/Tools/build/.ruff.toml +++ b/Tools/build/.ruff.toml @@ -1,7 +1,7 @@ extend = "../../.ruff.toml" # Inherit the project-wide settings [per-file-target-version] -"deepfreeze.py" = "py310" +"deepfreeze.py" = "py311" # requires `code.co_exceptiontable` "stable_abi.py" = "py311" # requires 'tomllib' [format] diff --git a/Tools/build/compute-changes.py b/Tools/build/compute-changes.py index cfdd55fd192..b5993d29b92 100644 --- a/Tools/build/compute-changes.py +++ b/Tools/build/compute-changes.py @@ -57,7 +57,7 @@ class Outputs: def compute_changes() -> None: target_branch, head_ref = git_refs() - if target_branch and head_ref: + if os.environ.get("GITHUB_EVENT_NAME", "") == "pull_request": # Getting changed files only makes sense on a pull request files = get_changed_files(target_branch, head_ref) outputs = process_changed_files(files) diff --git a/Tools/build/deepfreeze.py b/Tools/build/deepfreeze.py index 23f58447937..2b9f03aebb6 100644 --- a/Tools/build/deepfreeze.py +++ b/Tools/build/deepfreeze.py @@ -2,9 +2,12 @@ The script may be executed by _bootstrap_python interpreter. Shared library extension modules are not available in that case. -On Windows, and in cross-compilation cases, it is executed -by Python 3.10, and 3.11 features are not available. +Requires 3.11+ to be executed, +because relies on `code.co_qualname` and `code.co_exceptiontable`. 
""" + +from __future__ import annotations + import argparse import builtins import collections @@ -13,10 +16,14 @@ import os import re import time import types -from typing import TextIO import umarshal +TYPE_CHECKING = False +if TYPE_CHECKING: + from collections.abc import Iterator + from typing import Any, TextIO + ROOT = os.path.dirname(os.path.dirname(os.path.dirname(__file__))) verbose = False @@ -45,8 +52,8 @@ CO_FAST_FREE = 0x80 next_code_version = 1 -def get_localsplus(code: types.CodeType): - a = collections.defaultdict(int) +def get_localsplus(code: types.CodeType) -> tuple[tuple[str, ...], bytes]: + a: collections.defaultdict[str, int] = collections.defaultdict(int) for name in code.co_varnames: a[name] |= CO_FAST_LOCAL for name in code.co_cellvars: @@ -136,7 +143,7 @@ class Printer: return identifiers, strings @contextlib.contextmanager - def indent(self) -> None: + def indent(self) -> Iterator[None]: save_level = self.level try: self.level += 1 @@ -148,7 +155,7 @@ class Printer: self.file.writelines((" "*self.level, arg, "\n")) @contextlib.contextmanager - def block(self, prefix: str, suffix: str = "") -> None: + def block(self, prefix: str, suffix: str = "") -> Iterator[None]: self.write(prefix + " {") with self.indent(): yield @@ -250,9 +257,17 @@ class Printer: co_names = self.generate(name + "_names", code.co_names) co_filename = self.generate(name + "_filename", code.co_filename) co_name = self.generate(name + "_name", code.co_name) - co_qualname = self.generate(name + "_qualname", code.co_qualname) co_linetable = self.generate(name + "_linetable", code.co_linetable) - co_exceptiontable = self.generate(name + "_exceptiontable", code.co_exceptiontable) + # We use 3.10 for type checking, but this module requires 3.11 + # TODO: bump python version for this script. 
@@ -45,8 +52,8 @@ CO_FAST_FREE = 0x80

 next_code_version = 1

-def get_localsplus(code: types.CodeType):
-    a = collections.defaultdict(int)
+def get_localsplus(code: types.CodeType) -> tuple[tuple[str, ...], bytes]:
+    a: collections.defaultdict[str, int] = collections.defaultdict(int)
     for name in code.co_varnames:
         a[name] |= CO_FAST_LOCAL
     for name in code.co_cellvars:
@@ -136,7 +143,7 @@ class Printer:
         return identifiers, strings

     @contextlib.contextmanager
-    def indent(self) -> None:
+    def indent(self) -> Iterator[None]:
         save_level = self.level
         try:
             self.level += 1
@@ -148,7 +155,7 @@ class Printer:
         self.file.writelines(("    "*self.level, arg, "\n"))

     @contextlib.contextmanager
-    def block(self, prefix: str, suffix: str = "") -> None:
+    def block(self, prefix: str, suffix: str = "") -> Iterator[None]:
         self.write(prefix + " {")
         with self.indent():
             yield
@@ -250,9 +257,17 @@ class Printer:
         co_names = self.generate(name + "_names", code.co_names)
         co_filename = self.generate(name + "_filename", code.co_filename)
         co_name = self.generate(name + "_name", code.co_name)
-        co_qualname = self.generate(name + "_qualname", code.co_qualname)
         co_linetable = self.generate(name + "_linetable", code.co_linetable)
-        co_exceptiontable = self.generate(name + "_exceptiontable", code.co_exceptiontable)
+        # We use 3.10 for type checking, but this module requires 3.11
+        # TODO: bump python version for this script.
+        co_qualname = self.generate(
+            name + "_qualname",
+            code.co_qualname,  # type: ignore[attr-defined]
+        )
+        co_exceptiontable = self.generate(
+            name + "_exceptiontable",
+            code.co_exceptiontable,  # type: ignore[attr-defined]
+        )
         # These fields are not directly accessible
         localsplusnames, localspluskinds = get_localsplus(code)
         co_localsplusnames = self.generate(name + "_localsplusnames", localsplusnames)
@@ -379,13 +394,13 @@ class Printer:
         self.write(f".cval = {{ {z.real}, {z.imag} }},")
         return f"&{name}.ob_base"

-    def generate_frozenset(self, name: str, fs: frozenset[object]) -> str:
+    def generate_frozenset(self, name: str, fs: frozenset[Any]) -> str:
         try:
-            fs = sorted(fs)
+            fs_sorted = sorted(fs)
         except TypeError:
             # frozen set with incompatible types, fallback to repr()
-            fs = sorted(fs, key=repr)
-        ret = self.generate_tuple(name, tuple(fs))
+            fs_sorted = sorted(fs, key=repr)
+        ret = self.generate_tuple(name, tuple(fs_sorted))
         self.write("// TODO: The above tuple should be a frozenset")
         return ret
@@ -402,7 +417,7 @@ class Printer:
             # print(f"Cache hit {key!r:.40}: {self.cache[key]!r:.40}")
             return self.cache[key]
         self.misses += 1
-        if isinstance(obj, (types.CodeType, umarshal.Code)) :
+        if isinstance(obj, types.CodeType) :
             val = self.generate_code(name, obj)
         elif isinstance(obj, tuple):
             val = self.generate_tuple(name, obj)
@@ -458,7 +473,7 @@ def decode_frozen_data(source: str) -> types.CodeType:
         if re.match(FROZEN_DATA_LINE, line):
             values.extend([int(x) for x in line.split(",") if x.strip()])
     data = bytes(values)
-    return umarshal.loads(data)
+    return umarshal.loads(data)  # type: ignore[no-any-return]


 def generate(args: list[str], output: TextIO) -> None:
@@ -494,12 +509,12 @@ group.add_argument('args', nargs="*", default=(),
                    help="Input file and module name (required) in file:modname format")

 @contextlib.contextmanager
-def report_time(label: str):
-    t0 = time.time()
+def report_time(label: str) -> Iterator[None]:
+    t0 = time.perf_counter()
     try:
         yield
     finally:
-        t1 = time.time()
+        t1 = time.perf_counter()
         if verbose:
             print(f"{label}: {t1-t0:.3f} sec")
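The report_time() hunk above moves from time.time() to time.perf_counter(), a monotonic clock with the best available resolution, so short durations are not skewed by wall-clock adjustments. A sketch of the same context-manager pattern, without the script's verbose flag:

    import contextlib
    import time
    from collections.abc import Iterator

    @contextlib.contextmanager
    def report_time(label: str) -> Iterator[None]:
        t0 = time.perf_counter()
        try:
            yield
        finally:
            # perf_counter() never goes backwards, so t1 - t0 is a reliable duration.
            t1 = time.perf_counter()
            print(f"{label}: {t1 - t0:.3f} sec")

    with report_time("sleep"):
        time.sleep(0.1)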
diff --git a/Tools/build/mypy.ini b/Tools/build/mypy.ini
index 62d7e150fd8..123dc895f90 100644
--- a/Tools/build/mypy.ini
+++ b/Tools/build/mypy.ini
@@ -4,10 +4,12 @@
 # .github/workflows/mypy.yml
 files =
     Tools/build/compute-changes.py,
+    Tools/build/deepfreeze.py,
     Tools/build/generate-build-details.py,
     Tools/build/generate_sbom.py,
     Tools/build/verify_ensurepip_wheels.py,
-    Tools/build/update_file.py
+    Tools/build/update_file.py,
+    Tools/build/umarshal.py

 pretty = True
diff --git a/Tools/build/umarshal.py b/Tools/build/umarshal.py
index 679fa7caf9f..865cffc2440 100644
--- a/Tools/build/umarshal.py
+++ b/Tools/build/umarshal.py
@@ -145,12 +145,12 @@ class Reader:
     def r_float_bin(self) -> float:
         buf = self.r_string(8)
         import struct  # Lazy import to avoid breaking UNIX build
-        return struct.unpack("d", buf)[0]
+        return struct.unpack("d", buf)[0]  # type: ignore[no-any-return]

     def r_float_str(self) -> float:
         n = self.r_byte()
         buf = self.r_string(n)
-        return ast.literal_eval(buf.decode("ascii"))
+        return ast.literal_eval(buf.decode("ascii"))  # type: ignore[no-any-return]

     def r_ref_reserve(self, flag: int) -> int:
         if flag:
@@ -306,7 +306,7 @@ def loads(data: bytes) -> Any:
     return r.r_object()


-def main():
+def main() -> None:
     # Test
     import marshal
     import pprint
@@ -314,8 +314,9 @@ def main():
     data = marshal.dumps(sample)
     retval = loads(data)
     assert retval == sample, retval
-    sample = main.__code__
-    data = marshal.dumps(sample)
+
+    sample2 = main.__code__
+    data = marshal.dumps(sample2)
     retval = loads(data)
     assert isinstance(retval, Code), retval
     pprint.pprint(retval.__dict__)
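The revised self-test in umarshal.py above round-trips main.__code__ through marshal under a separate sample2 name and checks that the pure-Python reader yields a Code object. A rough stdlib-only equivalent of that round-trip, using just the C marshal module (umarshal itself lives in Tools/build and is not generally importable outside a CPython checkout):

    import marshal

    def sample() -> int:
        return 42

    # Serialize a code object and read it back with the stdlib marshal module.
    data = marshal.dumps(sample.__code__)
    code = marshal.loads(data)
    assert code.co_name == "sample"
    print(code.co_consts)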