-rw-r--r--  .github/workflows/jit.yml | 4
-rwxr-xr-x  .github/workflows/posix-deps-apt.sh | 7
-rw-r--r--  .pre-commit-config.yaml | 7
-rw-r--r--  Doc/c-api/capsule.rst | 10
-rw-r--r--  Doc/c-api/code.rst | 65
-rw-r--r--  Doc/c-api/init.rst | 14
-rw-r--r--  Doc/c-api/long.rst | 2
-rw-r--r--  Doc/c-api/object.rst | 15
-rw-r--r--  Doc/c-api/structures.rst | 75
-rw-r--r--  Doc/c-api/typeobj.rst | 11
-rw-r--r--  Doc/c-api/veryhigh.rst | 8
-rw-r--r--  Doc/conf.py | 19
-rw-r--r--  Doc/deprecations/c-api-pending-removal-in-3.15.rst | 1
-rw-r--r--  Doc/deprecations/c-api-pending-removal-in-3.16.rst | 4
-rw-r--r--  Doc/deprecations/pending-removal-in-3.15.rst | 2
-rw-r--r--  Doc/deprecations/pending-removal-in-future.rst | 2
-rw-r--r--  Doc/extending/newtypes_tutorial.rst | 2
-rw-r--r--  Doc/howto/functional.rst | 2
-rw-r--r--  Doc/howto/isolating-extensions.rst | 3
-rw-r--r--  Doc/howto/logging-cookbook.rst | 62
-rw-r--r--  Doc/installing/index.rst | 2
-rw-r--r--  Doc/library/__future__.rst | 78
-rw-r--r--  Doc/library/argparse.rst | 37
-rw-r--r--  Doc/library/codecs.rst | 8
-rw-r--r--  Doc/library/compression.zstd.rst | 10
-rw-r--r--  Doc/library/concurrent.interpreters.rst | 223
-rw-r--r--  Doc/library/csv.rst | 42
-rw-r--r--  Doc/library/ctypes.rst | 2
-rw-r--r--  Doc/library/email.compat32-message.rst | 18
-rw-r--r--  Doc/library/email.header.rst | 4
-rw-r--r--  Doc/library/exceptions.rst | 11
-rw-r--r--  Doc/library/faulthandler.rst | 39
-rw-r--r--  Doc/library/fractions.rst | 2
-rw-r--r--  Doc/library/http.cookiejar.rst | 2
-rw-r--r--  Doc/library/io.rst | 3
-rw-r--r--  Doc/library/math.rst | 12
-rw-r--r--  Doc/library/mmap.rst | 2
-rw-r--r--  Doc/library/multiprocessing.rst | 12
-rw-r--r--  Doc/library/pathlib.rst | 2
-rw-r--r--  Doc/library/pyexpat.rst | 7
-rw-r--r--  Doc/library/random.rst | 5
-rw-r--r--  Doc/library/security_warnings.rst | 2
-rw-r--r--  Doc/library/shutil.rst | 11
-rw-r--r--  Doc/library/socketserver.rst | 2
-rw-r--r--  Doc/library/sys.monitoring.rst | 61
-rw-r--r--  Doc/library/sys.rst | 2
-rw-r--r--  Doc/library/tarfile.rst | 42
-rw-r--r--  Doc/library/threading.rst | 2
-rw-r--r--  Doc/library/time.rst | 7
-rw-r--r--  Doc/library/unittest.rst | 9
-rw-r--r--  Doc/library/venv.rst | 70
-rw-r--r--  Doc/library/xml.dom.minidom.rst | 7
-rw-r--r--  Doc/library/xml.dom.pulldom.rst | 7
-rw-r--r--  Doc/library/xml.etree.elementtree.rst | 7
-rw-r--r--  Doc/library/xml.rst | 76
-rw-r--r--  Doc/library/xml.sax.rst | 7
-rw-r--r--  Doc/library/xmlrpc.client.rst | 4
-rw-r--r--  Doc/library/xmlrpc.server.rst | 4
-rw-r--r--  Doc/library/zipfile.rst | 44
-rw-r--r--  Doc/tools/.nitignore | 2
-rw-r--r--  Doc/using/cmdline.rst | 4
-rw-r--r--  Doc/using/configure.rst | 10
-rw-r--r--  Doc/using/ios.rst | 11
-rw-r--r--  Doc/whatsnew/3.12.rst | 2
-rw-r--r--  Doc/whatsnew/3.13.rst | 6
-rw-r--r--  Doc/whatsnew/3.14.rst | 6
-rw-r--r--  Doc/whatsnew/3.15.rst | 42
-rw-r--r--  Doc/whatsnew/3.9.rst | 10
-rw-r--r--  Grammar/python.gram | 3
-rw-r--r--  Include/cpython/lock.h | 11
-rw-r--r--  Include/internal/mimalloc/mimalloc/types.h | 2
-rw-r--r--  Include/internal/pycore_global_objects_fini_generated.h | 1
-rw-r--r--  Include/internal/pycore_global_strings.h | 1
-rw-r--r--  Include/internal/pycore_lock.h | 12
-rw-r--r--  Include/internal/pycore_object.h | 20
-rw-r--r--  Include/internal/pycore_optimizer.h | 5
-rw-r--r--  Include/internal/pycore_pystate.h | 3
-rw-r--r--  Include/internal/pycore_pythonrun.h | 22
-rw-r--r--  Include/internal/pycore_runtime_init_generated.h | 1
-rw-r--r--  Include/internal/pycore_stackref.h | 10
-rw-r--r--  Include/internal/pycore_unicodeobject_generated.h | 4
-rw-r--r--  Include/internal/pycore_uop_metadata.h | 2
-rw-r--r--  Include/internal/pycore_weakref.h | 14
-rw-r--r--  Include/pyport.h | 19
-rw-r--r--  Include/pythonrun.h | 25
-rw-r--r--  Include/refcount.h | 16
-rw-r--r--  InternalDocs/garbage_collector.md | 2
-rw-r--r--  Lib/_pydecimal.py | 14
-rw-r--r--  Lib/_pyio.py | 80
-rw-r--r--  Lib/_strptime.py | 202
-rw-r--r--  Lib/asyncio/base_events.py | 70
-rw-r--r--  Lib/concurrent/interpreters/__init__.py | 8
-rw-r--r--  Lib/concurrent/interpreters/_queues.py | 8
-rw-r--r--  Lib/difflib.py | 4
-rw-r--r--  Lib/encodings/idna.py | 2
-rw-r--r--  Lib/encodings/palmos.py | 2
-rw-r--r--  Lib/fractions.py | 13
-rw-r--r--  Lib/html/parser.py | 161
-rw-r--r--  Lib/idlelib/configdialog.py | 2
-rw-r--r--  Lib/idlelib/debugger.py | 2
-rw-r--r--  Lib/idlelib/editor.py | 2
-rw-r--r--  Lib/idlelib/idle_test/htest.py | 2
-rw-r--r--  Lib/os.py | 2
-rw-r--r--  Lib/platform.py | 22
-rw-r--r--  Lib/sre_compile.py | 7
-rw-r--r--  Lib/sre_constants.py | 7
-rw-r--r--  Lib/sre_parse.py | 7
-rw-r--r--  Lib/test/support/__init__.py | 1
-rw-r--r--  Lib/test/support/channels.py | 8
-rw-r--r--  Lib/test/test_asyncio/test_base_events.py | 147
-rw-r--r--  Lib/test/test_builtin.py | 3
-rw-r--r--  Lib/test/test_capi/test_abstract.py | 25
-rw-r--r--  Lib/test/test_capi/test_opt.py | 15
-rw-r--r--  Lib/test/test_configparser.py | 12
-rw-r--r--  Lib/test/test_ctypes/test_parameters.py | 4
-rw-r--r--  Lib/test/test_dbm.py | 3
-rw-r--r--  Lib/test/test_decimal.py | 13
-rw-r--r--  Lib/test/test_enum.py | 2
-rw-r--r--  Lib/test/test_external_inspection.py | 122
-rw-r--r--  Lib/test/test_fileio.py | 12
-rw-r--r--  Lib/test/test_float.py | 2
-rw-r--r--  Lib/test/test_format.py | 10
-rw-r--r--  Lib/test/test_fractions.py | 20
-rw-r--r--  Lib/test/test_free_threading/test_io.py | 42
-rw-r--r--  Lib/test/test_free_threading/test_itertools.py | 32
-rw-r--r--  Lib/test/test_free_threading/test_itertools_combinatoric.py | 51
-rw-r--r--  Lib/test/test_fstring.py | 8
-rw-r--r--  Lib/test/test_generated_cases.py | 244
-rw-r--r--  Lib/test/test_getpath.py | 21
-rw-r--r--  Lib/test/test_hashlib.py | 8
-rw-r--r--  Lib/test/test_htmlparser.py | 187
-rw-r--r--  Lib/test/test_interpreters/test_api.py | 8
-rw-r--r--  Lib/test/test_interpreters/test_channels.py | 16
-rw-r--r--  Lib/test/test_interpreters/test_queues.py | 8
-rw-r--r--  Lib/test/test_io.py | 1
-rw-r--r--  Lib/test/test_iter.py | 2
-rw-r--r--  Lib/test/test_json/test_tool.py | 2
-rw-r--r--  Lib/test/test_listcomps.py | 2
-rw-r--r--  Lib/test/test_math.py | 15
-rw-r--r--  Lib/test/test_optparse.py | 4
-rw-r--r--  Lib/test/test_peepholer.py | 7
-rw-r--r--  Lib/test/test_platform.py | 16
-rw-r--r--  Lib/test/test_pprint.py | 2
-rw-r--r--  Lib/test/test_pyclbr.py | 4
-rw-r--r--  Lib/test/test_pyexpat.py | 20
-rw-r--r--  Lib/test/test_re.py | 28
-rw-r--r--  Lib/test/test_reprlib.py | 20
-rw-r--r--  Lib/test/test_sqlite3/test_dbapi.py | 26
-rw-r--r--  Lib/test/test_statistics.py | 3
-rw-r--r--  Lib/test/test_str.py | 8
-rw-r--r--  Lib/test/test_strptime.py | 37
-rw-r--r--  Lib/test/test_syntax.py | 7
-rw-r--r--  Lib/test/test_sys.py | 7
-rw-r--r--  Lib/test/test_threading.py | 55
-rw-r--r--  Lib/test/test_types.py | 49
-rw-r--r--  Lib/test/test_typing.py | 59
-rw-r--r--  Lib/test/test_unittest/test_case.py | 16
-rw-r--r--  Lib/test/test_xml_etree.py | 27
-rw-r--r--  Lib/test/test_zoneinfo/test_zoneinfo.py | 43
-rw-r--r--  Lib/test/test_zoneinfo/test_zoneinfo_property.py | 18
-rw-r--r--  Lib/test/test_zstd.py | 9
-rw-r--r--  Lib/test/typinganndata/fwdref_module.py | 6
-rw-r--r--  Lib/typing.py | 50
-rw-r--r--  Lib/unittest/_log.py | 5
-rw-r--r--  Lib/unittest/case.py | 6
-rw-r--r--  Lib/unittest/mock.py | 2
-rw-r--r--  Lib/xml/etree/ElementTree.py | 10
-rw-r--r--  Lib/zoneinfo/_common.py | 8
-rw-r--r--  Misc/ACKS | 2
-rw-r--r--  Misc/NEWS.d/next/Build/2025-05-16-07-46-06.gh-issue-115119.ALBgS_.rst | 4
-rw-r--r--  Misc/NEWS.d/next/C_API/2025-05-20-17-13-51.gh-issue-134009.CpCmry.rst | 1
-rw-r--r--  Misc/NEWS.d/next/C_API/2025-06-19-12-47-18.gh-issue-133157.1WA85f.rst | 1
-rw-r--r--  Misc/NEWS.d/next/C_API/2025-06-25-01-03-10.gh-issue-135906.UBrCWq.rst | 1
-rw-r--r--  Misc/NEWS.d/next/C_API/2025-07-01-16-22-39.gh-issue-135075.angu3J.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-04-19-16-22-47.gh-issue-132732.jgqhlF.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-05-31-19-24-54.gh-issue-134280.NDVbzY.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-06-12-11-19-52.gh-issue-135422.F6yQi6.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-06-16-03-56-15.gh-issue-135551.hRTQO-.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-06-17-22-34-58.gh-issue-135607.ucsLVu.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-06-24-16-46-34.gh-issue-135904.78xfon.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-06-26-15-25-51.gh-issue-78465.MbDN8X.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-07-02-15-18-41.gh-issue-136203.Y934sC.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-07-06-14-53-19.gh-issue-109700.KVNQQi.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Documentation/2025-07-01-21-04-47.gh-issue-136155.ufmH4Q.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Library/2025-03-11-05-24-14.gh-issue-130664.g0yNMm.rst | 4
-rw-r--r--  Misc/NEWS.d/next/Library/2025-04-07-09-53-54.gh-issue-87790.6nj3zQ.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-04-07-10-20-16.gh-issue-87790.X2SjJe.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-04-22-21-00-23.gh-issue-123471.asOLA2.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-03-12-59-17.gh-issue-135069.xop30V.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-10-10-22-18.gh-issue-130870.JipqbO.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-18-19-25-32.gh-issue-123471.lx1Xbt.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-22-02-16-17.gh-issue-135640.FXyFL6.rst | 4
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-23-11-04-25.gh-issue-135836.-C-c4v.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-24-10-23-37.gh-issue-135853.6xDNOG.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-24-10-52-35.gh-issue-135836.s37351.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-26-11-52-40.gh-issue-53203.TMigBr.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-26-17-19-36.gh-issue-105456.eR9oHB.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-26-17-28-49.gh-issue-135995.pPrDCt.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-27-09-26-04.gh-issue-87135.33z0UW.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-27-13-34-28.gh-issue-136028.RY727g.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-30-11-12-24.gh-issue-85702.0Lrbwu.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-07-02-10-48-21.gh-issue-136193.xfvras.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-07-02-18-41-45.gh-issue-133982.7qqAn6.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Library/2025-07-05-06-56-16.gh-issue-136316.3zj_Do.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-07-05-06-59-46.gh-issue-136047.qWvycf.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-07-05-09-45-04.gh-issue-136286.N67Amr.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-07-06-10-18-48.gh-issue-136021.f-FJYT.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Security/2025-06-18-13-28-08.gh-issue-102555.nADrzJ.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Security/2025-06-25-14-13-39.gh-issue-135661.idjQ0B.rst | 25
-rw-r--r--  Misc/NEWS.d/next/Security/2025-06-27-21-23-19.gh-issue-136053.QZxcee.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Tests/2025-05-23-09-19-52.gh-issue-134567.hwEIMb.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Tests/2025-06-26-15-15-35.gh-issue-135966.EBpF8Y.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Tools-Demos/2025-06-26-15-58-13.gh-issue-135968.C4v_-W.rst | 1
-rw-r--r--  Modules/_collectionsmodule.c | 5
-rw-r--r--  Modules/_datetimemodule.c | 2
-rw-r--r--  Modules/_elementtree.c | 4
-rw-r--r--  Modules/_functoolsmodule.c | 9
-rw-r--r--  Modules/_heapqmodule.c | 6
-rw-r--r--  Modules/_io/bufferedio.c | 7
-rw-r--r--  Modules/_io/bytesio.c | 4
-rw-r--r--  Modules/_io/fileio.c | 5
-rw-r--r--  Modules/_io/iobase.c | 4
-rw-r--r--  Modules/_io/stringio.c | 5
-rw-r--r--  Modules/_io/textio.c | 4
-rw-r--r--  Modules/_io/winconsoleio.c | 4
-rw-r--r--  Modules/_localemodule.c | 2
-rw-r--r--  Modules/_queuemodule.c | 5
-rw-r--r--  Modules/_remote_debugging_module.c | 83
-rw-r--r--  Modules/_sqlite/blob.c | 5
-rw-r--r--  Modules/_sqlite/cursor.c | 5
-rw-r--r--  Modules/_sre/sre.c | 6
-rw-r--r--  Modules/_stat.c | 4
-rw-r--r--  Modules/_struct.c | 5
-rw-r--r--  Modules/_testcapi/abstract.c | 38
-rw-r--r--  Modules/_testcapi/vectorcall.c | 8
-rw-r--r--  Modules/_testinternalcapi.c | 2
-rw-r--r--  Modules/_threadmodule.c | 22
-rw-r--r--  Modules/_zoneinfo.c | 5
-rw-r--r--  Modules/arraymodule.c | 5
-rw-r--r--  Modules/blake2module.c | 66
-rw-r--r--  Modules/clinic/_remote_debugging_module.c.h | 40
-rw-r--r--  Modules/clinic/mathmodule.c.h | 36
-rw-r--r--  Modules/getpath.py | 11
-rw-r--r--  Modules/hmacmodule.c | 21
-rw-r--r--  Modules/itertoolsmodule.c | 38
-rw-r--r--  Modules/mathmodule.c | 18
-rw-r--r--  Modules/mmapmodule.c | 4
-rw-r--r--  Modules/pyexpat.c | 85
-rw-r--r--  Objects/classobject.c | 4
-rw-r--r--  Objects/codeobject.c | 5
-rw-r--r--  Objects/descrobject.c | 5
-rw-r--r--  Objects/dictobject.c | 1
-rw-r--r--  Objects/funcobject.c | 5
-rw-r--r--  Objects/genericaliasobject.c | 5
-rw-r--r--  Objects/genobject.c | 4
-rw-r--r--  Objects/listobject.c | 48
-rw-r--r--  Objects/listsort.txt | 175
-rw-r--r--  Objects/methodobject.c | 5
-rw-r--r--  Objects/moduleobject.c | 4
-rw-r--r--  Objects/namespaceobject.c | 8
-rw-r--r--  Objects/object.c | 47
-rw-r--r--  Objects/odictobject.c | 4
-rw-r--r--  Objects/picklebufobject.c | 4
-rw-r--r--  Objects/setobject.c | 4
-rw-r--r--  Objects/typeobject.c | 66
-rw-r--r--  Objects/unionobject.c | 5
-rw-r--r--  PCbuild/pythoncore.vcxproj | 8
-rw-r--r--  Parser/parser.c | 3725
-rw-r--r--  Python/bytecodes.c | 23
-rw-r--r--  Python/ceval.c | 14
-rw-r--r--  Python/executor_cases.c.h | 26
-rw-r--r--  Python/flowgraph.c | 4
-rw-r--r--  Python/gc.c | 2
-rw-r--r--  Python/generated_cases.c.h | 27
-rw-r--r--  Python/lock.c | 20
-rw-r--r--  Python/marshal.c | 3
-rw-r--r--  Python/optimizer_analysis.c | 7
-rw-r--r--  Python/optimizer_bytecodes.c | 81
-rw-r--r--  Python/optimizer_cases.c.h | 310
-rw-r--r--  Python/optimizer_symbols.c | 41
-rw-r--r--  Python/pylifecycle.c | 14
-rw-r--r--  Python/remote_debug.h | 78
-rw-r--r--  Python/stdlib_module_names.h | 3
-rw-r--r--  Tools/cases_generator/generators_common.py | 11
-rw-r--r--  Tools/cases_generator/optimizer_generator.py | 195
-rw-r--r--  Tools/inspection/benchmark_external_inspection.py | 20
-rw-r--r--  Tools/jit/_optimizers.py | 319
-rw-r--r--  Tools/jit/_stencils.py | 67
-rw-r--r--  Tools/jit/_targets.py | 58
-rw-r--r--  Tools/scripts/summarize_stats.py | 2
-rwxr-xr-x  configure | 52
-rw-r--r--  configure.ac | 48
-rw-r--r--  iOS/README.rst | 2
-rwxr-xr-x  iOS/Resources/bin/arm64-apple-ios-simulator-strip | 2
-rwxr-xr-x  iOS/Resources/bin/arm64-apple-ios-strip | 2
-rwxr-xr-x  iOS/Resources/bin/x86_64-apple-ios-simulator-strip | 2
-rw-r--r--  iOS/testbed/iOSTestbedTests/iOSTestbedTests.m | 51
-rw-r--r--  pyconfig.h.in | 18
298 files changed, 6480 insertions, 3518 deletions
diff --git a/.github/workflows/jit.yml b/.github/workflows/jit.yml
index 116e0c1e945..947badff816 100644
--- a/.github/workflows/jit.yml
+++ b/.github/workflows/jit.yml
@@ -5,6 +5,8 @@ on:
- '**jit**'
- 'Python/bytecodes.c'
- 'Python/optimizer*.c'
+ - 'Python/executor_cases.c.h'
+ - 'Python/optimizer_cases.c.h'
- '!Python/perf_jit_trampoline.c'
- '!**/*.md'
- '!**/*.ini'
@@ -13,6 +15,8 @@ on:
- '**jit**'
- 'Python/bytecodes.c'
- 'Python/optimizer*.c'
+ - 'Python/executor_cases.c.h'
+ - 'Python/optimizer_cases.c.h'
- '!Python/perf_jit_trampoline.c'
- '!**/*.md'
- '!**/*.ini'
diff --git a/.github/workflows/posix-deps-apt.sh b/.github/workflows/posix-deps-apt.sh
index 7773222af5d..44e6a9ce2d0 100755
--- a/.github/workflows/posix-deps-apt.sh
+++ b/.github/workflows/posix-deps-apt.sh
@@ -25,3 +25,10 @@ apt-get -yq install \
uuid-dev \
xvfb \
zlib1g-dev
+
+# Workaround missing libmpdec-dev on ubuntu 24.04:
+# https://launchpad.net/~ondrej/+archive/ubuntu/php
+# https://deb.sury.org/
+sudo add-apt-repository ppa:ondrej/php
+apt-get update
+apt-get -yq install libmpdec-dev
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 822a8a9f4e5..86410c46d1d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -34,6 +34,13 @@ repos:
name: Run Black on Tools/jit/
files: ^Tools/jit/
+ - repo: https://github.com/Lucas-C/pre-commit-hooks
+ rev: v1.5.5
+ hooks:
+ - id: remove-tabs
+ types: [python]
+ exclude: ^Tools/c-analyzer/cpython/_parser.py
+
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
diff --git a/Doc/c-api/capsule.rst b/Doc/c-api/capsule.rst
index cdb8aa33e9f..64dc4f5275b 100644
--- a/Doc/c-api/capsule.rst
+++ b/Doc/c-api/capsule.rst
@@ -105,9 +105,19 @@ Refer to :ref:`using-capsules` for more information on using these objects.
``module.attribute``. The *name* stored in the capsule must match this
string exactly.
+ This function splits *name* on the ``.`` character, and imports the first
+ element. It then processes further elements using attribute lookups.
+
Return the capsule's internal *pointer* on success. On failure, set an
exception and return ``NULL``.
+ .. note::
+
+ If *name* points to an attribute of some submodule or subpackage, this
+ submodule or subpackage must be previously imported using other means
+ (for example, by using :c:func:`PyImport_ImportModule`) for the
+ attribute lookups to succeed.
+
.. versionchanged:: 3.3
*no_block* has no effect anymore.
diff --git a/Doc/c-api/code.rst b/Doc/c-api/code.rst
index 42594f063b0..717b0da8f87 100644
--- a/Doc/c-api/code.rst
+++ b/Doc/c-api/code.rst
@@ -211,6 +211,71 @@ bound into a function.
.. versionadded:: 3.12
+.. _c_codeobject_flags:
+
+Code Object Flags
+-----------------
+
+Code objects contain a bit-field of flags, which can be retrieved as the
+:attr:`~codeobject.co_flags` Python attribute (for example using
+:c:func:`PyObject_GetAttrString`), and set using a *flags* argument to
+:c:func:`PyUnstable_Code_New` and similar functions.
+
+Flags whose names start with ``CO_FUTURE_`` correspond to features normally
+selectable by :ref:`future statements <future>`. These flags can be used in
+:c:member:`PyCompilerFlags.cf_flags`.
+Note that many ``CO_FUTURE_`` flags are mandatory in current versions of
+Python, and setting them has no effect.
+
+The following flags are available.
+For their meaning, see the linked documentation of their Python equivalents.
+
+
+.. list-table::
+ :widths: auto
+ :header-rows: 1
+
+ * * Flag
+ * Meaning
+ * * .. c:macro:: CO_OPTIMIZED
+ * :py:data:`inspect.CO_OPTIMIZED`
+ * * .. c:macro:: CO_NEWLOCALS
+ * :py:data:`inspect.CO_NEWLOCALS`
+ * * .. c:macro:: CO_VARARGS
+ * :py:data:`inspect.CO_VARARGS`
+ * * .. c:macro:: CO_VARKEYWORDS
+ * :py:data:`inspect.CO_VARKEYWORDS`
+ * * .. c:macro:: CO_NESTED
+ * :py:data:`inspect.CO_NESTED`
+ * * .. c:macro:: CO_GENERATOR
+ * :py:data:`inspect.CO_GENERATOR`
+ * * .. c:macro:: CO_COROUTINE
+ * :py:data:`inspect.CO_COROUTINE`
+ * * .. c:macro:: CO_ITERABLE_COROUTINE
+ * :py:data:`inspect.CO_ITERABLE_COROUTINE`
+ * * .. c:macro:: CO_ASYNC_GENERATOR
+ * :py:data:`inspect.CO_ASYNC_GENERATOR`
+ * * .. c:macro:: CO_HAS_DOCSTRING
+ * :py:data:`inspect.CO_HAS_DOCSTRING`
+ * * .. c:macro:: CO_METHOD
+ * :py:data:`inspect.CO_METHOD`
+
+ * * .. c:macro:: CO_FUTURE_DIVISION
+ * no effect (:py:data:`__future__.division`)
+ * * .. c:macro:: CO_FUTURE_ABSOLUTE_IMPORT
+ * no effect (:py:data:`__future__.absolute_import`)
+ * * .. c:macro:: CO_FUTURE_WITH_STATEMENT
+ * no effect (:py:data:`__future__.with_statement`)
+ * * .. c:macro:: CO_FUTURE_PRINT_FUNCTION
+ * no effect (:py:data:`__future__.print_function`)
+ * * .. c:macro:: CO_FUTURE_UNICODE_LITERALS
+ * no effect (:py:data:`__future__.unicode_literals`)
+ * * .. c:macro:: CO_FUTURE_GENERATOR_STOP
+ * no effect (:py:data:`__future__.generator_stop`)
+ * * .. c:macro:: CO_FUTURE_ANNOTATIONS
+ * :py:data:`__future__.annotations`
+
+
Extra information
-----------------
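
The flags listed above are the same values that :mod:`inspect` exposes at the
Python level, so the table can be sanity-checked without touching the C API.
A small illustrative sketch (plain Python, not part of the patch):

.. code-block:: python

    import inspect

    async def agen():
        yield 1

    # co_flags is a bit-field; inspect exposes the CO_* constants that the
    # C macros above correspond to.
    flags = agen.__code__.co_flags
    print(bool(flags & inspect.CO_ASYNC_GENERATOR))  # True
    print(bool(flags & inspect.CO_VARARGS))          # False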
diff --git a/Doc/c-api/init.rst b/Doc/c-api/init.rst
index 3106bf9808f..409539dec17 100644
--- a/Doc/c-api/init.rst
+++ b/Doc/c-api/init.rst
@@ -1250,7 +1250,7 @@ All of the following functions must be called after :c:func:`Py_Initialize`.
.. c:function:: void PyInterpreterState_Clear(PyInterpreterState *interp)
Reset all information in an interpreter state object. There must be
- an :term:`attached thread state` for the the interpreter.
+ an :term:`attached thread state` for the interpreter.
.. audit-event:: cpython.PyInterpreterState_Clear "" c.PyInterpreterState_Clear
@@ -2277,6 +2277,18 @@ The C-API provides a basic mutual exclusion lock.
.. versionadded:: 3.13
+.. c:function:: int PyMutex_IsLocked(PyMutex *m)
+
+ Returns non-zero if the mutex *m* is currently locked, zero otherwise.
+
+ .. note::
+
+ This function is intended for use in assertions and debugging only and
+ should not be used to make concurrency control decisions, as the lock
+ state may change immediately after the check.
+
+ .. versionadded:: next
+
.. _python-critical-section-api:
Python Critical Section API
diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst
index 25d9e62e387..2d0bda76697 100644
--- a/Doc/c-api/long.rst
+++ b/Doc/c-api/long.rst
@@ -439,7 +439,7 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate.
All *n_bytes* of the buffer are written: large buffers are padded with
zeroes.
- If the returned value is greater than than *n_bytes*, the value was
+ If the returned value is greater than *n_bytes*, the value was
truncated: as many of the lowest bits of the value as could fit are written,
and the higher bits are ignored. This matches the typical behavior
of a C-style downcast.
diff --git a/Doc/c-api/object.rst b/Doc/c-api/object.rst
index 0fd159f1eb8..21fa1491b33 100644
--- a/Doc/c-api/object.rst
+++ b/Doc/c-api/object.rst
@@ -197,6 +197,13 @@ Object Protocol
in favour of using :c:func:`PyObject_DelAttr`, but there are currently no
plans to remove it.
The function must not be called with ``NULL`` *v* and an exception set.
+ This case can arise from forgetting ``NULL`` checks and would delete the
+ attribute.
+
+ .. versionchanged:: next
+ Must not be called with NULL value if an exception is set.
+
.. c:function:: int PyObject_SetAttrString(PyObject *o, const char *attr_name, PyObject *v)
@@ -207,6 +214,10 @@ Object Protocol
If *v* is ``NULL``, the attribute is deleted, but this feature is
deprecated in favour of using :c:func:`PyObject_DelAttrString`.
The function must not be called with ``NULL`` *v* and an exception set.
+ This case can arise from forgetting ``NULL`` checks and would delete the
+ attribute.
+
The number of different attribute names passed to this function
should be kept small, usually by using a statically allocated string
as *attr_name*.
@@ -215,6 +226,10 @@ Object Protocol
For more details, see :c:func:`PyUnicode_InternFromString`, which may be
used internally to create a key object.
+ .. versionchanged:: next
+ Must not be called with NULL value if an exception is set.
+
+
.. c:function:: int PyObject_GenericSetAttr(PyObject *o, PyObject *name, PyObject *value)
Generic attribute setter and deleter function that is meant
diff --git a/Doc/c-api/structures.rst b/Doc/c-api/structures.rst
index 987bc167c68..58dd915e04f 100644
--- a/Doc/c-api/structures.rst
+++ b/Doc/c-api/structures.rst
@@ -28,18 +28,52 @@ under :ref:`reference counting <countingrefs>`.
object. In a normal "release" build, it contains only the object's
reference count and a pointer to the corresponding type object.
Nothing is actually declared to be a :c:type:`PyObject`, but every pointer
- to a Python object can be cast to a :c:expr:`PyObject*`. Access to the
- members must be done by using the macros :c:macro:`Py_REFCNT` and
- :c:macro:`Py_TYPE`.
+ to a Python object can be cast to a :c:expr:`PyObject*`.
+
+ The members must not be accessed directly; instead use macros such as
+ :c:macro:`Py_REFCNT` and :c:macro:`Py_TYPE`.
+
+ .. c:member:: Py_ssize_t ob_refcnt
+
+ The object's reference count, as returned by :c:macro:`Py_REFCNT`.
+ Do not use this field directly; instead use functions and macros such as
+ :c:macro:`!Py_REFCNT`, :c:func:`Py_INCREF` and :c:func:`Py_DecRef`.
+
+ The field type may be different from ``Py_ssize_t``, depending on
+ build configuration and platform.
+
+ .. c:member:: PyTypeObject* ob_type
+
+ The object's type.
+ Do not use this field directly; use :c:macro:`Py_TYPE` and
+ :c:func:`Py_SET_TYPE` instead.
.. c:type:: PyVarObject
- This is an extension of :c:type:`PyObject` that adds the :c:member:`~PyVarObject.ob_size`
- field. This is only used for objects that have some notion of *length*.
- This type does not often appear in the Python/C API.
- Access to the members must be done by using the macros
- :c:macro:`Py_REFCNT`, :c:macro:`Py_TYPE`, and :c:macro:`Py_SIZE`.
+ An extension of :c:type:`PyObject` that adds the
+ :c:member:`~PyVarObject.ob_size` field.
+ This is intended for objects that have some notion of *length*.
+
+ As with :c:type:`!PyObject`, the members must not be accessed directly;
+ instead use macros such as :c:macro:`Py_SIZE`, :c:macro:`Py_REFCNT` and
+ :c:macro:`Py_TYPE`.
+
+ .. c:member:: Py_ssize_t ob_size
+
+ A size field, whose contents should be considered an object's internal
+ implementation detail.
+
+ Do not use this field directly; use :c:macro:`Py_SIZE` instead.
+
+ Object creation functions such as :c:func:`PyObject_NewVar` will
+ generally set this field to the requested size (number of items).
+ After creation, arbitrary values can be stored in :c:member:`!ob_size`
+ using :c:macro:`Py_SET_SIZE`.
+
+ To get an object's publicly exposed length, as returned by
+ the Python function :py:func:`len`, use :c:func:`PyObject_Length`
+ instead.
.. c:macro:: PyObject_HEAD
@@ -103,9 +137,8 @@ under :ref:`reference counting <countingrefs>`.
Get the type of the Python object *o*.
- Return a :term:`borrowed reference`.
-
- Use the :c:func:`Py_SET_TYPE` function to set an object type.
+ The returned reference is :term:`borrowed <borrowed reference>` from *o*.
+ Do not release it with :c:func:`Py_DECREF` or similar.
.. versionchanged:: 3.11
:c:func:`Py_TYPE()` is changed to an inline static function.
@@ -122,16 +155,26 @@ under :ref:`reference counting <countingrefs>`.
.. c:function:: void Py_SET_TYPE(PyObject *o, PyTypeObject *type)
- Set the object *o* type to *type*.
+ Set the type of object *o* to *type*, without any checking or reference
+ counting.
+
+ This is a very low-level operation.
+ Consider instead setting the Python attribute :attr:`~object.__class__`
+ using :c:func:`PyObject_SetAttrString` or similar.
+
+ Note that assigning an incompatible type can lead to undefined behavior.
+
+ If *type* is a :ref:`heap type <heap-types>`, the caller must create a
+ new reference to it.
+ Similarly, if the old type of *o* is a heap type, the caller must release
+ a reference to that type.
.. versionadded:: 3.9
.. c:function:: Py_ssize_t Py_SIZE(PyVarObject *o)
- Get the size of the Python object *o*.
-
- Use the :c:func:`Py_SET_SIZE` function to set an object size.
+ Get the :c:member:`~PyVarObject.ob_size` field of *o*.
.. versionchanged:: 3.11
:c:func:`Py_SIZE()` is changed to an inline static function.
@@ -140,7 +183,7 @@ under :ref:`reference counting <countingrefs>`.
.. c:function:: void Py_SET_SIZE(PyVarObject *o, Py_ssize_t size)
- Set the object *o* size to *size*.
+ Set the :c:member:`~PyVarObject.ob_size` field of *o* to *size*.
.. versionadded:: 3.9
diff --git a/Doc/c-api/typeobj.rst b/Doc/c-api/typeobj.rst
index af2bead3bb5..060d6f60174 100644
--- a/Doc/c-api/typeobj.rst
+++ b/Doc/c-api/typeobj.rst
@@ -492,9 +492,9 @@ metatype) initializes :c:member:`~PyTypeObject.tp_itemsize`, which means that it
type objects) *must* have the :c:member:`~PyVarObject.ob_size` field.
-.. c:member:: Py_ssize_t PyObject.ob_refcnt
+:c:member:`PyObject.ob_refcnt`
- This is the type object's reference count, initialized to ``1`` by the
+ The type object's reference count is initialized to ``1`` by the
``PyObject_HEAD_INIT`` macro. Note that for :ref:`statically allocated type
objects <static-types>`, the type's instances (objects whose :c:member:`~PyObject.ob_type`
points back to the type) do *not* count as references. But for
@@ -506,7 +506,7 @@ type objects) *must* have the :c:member:`~PyVarObject.ob_size` field.
This field is not inherited by subtypes.
-.. c:member:: PyTypeObject* PyObject.ob_type
+:c:member:`PyObject.ob_type`
This is the type's type, in other words its metatype. It is initialized by the
argument to the ``PyObject_HEAD_INIT`` macro, and its value should normally be
@@ -532,14 +532,13 @@ type objects) *must* have the :c:member:`~PyVarObject.ob_size` field.
PyVarObject Slots
-----------------
-.. c:member:: Py_ssize_t PyVarObject.ob_size
+:c:member:`PyVarObject.ob_size`
For :ref:`statically allocated type objects <static-types>`, this should be
initialized to zero. For :ref:`dynamically allocated type objects
<heap-types>`, this field has a special internal meaning.
- This field should be accessed using the :c:func:`Py_SIZE()` and
- :c:func:`Py_SET_SIZE()` macros.
+ This field should be accessed using the :c:func:`Py_SIZE()` macro.
**Inheritance:**
diff --git a/Doc/c-api/veryhigh.rst b/Doc/c-api/veryhigh.rst
index 1ef4181d52e..fb07fec7eff 100644
--- a/Doc/c-api/veryhigh.rst
+++ b/Doc/c-api/veryhigh.rst
@@ -361,7 +361,7 @@ the same library that the Python runtime is using.
:py:mod:`!ast` Python module, which exports these constants under
the same names.
- .. c:var:: int CO_FUTURE_DIVISION
-
- This bit can be set in *flags* to cause division operator ``/`` to be
- interpreted as "true division" according to :pep:`238`.
+ The "``PyCF``" flags above can be combined with "``CO_FUTURE``" flags such
+ as :c:macro:`CO_FUTURE_ANNOTATIONS` to enable features normally
+ selectable using :ref:`future statements <future>`.
+ See :ref:`c_codeobject_flags` for a complete list.
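
At the Python level, the closest analogue of combining ``CO_FUTURE`` bits into
:c:member:`PyCompilerFlags.cf_flags` is passing a future feature's
``compiler_flag`` to :func:`compile`. A hedged sketch, not part of the patch:

.. code-block:: python

    import __future__

    # Compile with the "annotations" future enabled, roughly what OR-ing
    # CO_FUTURE_ANNOTATIONS into cf_flags does from C.
    source = "def f(x: NotDefinedYet) -> None: pass"
    code = compile(source, "<demo>", "exec",
                   flags=__future__.annotations.compiler_flag)
    ns = {}
    exec(code, ns)
    print(ns["f"].__annotations__)  # annotations are stored as strings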
diff --git a/Doc/conf.py b/Doc/conf.py
index 161c2986441..8b2a8f20fcc 100644
--- a/Doc/conf.py
+++ b/Doc/conf.py
@@ -635,13 +635,14 @@ ogp_social_cards = { # Used when matplotlib is installed
'image': '_static/og-image.png',
'line_color': '#3776ab',
}
-ogp_custom_meta_tags = [
- '<meta name="theme-color" content="#3776ab">',
-]
-if 'create-social-cards' not in tags: # noqa: F821
- # Define a static preview image when not creating social cards
- ogp_image = '_static/og-image.png'
- ogp_custom_meta_tags += [
- '<meta property="og:image:width" content="200">',
- '<meta property="og:image:height" content="200">',
+if 'builder_html' in tags: # noqa: F821
+ ogp_custom_meta_tags = [
+ '<meta name="theme-color" content="#3776ab">',
]
+ if 'create-social-cards' not in tags: # noqa: F821
+ # Define a static preview image when not creating social cards
+ ogp_image = '_static/og-image.png'
+ ogp_custom_meta_tags += [
+ '<meta property="og:image:width" content="200">',
+ '<meta property="og:image:height" content="200">',
+ ]
diff --git a/Doc/deprecations/c-api-pending-removal-in-3.15.rst b/Doc/deprecations/c-api-pending-removal-in-3.15.rst
index b87f0a5ecde..a3e335ecaf4 100644
--- a/Doc/deprecations/c-api-pending-removal-in-3.15.rst
+++ b/Doc/deprecations/c-api-pending-removal-in-3.15.rst
@@ -1,7 +1,6 @@
Pending removal in Python 3.15
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-* The bundled copy of ``libmpdecimal``.
* The :c:func:`!PyImport_ImportModuleNoBlock`:
Use :c:func:`PyImport_ImportModule` instead.
* :c:func:`PyWeakref_GetObject` and :c:func:`PyWeakref_GET_OBJECT`:
diff --git a/Doc/deprecations/c-api-pending-removal-in-3.16.rst b/Doc/deprecations/c-api-pending-removal-in-3.16.rst
new file mode 100644
index 00000000000..9453f83799c
--- /dev/null
+++ b/Doc/deprecations/c-api-pending-removal-in-3.16.rst
@@ -0,0 +1,4 @@
+Pending removal in Python 3.16
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+* The bundled copy of ``libmpdec``.
diff --git a/Doc/deprecations/pending-removal-in-3.15.rst b/Doc/deprecations/pending-removal-in-3.15.rst
index a76d06cce12..c5ca599bb04 100644
--- a/Doc/deprecations/pending-removal-in-3.15.rst
+++ b/Doc/deprecations/pending-removal-in-3.15.rst
@@ -97,6 +97,8 @@ Pending removal in Python 3.15
After eight years in the :mod:`typing` module,
it has yet to be supported by any major type checker.
+* :mod:`!sre_compile`, :mod:`!sre_constants` and :mod:`!sre_parse` modules.
+
* :mod:`wave`:
* The ``getmark()``, ``setmark()`` and ``getmarkers()`` methods of
diff --git a/Doc/deprecations/pending-removal-in-future.rst b/Doc/deprecations/pending-removal-in-future.rst
index 4c4a368baca..edb672ed8ad 100644
--- a/Doc/deprecations/pending-removal-in-future.rst
+++ b/Doc/deprecations/pending-removal-in-future.rst
@@ -89,8 +89,6 @@ although there is currently no date scheduled for their removal.
underscore.
(Contributed by Serhiy Storchaka in :gh:`91760`.)
-* :mod:`!sre_compile`, :mod:`!sre_constants` and :mod:`!sre_parse` modules.
-
* :mod:`shutil`: :func:`~shutil.rmtree`'s *onerror* parameter is deprecated in
Python 3.12; use the *onexc* parameter instead.
diff --git a/Doc/extending/newtypes_tutorial.rst b/Doc/extending/newtypes_tutorial.rst
index f14690de4f8..3bbee33bd50 100644
--- a/Doc/extending/newtypes_tutorial.rst
+++ b/Doc/extending/newtypes_tutorial.rst
@@ -277,7 +277,7 @@ be an instance of a subclass.
The explicit cast to ``CustomObject *`` above is needed because we defined
``Custom_dealloc`` to take a ``PyObject *`` argument, as the ``tp_dealloc``
function pointer expects to receive a ``PyObject *`` argument.
- By assigning to the the ``tp_dealloc`` slot of a type, we declare
+ By assigning to the ``tp_dealloc`` slot of a type, we declare
that it can only be called with instances of our ``CustomObject``
class, so the cast to ``(CustomObject *)`` is safe.
This is object-oriented polymorphism, in C!
diff --git a/Doc/howto/functional.rst b/Doc/howto/functional.rst
index 78e56e0c64f..053558e3890 100644
--- a/Doc/howto/functional.rst
+++ b/Doc/howto/functional.rst
@@ -1217,7 +1217,7 @@ flow inside a program. The book uses Scheme for its examples, but many of the
design approaches described in these chapters are applicable to functional-style
Python code.
-https://www.defmacro.org/ramblings/fp.html: A general introduction to functional
+https://defmacro.org/2006/06/19/fp.html: A general introduction to functional
programming that uses Java examples and has a lengthy historical introduction.
https://en.wikipedia.org/wiki/Functional_programming: General Wikipedia entry
diff --git a/Doc/howto/isolating-extensions.rst b/Doc/howto/isolating-extensions.rst
index fbc426ba1d7..7da6dc8a397 100644
--- a/Doc/howto/isolating-extensions.rst
+++ b/Doc/howto/isolating-extensions.rst
@@ -626,8 +626,7 @@ Open Issues
Several issues around per-module state and heap types are still open.
-Discussions about improving the situation are best held on the `capi-sig
-mailing list <https://mail.python.org/mailman3/lists/capi-sig.python.org/>`__.
+Discussions about improving the situation are best held on the `discuss forum under c-api tag <https://discuss.python.org/c/core-dev/c-api/30>`__.
Per-Class Scope
diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst
index 7d64a02358a..ae2697fbce3 100644
--- a/Doc/howto/logging-cookbook.rst
+++ b/Doc/howto/logging-cookbook.rst
@@ -4078,6 +4078,68 @@ lines. With this approach, you get better output:
WARNING:demo: 1/0
WARNING:demo:ZeroDivisionError: division by zero
+How to uniformly handle newlines in logging output
+--------------------------------------------------
+
+Usually, messages that are logged (say to console or file) consist of a single
+line of text. However, sometimes there is a need to handle messages with
+multiple lines - whether because a logging format string contains newlines, or
+logged data contains newlines. If you want to handle such messages uniformly, so
+that each line in the logged message appears uniformly formatted as if it was
+logged separately, you can do this using a handler mixin, as in the following
+snippet:
+
+.. code-block:: python
+
+ # Assume this is in a module mymixins.py
+ import copy
+
+ class MultilineMixin:
+ def emit(self, record):
+ s = record.getMessage()
+ if '\n' not in s:
+ super().emit(record)
+ else:
+ lines = s.splitlines()
+ rec = copy.copy(record)
+ rec.args = None
+ for line in lines:
+ rec.msg = line
+ super().emit(rec)
+
+You can use the mixin as in the following script:
+
+.. code-block:: python
+
+ import logging
+
+ from mymixins import MultilineMixin
+
+ logger = logging.getLogger(__name__)
+
+ class StreamHandler(MultilineMixin, logging.StreamHandler):
+ pass
+
+ if __name__ == '__main__':
+ logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)-9s %(message)s',
+ handlers = [StreamHandler()])
+ logger.debug('Single line')
+ logger.debug('Multiple lines:\nfool me once ...')
+ logger.debug('Another single line')
+ logger.debug('Multiple lines:\n%s', 'fool me ...\ncan\'t get fooled again')
+
+The script, when run, prints something like:
+
+.. code-block:: text
+
+ 2025-07-02 13:54:47,234 DEBUG Single line
+ 2025-07-02 13:54:47,234 DEBUG Multiple lines:
+ 2025-07-02 13:54:47,234 DEBUG fool me once ...
+ 2025-07-02 13:54:47,234 DEBUG Another single line
+ 2025-07-02 13:54:47,234 DEBUG Multiple lines:
+ 2025-07-02 13:54:47,234 DEBUG fool me ...
+ 2025-07-02 13:54:47,234 DEBUG can't get fooled again
+
.. patterns-to-avoid:
diff --git a/Doc/installing/index.rst b/Doc/installing/index.rst
index a46c1caefe4..3a485a43a5a 100644
--- a/Doc/installing/index.rst
+++ b/Doc/installing/index.rst
@@ -188,7 +188,7 @@ switch::
Once the Development & Deployment part of PPUG is fleshed out, some of
those sections should be linked from new questions here (most notably,
we should have a question about avoiding depending on PyPI that links to
- https://packaging.python.org/en/latest/mirrors/)
+ https://packaging.python.org/en/latest/guides/index-mirrors-and-caches/)
Common installation issues
diff --git a/Doc/library/__future__.rst b/Doc/library/__future__.rst
index 4f3b663006f..5d916b30112 100644
--- a/Doc/library/__future__.rst
+++ b/Doc/library/__future__.rst
@@ -37,38 +37,52 @@ No feature description will ever be deleted from :mod:`__future__`. Since its
introduction in Python 2.1 the following features have found their way into the
language using this mechanism:
-+------------------+-------------+--------------+---------------------------------------------+
-| feature | optional in | mandatory in | effect |
-+==================+=============+==============+=============================================+
-| nested_scopes | 2.1.0b1 | 2.2 | :pep:`227`: |
-| | | | *Statically Nested Scopes* |
-+------------------+-------------+--------------+---------------------------------------------+
-| generators | 2.2.0a1 | 2.3 | :pep:`255`: |
-| | | | *Simple Generators* |
-+------------------+-------------+--------------+---------------------------------------------+
-| division | 2.2.0a2 | 3.0 | :pep:`238`: |
-| | | | *Changing the Division Operator* |
-+------------------+-------------+--------------+---------------------------------------------+
-| absolute_import | 2.5.0a1 | 3.0 | :pep:`328`: |
-| | | | *Imports: Multi-Line and Absolute/Relative* |
-+------------------+-------------+--------------+---------------------------------------------+
-| with_statement | 2.5.0a1 | 2.6 | :pep:`343`: |
-| | | | *The "with" Statement* |
-+------------------+-------------+--------------+---------------------------------------------+
-| print_function | 2.6.0a2 | 3.0 | :pep:`3105`: |
-| | | | *Make print a function* |
-+------------------+-------------+--------------+---------------------------------------------+
-| unicode_literals | 2.6.0a2 | 3.0 | :pep:`3112`: |
-| | | | *Bytes literals in Python 3000* |
-+------------------+-------------+--------------+---------------------------------------------+
-| generator_stop | 3.5.0b1 | 3.7 | :pep:`479`: |
-| | | | *StopIteration handling inside generators* |
-+------------------+-------------+--------------+---------------------------------------------+
-| annotations | 3.7.0b1 | Never [1]_ | :pep:`563`: |
-| | | | *Postponed evaluation of annotations*, |
-| | | | :pep:`649`: *Deferred evaluation of |
-| | | | annotations using descriptors* |
-+------------------+-------------+--------------+---------------------------------------------+
+
+.. list-table::
+ :widths: auto
+ :header-rows: 1
+
+ * * feature
+ * optional in
+ * mandatory in
+ * effect
+ * * .. data:: nested_scopes
+ * 2.1.0b1
+ * 2.2
+ * :pep:`227`: *Statically Nested Scopes*
+ * * .. data:: generators
+ * 2.2.0a1
+ * 2.3
+ * :pep:`255`: *Simple Generators*
+ * * .. data:: division
+ * 2.2.0a2
+ * 3.0
+ * :pep:`238`: *Changing the Division Operator*
+ * * .. data:: absolute_import
+ * 2.5.0a1
+ * 3.0
+ * :pep:`328`: *Imports: Multi-Line and Absolute/Relative*
+ * * .. data:: with_statement
+ * 2.5.0a1
+ * 2.6
+ * :pep:`343`: *The “with” Statement*
+ * * .. data:: print_function
+ * 2.6.0a2
+ * 3.0
+ * :pep:`3105`: *Make print a function*
+ * * .. data:: unicode_literals
+ * 2.6.0a2
+ * 3.0
+ * :pep:`3112`: *Bytes literals in Python 3000*
+ * * .. data:: generator_stop
+ * 3.5.0b1
+ * 3.7
+ * :pep:`479`: *StopIteration handling inside generators*
+ * * .. data:: annotations
+ * 3.7.0b1
+ * Never [1]_
+ * :pep:`563`: *Postponed evaluation of annotations*,
+ :pep:`649`: *Deferred evaluation of annotations using descriptors*
.. XXX Adding a new entry? Remember to update simple_stmts.rst, too.
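
Each row of the table above corresponds to a ``_Feature`` instance in
:mod:`__future__`, so the "optional in" and "mandatory in" columns can also be
read programmatically, for example:

.. code-block:: python

    import __future__

    for name in __future__.all_feature_names:
        feature = getattr(__future__, name)
        print(name,
              feature.getOptionalRelease()[:2],  # first release allowing the import
              feature.getMandatoryRelease())     # release where it became (or will become) default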
diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst
index a03d88092db..f189f6b8fa8 100644
--- a/Doc/library/argparse.rst
+++ b/Doc/library/argparse.rst
@@ -839,23 +839,11 @@ how the command-line arguments should be handled. The supplied actions are:
>>> parser.parse_args(['--version'])
PROG 2.0
-Only actions that consume command-line arguments (e.g. ``'store'``,
-``'append'`` or ``'extend'``) can be used with positional arguments.
-
-.. class:: BooleanOptionalAction
-
- You may also specify an arbitrary action by passing an :class:`Action` subclass or
- other object that implements the same interface. The :class:`!BooleanOptionalAction`
- is available in :mod:`!argparse` and adds support for boolean actions such as
- ``--foo`` and ``--no-foo``::
-
- >>> import argparse
- >>> parser = argparse.ArgumentParser()
- >>> parser.add_argument('--foo', action=argparse.BooleanOptionalAction)
- >>> parser.parse_args(['--no-foo'])
- Namespace(foo=False)
-
- .. versionadded:: 3.9
+You may also specify an arbitrary action by passing an :class:`Action` subclass
+(e.g. :class:`BooleanOptionalAction`) or other object that implements the same
+interface. Only actions that consume command-line arguments (e.g. ``'store'``,
+``'append'``, ``'extend'``, or custom actions with non-zero ``nargs``) can be used
+with positional arguments.
The recommended way to create a custom action is to extend :class:`Action`,
overriding the :meth:`!__call__` method and optionally the :meth:`!__init__` and
@@ -1429,6 +1417,21 @@ this API may be passed as the ``action`` parameter to
and return a string which will be used when printing the usage of the program.
If such method is not provided, a sensible default will be used.
+.. class:: BooleanOptionalAction
+
+ A subclass of :class:`Action` for handling boolean flags with positive
+ and negative options. Adding a single argument such as ``--foo`` automatically
+ creates both ``--foo`` and ``--no-foo`` options, storing ``True`` and ``False``
+ respectively::
+
+ >>> import argparse
+ >>> parser = argparse.ArgumentParser()
+ >>> parser.add_argument('--foo', action=argparse.BooleanOptionalAction)
+ >>> parser.parse_args(['--no-foo'])
+ Namespace(foo=False)
+
+ .. versionadded:: 3.9
+
The parse_args() method
-----------------------
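
For completeness, a minimal custom action of the kind the rewritten paragraph
refers to; the ``StoreUpper`` name is invented for illustration:

.. code-block:: python

    import argparse

    class StoreUpper(argparse.Action):
        """Store the supplied value upper-cased."""
        def __call__(self, parser, namespace, values, option_string=None):
            setattr(namespace, self.dest, values.upper())

    parser = argparse.ArgumentParser()
    parser.add_argument('--name', action=StoreUpper)
    print(parser.parse_args(['--name', 'spam']))  # Namespace(name='SPAM')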
diff --git a/Doc/library/codecs.rst b/Doc/library/codecs.rst
index 86511602fa5..b231fa568cf 100644
--- a/Doc/library/codecs.rst
+++ b/Doc/library/codecs.rst
@@ -53,6 +53,14 @@ any codec:
:exc:`UnicodeDecodeError`). Refer to :ref:`codec-base-classes` for more
information on codec error handling.
+.. function:: charmap_build(string)
+
+ Return a mapping suitable for encoding with a custom single-byte encoding.
+ Given a :class:`str` *string* of up to 256 characters representing a
+ decoding table, returns either a compact internal mapping object
+ ``EncodingMap`` or a :class:`dictionary <dict>` mapping character ordinals
+ to byte values. Raises a :exc:`TypeError` on invalid input.
+
The full details for each codec can also be looked up directly:
.. function:: lookup(encoding, /)
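
A brief sketch of how the newly documented ``charmap_build`` pairs with
:func:`codecs.charmap_encode` and :func:`codecs.charmap_decode`; the identity
table below is an arbitrary example, not taken from the patch:

.. code-block:: python

    import codecs

    # Decoding table: byte value -> character (identity mapping for this toy codec).
    decoding_table = ''.join(chr(i) for i in range(256))

    # Reverse (encoding) map built by the function documented above.
    encoding_map = codecs.charmap_build(decoding_table)

    encoded, _ = codecs.charmap_encode('abc', 'strict', encoding_map)
    decoded, _ = codecs.charmap_decode(encoded, 'strict', decoding_table)
    print(encoded, decoded)  # b'abc' abc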
diff --git a/Doc/library/compression.zstd.rst b/Doc/library/compression.zstd.rst
index 57ad8e3377f..a901403621b 100644
--- a/Doc/library/compression.zstd.rst
+++ b/Doc/library/compression.zstd.rst
@@ -523,8 +523,14 @@ Advanced parameter control
.. attribute:: compression_level
A high-level means of setting other compression parameters that affect
- the speed and ratio of compressing data. Setting the level to zero uses
- :attr:`COMPRESSION_LEVEL_DEFAULT`.
+ the speed and ratio of compressing data.
+
+ Regular compression levels are greater than ``0``. Values greater than
+ ``20`` are considered "ultra" compression and require more memory than
+ other levels. Negative values can be used to trade off faster compression
+ for worse compression ratios.
+
+ Setting the level to zero uses :attr:`COMPRESSION_LEVEL_DEFAULT`.
.. attribute:: window_log
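
A rough sketch of choosing levels from Python; it assumes the module-level
``compress()`` helper and an ``options`` mapping keyed by
``CompressionParameter`` members, as described elsewhere in the module's
documentation:

.. code-block:: python

    from compression import zstd

    data = b"an example payload " * 1000

    fast = zstd.compress(data, level=-5)  # negative levels trade ratio for speed
    default = zstd.compress(data)         # uses COMPRESSION_LEVEL_DEFAULT
    strong = zstd.compress(
        data,
        options={zstd.CompressionParameter.compression_level: 19},
    )
    print(len(fast), len(default), len(strong))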
diff --git a/Doc/library/concurrent.interpreters.rst b/Doc/library/concurrent.interpreters.rst
index 8860418e87a..524d505bcf1 100644
--- a/Doc/library/concurrent.interpreters.rst
+++ b/Doc/library/concurrent.interpreters.rst
@@ -13,17 +13,26 @@
--------------
-
-Introduction
-------------
-
The :mod:`!concurrent.interpreters` module constructs higher-level
interfaces on top of the lower level :mod:`!_interpreters` module.
-.. XXX Add references to the upcoming HOWTO docs in the seealso block.
+The module is primarily meant to provide a basic API for managing
+interpreters (AKA "subinterpreters") and running things in them.
+Running mostly involves switching to an interpreter (in the current
+thread) and calling a function in that execution context.
+
+For concurrency, interpreters themselves (and this module) don't
+provide much more than isolation, which on its own isn't useful.
+Actual concurrency is available separately through
+:mod:`threads <threading>`. See `below <interp-concurrency_>`_.
.. seealso::
+ :class:`~concurrent.futures.InterpreterPoolExecutor`
+ combines threads with interpreters in a familiar interface.
+
+ .. XXX Add references to the upcoming HOWTO docs in the seealso block.
+
:ref:`isolating-extensions-howto`
how to update an extension module to support multiple interpreters
@@ -41,18 +50,155 @@ interfaces on top of the lower level :mod:`!_interpreters` module.
Key details
-----------
-Before we dive into examples, there are a small number of details
+Before we dive in further, there are a small number of details
to keep in mind about using multiple interpreters:
-* isolated, by default
+* `isolated <interp-isolation_>`_, by default
* no implicit threads
* not all PyPI packages support use in multiple interpreters yet
.. XXX Are there other relevant details to list?
-In the context of multiple interpreters, "isolated" means that
-different interpreters do not share any state. In practice, there is some
-process-global data they all share, but that is managed by the runtime.
+
+.. _interpreters-intro:
+
+Introduction
+------------
+
+An "interpreter" is effectively the execution context of the Python
+runtime. It contains all of the state the runtime needs to execute
+a program. This includes things like the import state and builtins.
+(Each thread, even if there's only the main thread, has some extra
+runtime state, in addition to the current interpreter, related to
+the current exception and the bytecode eval loop.)
+
+The concept and functionality of the interpreter have been a part of
+Python since version 2.2, but the feature was only available through
+the C-API and not well known, and the `isolation <interp-isolation_>`_
+was relatively incomplete until version 3.12.
+
+.. _interp-isolation:
+
+Multiple Interpreters and Isolation
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+A Python implementation may support using multiple interpreters in the
+same process. CPython has this support. Each interpreter is
+effectively isolated from the others (with a limited number of
+carefully managed process-global exceptions to the rule).
+
+That isolation is primarily useful as a strong separation between
+distinct logical components of a program, where you want to have
+careful control of how those components interact.
+
+.. note::
+
+ Interpreters in the same process can technically never be strictly
+ isolated from one another since there are few restrictions on memory
+ access within the same process. The Python runtime makes a best
+ effort at isolation but extension modules may easily violate that.
+ Therefore, do not use multiple interpreters in security-sensitive
+ situations, where they shouldn't have access to each other's data.
+
+Running in an Interpreter
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Running in a different interpreter involves switching to it in the
+current thread and then calling some function. The runtime will
+execute the function using the current interpreter's state. The
+:mod:`!concurrent.interpreters` module provides a basic API for
+creating and managing interpreters, as well as the switch-and-call
+operation.
+
+No other threads are automatically started for the operation.
+There is `a helper <interp-call-in-thread_>`_ for that though.
+There is another dedicated helper for calling the builtin
+:func:`exec` in an interpreter.
+
+When :func:`exec` (or :func:`eval`) are called in an interpreter,
+they run using the interpreter's :mod:`!__main__` module as the
+"globals" namespace. The same is true for functions that aren't
+associated with any module. This is the same as how scripts invoked
+from the command-line run in the :mod:`!__main__` module.
+
+
+.. _interp-concurrency:
+
+Concurrency and Parallelism
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+As noted earlier, interpreters do not provide any concurrency
+on their own. They strictly represent the isolated execution
+context the runtime will use *in the current thread*. That isolation
+makes them similar to processes, but they still enjoy in-process
+efficiency, like threads.
+
+All that said, interpreters do naturally support certain flavors of
+concurrency, as a powerful side effect of that isolation. It enables a
+different approach to concurrency than you can take with async or
+threads: a model similar to CSP or the actor model, which is
+relatively easy to reason about.
+
+You can take advantage of that concurrency model in a single thread,
+switching back and forth between interpreters, Stackless-style.
+However, this model is more useful when you combine interpreters
+with multiple threads. This mostly involves starting a new thread,
+where you switch to another interpreter and run what you want there.
+
+Each actual thread in Python, even if you're only running in the main
+thread, has its own *current* execution context. Multiple threads can
+use the same interpreter or different ones.
+
+At a high level, you can think of the combination of threads and
+interpreters as threads with opt-in sharing.
+
+As a significant bonus, interpreters are sufficiently isolated that
+they do not share the :term:`GIL`, which means combining threads with
+multiple interpreters enables full multi-core parallelism.
+(This has been the case since Python 3.12.)
+
+Communication Between Interpreters
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In practice, multiple interpreters are useful only if we have a way
+to communicate between them. This usually involves some form of
+message passing, but can even mean sharing data in some carefully
+managed way.
+
+With this in mind, the :mod:`!concurrent.interpreters` module provides
+a :class:`queue.Queue` implementation, available through
+:func:`create_queue`.
+
+.. _interp-object-sharing:
+
+"Sharing" Objects
+^^^^^^^^^^^^^^^^^
+
+Any data actually shared between interpreters loses the thread-safety
+provided by the :term:`GIL`. There are various options for dealing with
+this in extension modules. However, from Python code the lack of
+thread-safety means objects can't actually be shared, with a few
+exceptions. Instead, a copy must be created, which means mutable
+objects won't stay in sync.
+
+By default, most objects are copied with :mod:`pickle` when they are
+passed to another interpreter. Nearly all of the immutable builtin
+objects are either directly shared or copied efficiently. For example:
+
+* :const:`None`
+* :class:`bool` (:const:`True` and :const:`False`)
+* :class:`bytes`
+* :class:`str`
+* :class:`int`
+* :class:`float`
+* :class:`tuple` (of similarly supported objects)
+
+There is a small number of Python types that actually share mutable
+data between interpreters:
+
+* :class:`memoryview`
+* :class:`Queue`
Reference
@@ -73,12 +219,19 @@ This module defines the following functions:
.. function:: get_main()
Return an :class:`Interpreter` object for the main interpreter.
+ This is the interpreter the runtime created to run the :term:`REPL`
+ or the script given at the command-line. It is usually the only one.
.. function:: create()
Initialize a new (idle) Python interpreter
and return an :class:`Interpreter` object for it.
+.. function:: create_queue()
+
+ Initialize a new cross-interpreter queue and return a :class:`Queue`
+ object for it.
+
Interpreter objects
^^^^^^^^^^^^^^^^^^^
@@ -94,7 +247,7 @@ Interpreter objects
(read-only)
- The interpreter's ID.
+ The underlying interpreter's ID.
.. attribute:: whence
@@ -113,8 +266,10 @@ Interpreter objects
.. method:: prepare_main(ns=None, **kwargs)
- Bind "shareable" objects in the interpreter's
- :mod:`!__main__` module.
+ Bind objects in the interpreter's :mod:`!__main__` module.
+
+ Some objects are actually shared and some are copied efficiently,
+ but most are copied via :mod:`pickle`. See :ref:`interp-object-sharing`.
.. method:: exec(code, /, dedent=True)
@@ -125,6 +280,8 @@ Interpreter objects
Return the result of running the given function in the
interpreter (in the current thread).
+ .. _interp-call-in-thread:
+
.. method:: call_in_thread(callable, /, *args, **kwargs)
Run the given function in the interpreter (in a new thread).
@@ -159,7 +316,36 @@ Exceptions
an object cannot be sent to another interpreter.
-.. XXX Add functions for communicating between interpreters.
+Communicating Between Interpreters
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. class:: Queue(id)
+
+ A wrapper around a low-level, cross-interpreter queue, which
+ implements the :class:`queue.Queue` interface. The underlying queue
+ can only be created through :func:`create_queue`.
+
+ Some objects are actually shared and some are copied efficiently,
+ but most are copied via :mod:`pickle`. See :ref:`interp-object-sharing`.
+
+ .. attribute:: id
+
+ (read-only)
+
+ The queue's ID.
+
+
+.. exception:: QueueEmptyError
+
+ This exception, a subclass of :exc:`queue.Empty`, is raised from
+ :meth:`!Queue.get` and :meth:`!Queue.get_nowait` when the queue
+ is empty.
+
+.. exception:: QueueFullError
+
+ This exception, a subclass of :exc:`queue.Full`, is raised from
+ :meth:`!Queue.put` and :meth:`!Queue.put_nowait` when the queue
+ is full.
Basic usage
@@ -184,6 +370,12 @@ Creating an interpreter and running code in it::
print('spam!')
"""))
+ def run(arg):
+ return arg
+
+ res = interp.call(run, 'spam!')
+ print(res)
+
def run():
print('spam!')
@@ -193,6 +385,3 @@ Creating an interpreter and running code in it::
t = interp.call_in_thread(run)
t.join()
-
-
-.. XXX Explain about object "sharing".
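
Not part of the patch, but a hedged end-to-end sketch tying the pieces above
together: a queue from ``create_queue()`` is bound into a subinterpreter with
``prepare_main()`` and read back in the main interpreter.

.. code-block:: python

    from concurrent import interpreters

    interp = interpreters.create()
    queue = interpreters.create_queue()

    # Bind the queue into the subinterpreter's __main__ namespace;
    # Queue is one of the few objects that is actually shared.
    interp.prepare_main(q=queue)

    # Run code in the subinterpreter; it pushes a result onto the shared queue.
    interp.exec("q.put('hello from a subinterpreter')")

    print(queue.get())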
diff --git a/Doc/library/csv.rst b/Doc/library/csv.rst
index 2e513bff651..d39c4ca4a58 100644
--- a/Doc/library/csv.rst
+++ b/Doc/library/csv.rst
@@ -53,7 +53,7 @@ The :mod:`csv` module defines the following functions:
.. index::
single: universal newlines; csv.reader function
-.. function:: reader(csvfile, dialect='excel', **fmtparams)
+.. function:: reader(csvfile, /, dialect='excel', **fmtparams)
Return a :ref:`reader object <reader-objects>` that will process
lines from the given *csvfile*. A csvfile must be an iterable of
@@ -84,7 +84,7 @@ The :mod:`csv` module defines the following functions:
Spam, Lovely Spam, Wonderful Spam
-.. function:: writer(csvfile, dialect='excel', **fmtparams)
+.. function:: writer(csvfile, /, dialect='excel', **fmtparams)
Return a writer object responsible for converting the user's data into delimited
strings on the given file-like object. *csvfile* can be any object with a
@@ -323,8 +323,8 @@ The :mod:`csv` module defines the following constants:
.. data:: QUOTE_MINIMAL
Instructs :class:`writer` objects to only quote those fields which contain
- special characters such as *delimiter*, *quotechar* or any of the characters in
- *lineterminator*.
+ special characters such as *delimiter*, *quotechar*, ``'\r'``, ``'\n'``
+ or any of the characters in *lineterminator*.
.. data:: QUOTE_NONNUMERIC
@@ -342,10 +342,13 @@ The :mod:`csv` module defines the following constants:
.. data:: QUOTE_NONE
- Instructs :class:`writer` objects to never quote fields. When the current
- *delimiter* occurs in output data it is preceded by the current *escapechar*
- character. If *escapechar* is not set, the writer will raise :exc:`Error` if
+ Instructs :class:`writer` objects to never quote fields.
+ When the current *delimiter*, *quotechar*, *escapechar*, ``'\r'``, ``'\n'``
+ or any of the characters in *lineterminator* occurs in output data
+ it is preceded by the current *escapechar* character.
+ If *escapechar* is not set, the writer will raise :exc:`Error` if
any characters that require escaping are encountered.
+ Set *quotechar* to ``None`` to prevent its escaping.
Instructs :class:`reader` objects to perform no special processing of quote characters.
@@ -414,9 +417,16 @@ Dialects support the following attributes:
.. attribute:: Dialect.escapechar
- A one-character string used by the writer to escape the *delimiter* if *quoting*
- is set to :const:`QUOTE_NONE` and the *quotechar* if *doublequote* is
- :const:`False`. On reading, the *escapechar* removes any special meaning from
+ A one-character string used by the writer to escape characters that
+ require escaping:
+
+ * the *delimiter*, the *quotechar*, ``'\r'``, ``'\n'`` and any of the
+ characters in *lineterminator* are escaped if *quoting* is set to
+ :const:`QUOTE_NONE`;
+ * the *quotechar* is escaped if *doublequote* is :const:`False`;
+ * the *escapechar* itself.
+
+ On reading, the *escapechar* removes any special meaning from
the following character. It defaults to :const:`None`, which disables escaping.
.. versionchanged:: 3.11
@@ -436,9 +446,12 @@ Dialects support the following attributes:
.. attribute:: Dialect.quotechar
- A one-character string used to quote fields containing special characters, such
- as the *delimiter* or *quotechar*, or which contain new-line characters. It
- defaults to ``'"'``.
+ A one-character string used to quote fields containing special characters,
+ such as the *delimiter* or the *quotechar*, or which contain new-line
+ characters (``'\r'``, ``'\n'`` or any of the characters in *lineterminator*).
+ It defaults to ``'"'``.
+ Can be set to ``None`` to prevent escaping ``'"'`` if *quoting* is set
+ to :const:`QUOTE_NONE`.
.. versionchanged:: 3.11
An empty *quotechar* is not allowed.
@@ -447,7 +460,8 @@ Dialects support the following attributes:
Controls when quotes should be generated by the writer and recognised by the
reader. It can take on any of the :ref:`QUOTE_\* constants <csv-constants>`
- and defaults to :const:`QUOTE_MINIMAL`.
+ and defaults to :const:`QUOTE_MINIMAL` if *quotechar* is not ``None``,
+ and :const:`QUOTE_NONE` otherwise.
.. attribute:: Dialect.skipinitialspace
diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst
index e00fe9c8145..846cece3761 100644
--- a/Doc/library/ctypes.rst
+++ b/Doc/library/ctypes.rst
@@ -2965,7 +2965,7 @@ fields, or any other data types containing pointer type fields.
.. attribute:: is_anonymous
True if this field is anonymous, that is, it contains nested sub-fields
- that should be be merged into a containing structure or union.
+ that should be merged into a containing structure or union.
.. _ctypes-arrays-pointers:
diff --git a/Doc/library/email.compat32-message.rst b/Doc/library/email.compat32-message.rst
index 4285c436e8d..5754c2b65b2 100644
--- a/Doc/library/email.compat32-message.rst
+++ b/Doc/library/email.compat32-message.rst
@@ -181,7 +181,7 @@ Here are the methods of the :class:`Message` class:
:meth:`set_payload` instead.
This is a legacy method. On the
- :class:`~email.emailmessage.EmailMessage` class its functionality is
+ :class:`~email.message.EmailMessage` class its functionality is
replaced by :meth:`~email.message.EmailMessage.set_content` and the
related ``make`` and ``add`` methods.
@@ -224,7 +224,7 @@ Here are the methods of the :class:`Message` class:
ASCII charset.
This is a legacy method. On the
- :class:`~email.emailmessage.EmailMessage` class its functionality is
+ :class:`~email.message.EmailMessage` class its functionality is
replaced by :meth:`~email.message.EmailMessage.get_content` and
:meth:`~email.message.EmailMessage.iter_parts`.
@@ -236,7 +236,7 @@ Here are the methods of the :class:`Message` class:
the message's default character set; see :meth:`set_charset` for details.
This is a legacy method. On the
- :class:`~email.emailmessage.EmailMessage` class its functionality is
+ :class:`~email.message.EmailMessage` class its functionality is
replaced by :meth:`~email.message.EmailMessage.set_content`.
@@ -265,9 +265,9 @@ Here are the methods of the :class:`Message` class:
using that :mailheader:`Content-Transfer-Encoding` and is not modified.
This is a legacy method. On the
- :class:`~email.emailmessage.EmailMessage` class its functionality is
+ :class:`~email.message.EmailMessage` class its functionality is
replaced by the *charset* parameter of the
- :meth:`email.emailmessage.EmailMessage.set_content` method.
+ :meth:`email.message.EmailMessage.set_content` method.
.. method:: get_charset()
@@ -276,7 +276,7 @@ Here are the methods of the :class:`Message` class:
message's payload.
This is a legacy method. On the
- :class:`~email.emailmessage.EmailMessage` class it always returns
+ :class:`~email.message.EmailMessage` class it always returns
``None``.
@@ -486,7 +486,7 @@ Here are the methods of the :class:`Message` class:
search instead of :mailheader:`Content-Type`.
This is a legacy method. On the
- :class:`~email.emailmessage.EmailMessage` class its functionality is
+ :class:`~email.message.EmailMessage` class its functionality is
replaced by the *params* property of the individual header objects
returned by the header access methods.
@@ -524,7 +524,7 @@ Here are the methods of the :class:`Message` class:
to ``False``.
This is a legacy method. On the
- :class:`~email.emailmessage.EmailMessage` class its functionality is
+ :class:`~email.message.EmailMessage` class its functionality is
replaced by the *params* property of the individual header objects
returned by the header access methods.
@@ -579,7 +579,7 @@ Here are the methods of the :class:`Message` class:
header is also added.
This is a legacy method. On the
- :class:`~email.emailmessage.EmailMessage` class its functionality is
+ :class:`~email.message.EmailMessage` class its functionality is
replaced by the ``make_`` and ``add_`` methods.
diff --git a/Doc/library/email.header.rst b/Doc/library/email.header.rst
index c3392a62b8e..f49885b8785 100644
--- a/Doc/library/email.header.rst
+++ b/Doc/library/email.header.rst
@@ -206,7 +206,7 @@ The :mod:`email.header` module also provides the following convenient functions.
.. note::
- This function exists for for backwards compatibility only. For
+ This function exists for backwards compatibility only. For
new code, we recommend using :class:`email.headerregistry.HeaderRegistry`.
@@ -225,5 +225,5 @@ The :mod:`email.header` module also provides the following convenient functions.
.. note::
- This function exists for for backwards compatibility only, and is
+ This function exists for backwards compatibility only, and is
not recommended for use in new code.
diff --git a/Doc/library/exceptions.rst b/Doc/library/exceptions.rst
index bb72032891e..c09e1615a5b 100644
--- a/Doc/library/exceptions.rst
+++ b/Doc/library/exceptions.rst
@@ -429,7 +429,9 @@ The following exceptions are the exceptions that are usually raised.
* Creating a new Python thread.
* :meth:`Joining <threading.Thread.join>` a running daemon thread.
- * :func:`os.fork`.
+ * :func:`os.fork`,
+ * acquiring a lock such as :class:`threading.Lock`, when it is known that
+ the operation would otherwise deadlock.
See also the :func:`sys.is_finalizing` function.
@@ -440,6 +442,11 @@ The following exceptions are the exceptions that are usually raised.
:meth:`threading.Thread.join` can now raise this exception.
+ .. versionchanged:: next
+
+ This exception may be raised when acquiring :class:`threading.Lock`
+ or :class:`threading.RLock`.
+
.. exception:: RecursionError
This exception is derived from :exc:`RuntimeError`. It is raised when the
@@ -1048,7 +1055,7 @@ their subgroups based on the types of the contained exceptions.
subclasses that need a different constructor signature need to
override that rather than :meth:`~object.__init__`. For example, the following
defines an exception group subclass which accepts an exit_code and
- and constructs the group's message from it. ::
+ constructs the group's message from it. ::
class Errors(ExceptionGroup):
def __new__(cls, errors, exit_code):
diff --git a/Doc/library/faulthandler.rst b/Doc/library/faulthandler.rst
index 5058b85bffb..677966a8b2e 100644
--- a/Doc/library/faulthandler.rst
+++ b/Doc/library/faulthandler.rst
@@ -90,7 +90,7 @@ An error will be printed instead of the stack.
Additionally, some compilers do not support :term:`CPython's <CPython>`
implementation of C stack dumps. As a result, a different error may be printed
-instead of the stack, even if the the operating system supports dumping stacks.
+instead of the stack, even if the operating system supports dumping stacks.
.. note::
@@ -228,6 +228,41 @@ handler:
Fatal Python error: Segmentation fault
Current thread 0x00007fb899f39700 (most recent call first):
- File "/home/python/cpython/Lib/ctypes/__init__.py", line 486 in string_at
+ File "/opt/python/Lib/ctypes/__init__.py", line 486 in string_at
File "<stdin>", line 1 in <module>
+
+ Current thread's C stack trace (most recent call first):
+ Binary file "/opt/python/python", at _Py_DumpStack+0x42 [0x5b27f7d7147e]
+ Binary file "/opt/python/python", at +0x32dcbd [0x5b27f7d85cbd]
+ Binary file "/opt/python/python", at +0x32df8a [0x5b27f7d85f8a]
+ Binary file "/usr/lib/libc.so.6", at +0x3def0 [0x77b73226bef0]
+ Binary file "/usr/lib/libc.so.6", at +0x17ef9c [0x77b7323acf9c]
+ Binary file "/opt/python/build/lib.linux-x86_64-3.15/_ctypes.cpython-315d-x86_64-linux-gnu.so", at +0xcdf6 [0x77b7315dddf6]
+ Binary file "/usr/lib/libffi.so.8", at +0x7976 [0x77b73158f976]
+ Binary file "/usr/lib/libffi.so.8", at +0x413c [0x77b73158c13c]
+ Binary file "/usr/lib/libffi.so.8", at ffi_call+0x12e [0x77b73158ef0e]
+ Binary file "/opt/python/build/lib.linux-x86_64-3.15/_ctypes.cpython-315d-x86_64-linux-gnu.so", at +0x15a33 [0x77b7315e6a33]
+ Binary file "/opt/python/build/lib.linux-x86_64-3.15/_ctypes.cpython-315d-x86_64-linux-gnu.so", at +0x164fa [0x77b7315e74fa]
+ Binary file "/opt/python/build/lib.linux-x86_64-3.15/_ctypes.cpython-315d-x86_64-linux-gnu.so", at +0xc624 [0x77b7315dd624]
+ Binary file "/opt/python/python", at _PyObject_MakeTpCall+0xce [0x5b27f7b73883]
+ Binary file "/opt/python/python", at +0x11bab6 [0x5b27f7b73ab6]
+ Binary file "/opt/python/python", at PyObject_Vectorcall+0x23 [0x5b27f7b73b04]
+ Binary file "/opt/python/python", at _PyEval_EvalFrameDefault+0x490c [0x5b27f7cbb302]
+ Binary file "/opt/python/python", at +0x2818e6 [0x5b27f7cd98e6]
+ Binary file "/opt/python/python", at +0x281aab [0x5b27f7cd9aab]
+ Binary file "/opt/python/python", at PyEval_EvalCode+0xc5 [0x5b27f7cd9ba3]
+ Binary file "/opt/python/python", at +0x255957 [0x5b27f7cad957]
+ Binary file "/opt/python/python", at +0x255ab4 [0x5b27f7cadab4]
+ Binary file "/opt/python/python", at _PyEval_EvalFrameDefault+0x6c3e [0x5b27f7cbd634]
+ Binary file "/opt/python/python", at +0x2818e6 [0x5b27f7cd98e6]
+ Binary file "/opt/python/python", at +0x281aab [0x5b27f7cd9aab]
+ Binary file "/opt/python/python", at +0x11b6e1 [0x5b27f7b736e1]
+ Binary file "/opt/python/python", at +0x11d348 [0x5b27f7b75348]
+ Binary file "/opt/python/python", at +0x11d626 [0x5b27f7b75626]
+ Binary file "/opt/python/python", at PyObject_Call+0x20 [0x5b27f7b7565e]
+ Binary file "/opt/python/python", at +0x32a67a [0x5b27f7d8267a]
+ Binary file "/opt/python/python", at +0x32a7f8 [0x5b27f7d827f8]
+ Binary file "/opt/python/python", at +0x32ac1b [0x5b27f7d82c1b]
+ Binary file "/opt/python/python", at Py_RunMain+0x31 [0x5b27f7d82ebe]
+ <truncated rest of calls>
Segmentation fault
diff --git a/Doc/library/fractions.rst b/Doc/library/fractions.rst
index fc7f9a6301a..392b6d40e86 100644
--- a/Doc/library/fractions.rst
+++ b/Doc/library/fractions.rst
@@ -142,7 +142,7 @@ another rational number, or from a string.
.. versionadded:: 3.12
- .. classmethod:: from_float(flt)
+ .. classmethod:: from_float(f)
Alternative constructor which only accepts instances of
:class:`float` or :class:`numbers.Integral`. Beware that
diff --git a/Doc/library/http.cookiejar.rst b/Doc/library/http.cookiejar.rst
index 23ddecf8738..251aea891c3 100644
--- a/Doc/library/http.cookiejar.rst
+++ b/Doc/library/http.cookiejar.rst
@@ -570,7 +570,7 @@ Netscape protocol strictness switches:
Don't allow setting cookies whose path doesn't path-match request URI.
-:attr:`strict_ns_domain` is a collection of flags. Its value is constructed by
+:attr:`~DefaultCookiePolicy.strict_ns_domain` is a collection of flags. Its value is constructed by
or-ing together (for example, ``DomainStrictNoDots|DomainStrictNonDomain`` means
both flags are set).
diff --git a/Doc/library/io.rst b/Doc/library/io.rst
index de5cab5aee6..dfebccb5a9c 100644
--- a/Doc/library/io.rst
+++ b/Doc/library/io.rst
@@ -719,6 +719,9 @@ than raw I/O does.
The optional argument *initial_bytes* is a :term:`bytes-like object` that
contains initial data.
+ Methods may be used from multiple threads without external locking in
+ :term:`free threading` builds.
+
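
For example, several threads can append to the same buffer concurrently
without a lock (a sketch; the interleaving of writes between threads is still
unspecified)::

   import io
   import threading

   buf = io.BytesIO()

   def worker():
       for _ in range(1000):
           buf.write(b'x')    # no external lock is needed around write()

   threads = [threading.Thread(target=worker) for _ in range(4)]
   for t in threads:
       t.start()
   for t in threads:
       t.join()
   assert len(buf.getvalue()) == 4000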
:class:`BytesIO` provides or overrides these methods in addition to those
from :class:`BufferedIOBase` and :class:`IOBase`:
diff --git a/Doc/library/math.rst b/Doc/library/math.rst
index ecb1d4102ca..bf7a00549fc 100644
--- a/Doc/library/math.rst
+++ b/Doc/library/math.rst
@@ -59,6 +59,7 @@ noted otherwise, all return values are floats.
:func:`isnan(x) <isnan>` Check if *x* is a NaN (not a number)
:func:`ldexp(x, i) <ldexp>` ``x * (2**i)``, inverse of function :func:`frexp`
:func:`nextafter(x, y, steps) <nextafter>` Floating-point value *steps* steps after *x* towards *y*
+:func:`signbit(x) <signbit>` Check if *x* is a negative number
:func:`ulp(x) <ulp>` Value of the least significant bit of *x*
**Power, exponential and logarithmic functions**
@@ -431,6 +432,15 @@ Floating point manipulation functions
Added the *steps* argument.
+.. function:: signbit(x)
+
+ Return ``True`` if the sign of *x* is negative and ``False`` otherwise.
+
+ This is useful to detect the sign bit of zeroes, infinities and NaNs.
+
+ .. versionadded:: next
+
+
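A quick illustration of the intended behaviour (a sketch; :func:`!signbit` is
new in this release)::

   import math

   math.signbit(-0.0)           # True: the zero carries a negative sign bit
   math.signbit(0.0)            # False
   math.signbit(float('-inf'))  # True
   math.signbit(2.0)            # False
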
.. function:: ulp(x)
Return the value of the least significant bit of the float *x*:
@@ -794,7 +804,7 @@ Constants
The mathematical constant *τ* = 6.283185..., to available precision.
Tau is a circle constant equal to 2\ *π*, the ratio of a circle's circumference to
its radius. To learn more about Tau, check out Vi Hart's video `Pi is (still)
- Wrong <https://www.youtube.com/watch?v=jG7vhMMXagQ>`_, and start celebrating
+ Wrong <https://vimeo.com/147792667>`_, and start celebrating
`Tau day <https://tauday.com/>`_ by eating twice as much pie!
.. versionadded:: 3.6
diff --git a/Doc/library/mmap.rst b/Doc/library/mmap.rst
index 4e20c07331a..8fca79b23e4 100644
--- a/Doc/library/mmap.rst
+++ b/Doc/library/mmap.rst
@@ -269,7 +269,7 @@ To map anonymous memory, -1 should be passed as the fileno along with the length
Resizing a map created with *access* of :const:`ACCESS_READ` or
:const:`ACCESS_COPY`, will raise a :exc:`TypeError` exception.
- Resizing a map created with with *trackfd* set to ``False``,
+ Resizing a map created with *trackfd* set to ``False``,
will raise a :exc:`ValueError` exception.
**On Windows**: Resizing the map will raise an :exc:`OSError` if there are other
diff --git a/Doc/library/multiprocessing.rst b/Doc/library/multiprocessing.rst
index fc3c1134f97..546876bd925 100644
--- a/Doc/library/multiprocessing.rst
+++ b/Doc/library/multiprocessing.rst
@@ -1118,7 +1118,9 @@ Miscellaneous
Return a context object which has the same attributes as the
:mod:`multiprocessing` module.
- If *method* is ``None`` then the default context is returned.
+ If *method* is ``None`` then the default context is returned. Note that if
+ the global start method has not been set, this will set it to the
+ default method.
Otherwise *method* should be ``'fork'``, ``'spawn'``,
``'forkserver'``. :exc:`ValueError` is raised if the specified
start method is not available. See :ref:`multiprocessing-start-methods`.
@@ -1129,10 +1131,10 @@ Miscellaneous
Return the name of start method used for starting processes.
- If the start method has not been fixed and *allow_none* is false,
- then the start method is fixed to the default and the name is
- returned. If the start method has not been fixed and *allow_none*
- is true then ``None`` is returned.
+ If the global start method has not been set and *allow_none* is
+ ``False``, then the start method is set to the default and the name
+ is returned. If the start method has not been set and *allow_none* is
+ ``True`` then ``None`` is returned.
The return value can be ``'fork'``, ``'spawn'``, ``'forkserver'``
or ``None``. See :ref:`multiprocessing-start-methods`.
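
A short sketch of the difference between inspecting the start method and
setting it as a side effect::

   import multiprocessing as mp

   # Only inspects the global start method; returns None if it has not
   # been set, without setting it as a side effect.
   print(mp.get_start_method(allow_none=True))

   # Using an explicit context avoids touching the global start method.
   ctx = mp.get_context('spawn')
   print(ctx.get_start_method())    # 'spawn'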
diff --git a/Doc/library/pathlib.rst b/Doc/library/pathlib.rst
index 86351e65dc4..47986a2d960 100644
--- a/Doc/library/pathlib.rst
+++ b/Doc/library/pathlib.rst
@@ -1985,7 +1985,7 @@ The :mod:`pathlib.types` module provides types for static type checking.
If *follow_symlinks* is ``False``, return ``True`` only if the path
is a file (without following symlinks); return ``False`` if the path
- is a directory or other other non-file, or if it doesn't exist.
+ is a directory or other non-file, or if it doesn't exist.
.. method:: is_symlink()
diff --git a/Doc/library/pyexpat.rst b/Doc/library/pyexpat.rst
index 2d57cff10a9..5506ac828e5 100644
--- a/Doc/library/pyexpat.rst
+++ b/Doc/library/pyexpat.rst
@@ -16,11 +16,10 @@
references to these attributes should be marked using the :member: role.
-.. warning::
+.. note::
- The :mod:`pyexpat` module is not secure against maliciously
- constructed data. If you need to parse untrusted or unauthenticated data see
- :ref:`xml-vulnerabilities`.
+ If you need to parse untrusted or unauthenticated data, see
+ :ref:`xml-security`.
.. index:: single: Expat
diff --git a/Doc/library/random.rst b/Doc/library/random.rst
index ef0cfb0e76c..b1120b3a4d8 100644
--- a/Doc/library/random.rst
+++ b/Doc/library/random.rst
@@ -447,6 +447,11 @@ Alternative Generator
Override this method in subclasses to customise the
:meth:`~random.getrandbits` behaviour of :class:`!Random` instances.
+ .. method:: Random.randbytes(n)
+
+ Override this method in subclasses to customise the
+ :meth:`~random.randbytes` behaviour of :class:`!Random` instances.
+
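A toy sketch of such a subclass, deriving :meth:`!randbytes` from
:meth:`!getrandbits` so that both customisation points stay consistent::

   import random

   class MyRandom(random.Random):
       def randbytes(self, n):
           # Build the bytes from getrandbits() so that overriding
           # getrandbits() alone also changes randbytes().
           return self.getrandbits(n * 8).to_bytes(n, 'little')

   rng = MyRandom(1234)
   print(rng.randbytes(4))
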
.. class:: SystemRandom([seed])
diff --git a/Doc/library/security_warnings.rst b/Doc/library/security_warnings.rst
index a573c98f73e..70c359cc1c0 100644
--- a/Doc/library/security_warnings.rst
+++ b/Doc/library/security_warnings.rst
@@ -28,7 +28,7 @@ The following modules have specific security considerations:
<subprocess-security>`
* :mod:`tempfile`: :ref:`mktemp is deprecated due to vulnerability to race
conditions <tempfile-mktemp-deprecated>`
-* :mod:`xml`: :ref:`XML vulnerabilities <xml-vulnerabilities>`
+* :mod:`xml`: :ref:`XML security <xml-security>`
* :mod:`zipfile`: :ref:`maliciously prepared .zip files can cause disk volume
exhaustion <zipfile-resources-limitations>`
diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst
index 2dde40c9d92..dde38498206 100644
--- a/Doc/library/shutil.rst
+++ b/Doc/library/shutil.rst
@@ -618,7 +618,8 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules.
*format* is the archive format: one of
"zip" (if the :mod:`zlib` module is available), "tar", "gztar" (if the
:mod:`zlib` module is available), "bztar" (if the :mod:`bz2` module is
- available), or "xztar" (if the :mod:`lzma` module is available).
+ available), "xztar" (if the :mod:`lzma` module is available), or "zstdtar"
+ (if the :mod:`compression.zstd` module is available).
*root_dir* is a directory that will be the root directory of the
archive, all paths in the archive will be relative to it; for example,
@@ -673,6 +674,8 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules.
- *gztar*: gzip'ed tar-file (if the :mod:`zlib` module is available).
- *bztar*: bzip2'ed tar-file (if the :mod:`bz2` module is available).
- *xztar*: xz'ed tar-file (if the :mod:`lzma` module is available).
+ - *zstdtar*: Zstandard compressed tar-file (if the :mod:`compression.zstd`
+ module is available).
You can register new formats or provide your own archiver for any existing
formats, by using :func:`register_archive_format`.
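
For example, assuming the :mod:`compression.zstd` module is available, a
Zstandard compressed tarball can be created like any other format (a sketch;
the resulting suffix is shown as a comment)::

   import shutil

   archive = shutil.make_archive('backup', 'zstdtar', root_dir='project')
   print(archive)    # for example 'backup.tar.zst'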
@@ -716,8 +719,8 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules.
*extract_dir* is the name of the target directory where the archive is
unpacked. If not provided, the current working directory is used.
- *format* is the archive format: one of "zip", "tar", "gztar", "bztar", or
- "xztar". Or any other format registered with
+ *format* is the archive format: one of "zip", "tar", "gztar", "bztar",
+ "xztar", or "zstdtar". Or any other format registered with
:func:`register_unpack_format`. If not provided, :func:`unpack_archive`
will use the archive file name extension and see if an unpacker was
registered for that extension. In case none is found,
@@ -789,6 +792,8 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules.
- *gztar*: gzip'ed tar-file (if the :mod:`zlib` module is available).
- *bztar*: bzip2'ed tar-file (if the :mod:`bz2` module is available).
- *xztar*: xz'ed tar-file (if the :mod:`lzma` module is available).
+ - *zstdtar*: Zstandard compressed tar-file (if the :mod:`compression.zstd`
+ module is available).
You can register new formats or provide your own unpacker for any existing
formats, by using :func:`register_unpack_format`.
diff --git a/Doc/library/socketserver.rst b/Doc/library/socketserver.rst
index 753f12460b8..7fb629f7d2f 100644
--- a/Doc/library/socketserver.rst
+++ b/Doc/library/socketserver.rst
@@ -543,7 +543,7 @@ objects that simplify communication by providing the standard file interface)::
The difference is that the ``readline()`` call in the second handler will call
``recv()`` multiple times until it encounters a newline character, while the
-the first handler had to use a ``recv()`` loop to accumulate data until a
+first handler had to use a ``recv()`` loop to accumulate data until a
newline itself. If it had just used a single ``recv()`` without the loop it
would just have returned what has been received so far from the client.
TCP is stream based: data arrives in the order it was sent, but there no
diff --git a/Doc/library/sys.monitoring.rst b/Doc/library/sys.monitoring.rst
index 0674074b8c0..f62a4011e41 100644
--- a/Doc/library/sys.monitoring.rst
+++ b/Doc/library/sys.monitoring.rst
@@ -137,7 +137,8 @@ The following events are supported:
.. monitoring-event:: PY_UNWIND
- Exit from a Python function during exception unwinding.
+ Exit from a Python function during exception unwinding. This includes
+ exceptions raised directly within the function that are allowed to
+ propagate out of it.
.. monitoring-event:: PY_YIELD
@@ -171,7 +172,7 @@ events, use the expression ``PY_RETURN | PY_START``.
if get_events(DEBUGGER_ID) == NO_EVENTS:
...
-Events are divided into three groups:
+  Setting this deactivates all events.
.. _monitoring-event-local:
@@ -243,20 +244,23 @@ raise an exception unless it would be visible to other code.
To allow tools to monitor for real exceptions without slowing down generators
and coroutines, the :monitoring-event:`STOP_ITERATION` event is provided.
-:monitoring-event:`STOP_ITERATION` can be locally disabled, unlike :monitoring-event:`RAISE`.
+:monitoring-event:`STOP_ITERATION` can be locally disabled, unlike
+:monitoring-event:`RAISE`.
-Note that the :monitoring-event:`STOP_ITERATION` event and the :monitoring-event:`RAISE`
-event for a :exc:`StopIteration` exception are equivalent, and are treated as interchangeable
-when generating events. Implementations will favor :monitoring-event:`STOP_ITERATION` for
-performance reasons, but may generate a :monitoring-event:`RAISE` event with a :exc:`StopIteration`.
+Note that the :monitoring-event:`STOP_ITERATION` event and the
+:monitoring-event:`RAISE` event for a :exc:`StopIteration` exception are
+equivalent, and are treated as interchangeable when generating events.
+Implementations will favor :monitoring-event:`STOP_ITERATION` for performance
+reasons, but may generate a :monitoring-event:`RAISE` event with a
+:exc:`StopIteration`.
Turning events on and off
-------------------------
In order to monitor an event, it must be turned on and a corresponding callback
-must be registered.
-Events can be turned on or off by setting the events either globally or
-for a particular code object.
+must be registered. Events can be turned on or off globally, for a
+particular code object, or both. An event will trigger only once, even if
+it is turned on both globally and locally.
Setting events globally
@@ -292,10 +296,6 @@ in Python (see :ref:`c-api-monitoring`).
Activates all the local events for *code* which are set in *event_set*.
Raises a :exc:`ValueError` if *tool_id* is not in use.
-Local events add to global events, but do not mask them.
-In other words, all global events will trigger for a code object,
-regardless of the local events.
-
Disabling events
''''''''''''''''
@@ -325,8 +325,6 @@ except for a few breakpoints.
Registering callback functions
------------------------------
-To register a callable for events call
-
.. function:: register_callback(tool_id: int, event: int, func: Callable | None, /) -> Callable | None
Registers the callable *func* for the *event* with the given *tool_id*
@@ -335,12 +333,16 @@ To register a callable for events call
it is unregistered and returned.
Otherwise :func:`register_callback` returns ``None``.
-
Functions can be unregistered by calling
``sys.monitoring.register_callback(tool_id, event, None)``.
Callback functions can be registered and unregistered at any time.
+Callbacks are called only once, regardless of whether the event is turned on
+both globally and locally. As such, if your code may turn an event on both
+globally and locally, the callback needs to be written to handle either
+trigger.
+
Registering or unregistering a callback function will generate a :func:`sys.audit` event.
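
A minimal sketch of registering a callback and enabling an event locally for a
single code object (the tool id and names are illustrative)::

   import sys

   mon = sys.monitoring
   TOOL = mon.PROFILER_ID
   mon.use_tool_id(TOOL, "example-profiler")

   def on_py_start(code, instruction_offset):
       # Called once per event, whether PY_START is enabled globally,
       # locally, or both.
       print(f"entering {code.co_qualname}")

   def demo():
       return 42

   mon.register_callback(TOOL, mon.events.PY_START, on_py_start)
   mon.set_local_events(TOOL, demo.__code__, mon.events.PY_START)

   demo()                  # prints "entering demo"

   mon.free_tool_id(TOOL)  # release the tool id when monitoring is done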
@@ -353,37 +355,46 @@ Callback function arguments
that there are no arguments to the call.
When an active event occurs, the registered callback function is called.
+Returning any object other than :data:`DISABLE` from a callback function
+has no effect.
Different events will provide the callback function with different arguments, as follows:
* :monitoring-event:`PY_START` and :monitoring-event:`PY_RESUME`::
- func(code: CodeType, instruction_offset: int) -> DISABLE | Any
+ func(code: CodeType, instruction_offset: int) -> object
* :monitoring-event:`PY_RETURN` and :monitoring-event:`PY_YIELD`::
- func(code: CodeType, instruction_offset: int, retval: object) -> DISABLE | Any
+ func(code: CodeType, instruction_offset: int, retval: object) -> object
-* :monitoring-event:`CALL`, :monitoring-event:`C_RAISE` and :monitoring-event:`C_RETURN`::
+* :monitoring-event:`CALL`, :monitoring-event:`C_RAISE` and :monitoring-event:`C_RETURN`
+ (*arg0* can be :data:`MISSING` specifically)::
- func(code: CodeType, instruction_offset: int, callable: object, arg0: object | MISSING) -> DISABLE | Any
+ func(code: CodeType, instruction_offset: int, callable: object, arg0: object) -> object
+ *code* represents the code object where the call is being made, while
+ *callable* is the object that is about to be called (and thus
+ triggered the event).
If there are no arguments, *arg0* is set to :data:`sys.monitoring.MISSING`.
+ For instance methods, *callable* will be the function object as found on the
+ class with *arg0* set to the instance (i.e. the ``self`` argument to the
+ method).
+
* :monitoring-event:`RAISE`, :monitoring-event:`RERAISE`, :monitoring-event:`EXCEPTION_HANDLED`,
:monitoring-event:`PY_UNWIND`, :monitoring-event:`PY_THROW` and :monitoring-event:`STOP_ITERATION`::
- func(code: CodeType, instruction_offset: int, exception: BaseException) -> DISABLE | Any
+ func(code: CodeType, instruction_offset: int, exception: BaseException) -> object
* :monitoring-event:`LINE`::
- func(code: CodeType, line_number: int) -> DISABLE | Any
+ func(code: CodeType, line_number: int) -> object
* :monitoring-event:`BRANCH_LEFT`, :monitoring-event:`BRANCH_RIGHT` and :monitoring-event:`JUMP`::
- func(code: CodeType, instruction_offset: int, destination_offset: int) -> DISABLE | Any
+ func(code: CodeType, instruction_offset: int, destination_offset: int) -> object
Note that the *destination_offset* is where the code will next execute.
* :monitoring-event:`INSTRUCTION`::
- func(code: CodeType, instruction_offset: int) -> DISABLE | Any
+ func(code: CodeType, instruction_offset: int) -> object
diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst
index 1626a89a073..05bc7cfb9dc 100644
--- a/Doc/library/sys.rst
+++ b/Doc/library/sys.rst
@@ -953,6 +953,8 @@ always available. Unless explicitly noted otherwise, all variables are read-only
This function should be used for internal and specialized purposes only.
It is not guaranteed to exist in all implementations of Python.
+ .. versionadded:: 3.12
+
.. function:: getobjects(limit[, type])
diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst
index 7cec108a5bd..99e8ef7b886 100644
--- a/Doc/library/tarfile.rst
+++ b/Doc/library/tarfile.rst
@@ -18,8 +18,8 @@ higher-level functions in :ref:`shutil <archiving-operations>`.
Some facts and figures:
-* reads and writes :mod:`gzip`, :mod:`bz2` and :mod:`lzma` compressed archives
- if the respective modules are available.
+* reads and writes :mod:`gzip`, :mod:`bz2`, :mod:`compression.zstd`, and
+ :mod:`lzma` compressed archives if the respective modules are available.
* read/write support for the POSIX.1-1988 (ustar) format.
@@ -47,6 +47,10 @@ Some facts and figures:
or paths outside of the destination. Previously, the filter strategy
was equivalent to :func:`fully_trusted <fully_trusted_filter>`.
+.. versionchanged:: 3.14
+
+ Added support for Zstandard compression using :mod:`compression.zstd`.
+
.. function:: open(name=None, mode='r', fileobj=None, bufsize=10240, **kwargs)
Return a :class:`TarFile` object for the pathname *name*. For detailed
@@ -59,8 +63,8 @@ Some facts and figures:
+------------------+---------------------------------------------+
| mode | action |
+==================+=============================================+
- | ``'r' or 'r:*'`` | Open for reading with transparent |
- | | compression (recommended). |
+ | ``'r'`` or | Open for reading with transparent |
+ | ``'r:*'`` | compression (recommended). |
+------------------+---------------------------------------------+
| ``'r:'`` | Open for reading exclusively without |
| | compression. |
@@ -71,6 +75,8 @@ Some facts and figures:
+------------------+---------------------------------------------+
| ``'r:xz'`` | Open for reading with lzma compression. |
+------------------+---------------------------------------------+
+ | ``'r:zst'`` | Open for reading with Zstandard compression.|
+ +------------------+---------------------------------------------+
| ``'x'`` or | Create a tarfile exclusively without |
| ``'x:'`` | compression. |
| | Raise a :exc:`FileExistsError` exception |
@@ -88,10 +94,15 @@ Some facts and figures:
| | Raise a :exc:`FileExistsError` exception |
| | if it already exists. |
+------------------+---------------------------------------------+
- | ``'a' or 'a:'`` | Open for appending with no compression. The |
- | | file is created if it does not exist. |
+ | ``'x:zst'`` | Create a tarfile with Zstandard compression.|
+ | | Raise a :exc:`FileExistsError` exception |
+ | | if it already exists. |
+ +------------------+---------------------------------------------+
+ | ``'a'`` or | Open for appending with no compression. The |
+ | ``'a:'`` | file is created if it does not exist. |
+------------------+---------------------------------------------+
- | ``'w' or 'w:'`` | Open for uncompressed writing. |
+ | ``'w'`` or | Open for uncompressed writing. |
+ | ``'w:'`` | |
+------------------+---------------------------------------------+
| ``'w:gz'`` | Open for gzip compressed writing. |
+------------------+---------------------------------------------+
@@ -99,6 +110,8 @@ Some facts and figures:
+------------------+---------------------------------------------+
| ``'w:xz'`` | Open for lzma compressed writing. |
+------------------+---------------------------------------------+
+ | ``'w:zst'`` | Open for Zstandard compressed writing. |
+ +------------------+---------------------------------------------+
Note that ``'a:gz'``, ``'a:bz2'`` or ``'a:xz'`` is not possible. If *mode*
is not suitable to open a certain (compressed) file for reading,
@@ -115,6 +128,15 @@ Some facts and figures:
For modes ``'w:xz'``, ``'x:xz'`` and ``'w|xz'``, :func:`tarfile.open` accepts the
keyword argument *preset* to specify the compression level of the file.
+ For modes ``'w:zst'``, ``'x:zst'`` and ``'w|zst'``, :func:`tarfile.open`
+ accepts the keyword argument *level* to specify the compression level of
+ the file. The keyword argument *options* may also be passed, providing
+ advanced Zstandard compression parameters described by
+ :class:`~compression.zstd.CompressionParameter`. The keyword argument
+ *zstd_dict* can be passed to provide a :class:`~compression.zstd.ZstdDict`,
+ a Zstandard dictionary used to improve compression of smaller amounts of
+ data.
+
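A sketch of creating and reading a Zstandard compressed archive with these
keyword arguments (assuming the :mod:`compression.zstd` module is available)::

   import tarfile

   with tarfile.open('backup.tar.zst', 'w:zst', level=10) as tar:
       tar.add('project')

   with tarfile.open('backup.tar.zst', 'r:zst') as tar:
       tar.extractall('restore', filter='data')
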
For special purposes, there is a second format for *mode*:
``'filemode|[compression]'``. :func:`tarfile.open` will return a :class:`TarFile`
object that processes its data as a stream of blocks. No random seeking will
@@ -146,6 +168,9 @@ Some facts and figures:
| ``'r|xz'`` | Open an lzma compressed *stream* for |
| | reading. |
+-------------+--------------------------------------------+
+ | ``'r|zst'`` | Open a Zstandard compressed *stream* for |
+ | | reading. |
+ +-------------+--------------------------------------------+
| ``'w|'`` | Open an uncompressed *stream* for writing. |
+-------------+--------------------------------------------+
| ``'w|gz'`` | Open a gzip compressed *stream* for |
@@ -157,6 +182,9 @@ Some facts and figures:
| ``'w|xz'`` | Open an lzma compressed *stream* for |
| | writing. |
+-------------+--------------------------------------------+
+ | ``'w|zst'`` | Open a Zstandard compressed *stream* for |
+ | | writing. |
+ +-------------+--------------------------------------------+
.. versionchanged:: 3.5
The ``'x'`` (exclusive creation) mode was added.
diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst
index 52fefd590da..cabb41442f8 100644
--- a/Doc/library/threading.rst
+++ b/Doc/library/threading.rst
@@ -621,7 +621,7 @@ since it is impossible to detect the termination of alien threads.
an error to :meth:`~Thread.join` a thread before it has been started
and attempts to do so raise the same exception.
- If an attempt is made to join a running daemonic thread in in late stages
+ If an attempt is made to join a running daemonic thread in late stages
of :term:`Python finalization <interpreter shutdown>` :meth:`!join`
raises a :exc:`PythonFinalizationError`.
diff --git a/Doc/library/time.rst b/Doc/library/time.rst
index 542493a82af..29b695a9b19 100644
--- a/Doc/library/time.rst
+++ b/Doc/library/time.rst
@@ -712,13 +712,18 @@ Functions
Clock:
- * On Windows, call ``GetSystemTimeAsFileTime()``.
+ * On Windows, call ``GetSystemTimePreciseAsFileTime()``.
* Call ``clock_gettime(CLOCK_REALTIME)`` if available.
* Otherwise, call ``gettimeofday()``.
Use :func:`time_ns` to avoid the precision loss caused by the :class:`float`
type.
+.. versionchanged:: 3.13
+
+ On Windows, calls ``GetSystemTimePreciseAsFileTime()`` instead of
+ ``GetSystemTimeAsFileTime()``.
+
.. function:: time_ns() -> int
diff --git a/Doc/library/unittest.rst b/Doc/library/unittest.rst
index dcdda1719bf..d526e835caa 100644
--- a/Doc/library/unittest.rst
+++ b/Doc/library/unittest.rst
@@ -1131,7 +1131,7 @@ Test cases
.. versionchanged:: 3.3
Added the *msg* keyword argument when used as a context manager.
- .. method:: assertLogs(logger=None, level=None)
+ .. method:: assertLogs(logger=None, level=None, formatter=None)
A context manager to test that at least one message is logged on
the *logger* or one of its children, with at least the given
@@ -1146,6 +1146,10 @@ Test cases
its string equivalent (for example either ``"ERROR"`` or
:const:`logging.ERROR`). The default is :const:`logging.INFO`.
+ If given, *formatter* should be a :class:`logging.Formatter` object.
+ The default is a formatter with format string
+      ``"%(levelname)s:%(name)s:%(message)s"``.
+
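      A sketch of how the new parameter might be used, assuming the
      *formatter* is applied to the captured ``output`` list::

         import logging
         import unittest

         class FormatterExample(unittest.TestCase):
             def test_formatted_output(self):
                 fmt = logging.Formatter('%(name)s -> %(message)s')
                 with self.assertLogs('spam', level='INFO', formatter=fmt) as cm:
                     logging.getLogger('spam').info('eggs')
                 self.assertEqual(cm.output, ['spam -> eggs'])
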
The test passes if at least one message emitted inside the ``with``
block matches the *logger* and *level* conditions, otherwise it fails.
@@ -1173,6 +1177,9 @@ Test cases
.. versionadded:: 3.4
+ .. versionchanged:: next
+ Now accepts a *formatter* to control how messages are formatted.
+
.. method:: assertNoLogs(logger=None, level=None)
A context manager to test that no messages are logged on
diff --git a/Doc/library/venv.rst b/Doc/library/venv.rst
index bed799aedfd..f16e24eac08 100644
--- a/Doc/library/venv.rst
+++ b/Doc/library/venv.rst
@@ -105,36 +105,52 @@ The command, if run with ``-h``, will show the available options::
Creates virtual Python environments in one or more target directories.
- positional arguments:
- ENV_DIR A directory to create the environment in.
-
- options:
- -h, --help show this help message and exit
- --system-site-packages
- Give the virtual environment access to the system
- site-packages dir.
- --symlinks Try to use symlinks rather than copies, when
- symlinks are not the default for the platform.
- --copies Try to use copies rather than symlinks, even when
- symlinks are the default for the platform.
- --clear Delete the contents of the environment directory
- if it already exists, before environment creation.
- --upgrade Upgrade the environment directory to use this
- version of Python, assuming Python has been
- upgraded in-place.
- --without-pip Skips installing or upgrading pip in the virtual
- environment (pip is bootstrapped by default)
- --prompt PROMPT Provides an alternative prompt prefix for this
- environment.
- --upgrade-deps Upgrade core dependencies (pip) to the latest
- version in PyPI
- --without-scm-ignore-files
- Skips adding SCM ignore files to the environment
- directory (Git is supported by default).
-
Once an environment has been created, you may wish to activate it, e.g. by
sourcing an activate script in its bin directory.
+.. _venv-cli:
+.. program:: venv
+
+.. option:: ENV_DIR
+
+ A required argument specifying the directory to create the environment in.
+
+.. option:: --system-site-packages
+
+ Give the virtual environment access to the system site-packages directory.
+
+.. option:: --symlinks
+
+ Try to use symlinks rather than copies, when symlinks are not the default for the platform.
+
+.. option:: --copies
+
+ Try to use copies rather than symlinks, even when symlinks are the default for the platform.
+
+.. option:: --clear
+
+ Delete the contents of the environment directory if it already exists, before environment creation.
+
+.. option:: --upgrade
+
+ Upgrade the environment directory to use this version of Python, assuming Python has been upgraded in-place.
+
+.. option:: --without-pip
+
+ Skips installing or upgrading pip in the virtual environment (pip is bootstrapped by default).
+
+.. option:: --prompt <PROMPT>
+
+ Provides an alternative prompt prefix for this environment.
+
+.. option:: --upgrade-deps
+
+ Upgrade core dependencies (pip) to the latest version in PyPI.
+
+.. option:: --without-scm-ignore-files
+
+ Skips adding SCM ignore files to the environment directory (Git is supported by default).
+
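For example, the options can be combined in a single invocation
(illustrative)::

   python -m venv --upgrade-deps --prompt my-project .venv
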
.. versionchanged:: 3.4
Installs pip by default, added the ``--without-pip`` and ``--copies``
diff --git a/Doc/library/xml.dom.minidom.rst b/Doc/library/xml.dom.minidom.rst
index 00a18751207..9ffedf7366a 100644
--- a/Doc/library/xml.dom.minidom.rst
+++ b/Doc/library/xml.dom.minidom.rst
@@ -19,11 +19,10 @@ not already proficient with the DOM should consider using the
:mod:`xml.etree.ElementTree` module for their XML processing instead.
-.. warning::
+.. note::
- The :mod:`xml.dom.minidom` module is not secure against
- maliciously constructed data. If you need to parse untrusted or
- unauthenticated data see :ref:`xml-vulnerabilities`.
+ If you need to parse untrusted or unauthenticated data, see
+ :ref:`xml-security`.
DOM applications typically start by parsing some XML into a DOM. With
diff --git a/Doc/library/xml.dom.pulldom.rst b/Doc/library/xml.dom.pulldom.rst
index fd96765cbe3..8bceeecd463 100644
--- a/Doc/library/xml.dom.pulldom.rst
+++ b/Doc/library/xml.dom.pulldom.rst
@@ -19,11 +19,10 @@ responsible for explicitly pulling events from the stream, looping over those
events until either processing is finished or an error condition occurs.
-.. warning::
+.. note::
- The :mod:`xml.dom.pulldom` module is not secure against
- maliciously constructed data. If you need to parse untrusted or
- unauthenticated data see :ref:`xml-vulnerabilities`.
+ If you need to parse untrusted or unauthenticated data, see
+ :ref:`xml-security`.
.. versionchanged:: 3.7.1
diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst
index 1daf6628013..00075ac2a23 100644
--- a/Doc/library/xml.etree.elementtree.rst
+++ b/Doc/library/xml.etree.elementtree.rst
@@ -20,11 +20,10 @@ for parsing and creating XML data.
The :mod:`!xml.etree.cElementTree` module is deprecated.
-.. warning::
+.. note::
- The :mod:`xml.etree.ElementTree` module is not secure against
- maliciously constructed data. If you need to parse untrusted or
- unauthenticated data see :ref:`xml-vulnerabilities`.
+ If you need to parse untrusted or unauthenticated data, see
+ :ref:`xml-security`.
Tutorial
--------
diff --git a/Doc/library/xml.rst b/Doc/library/xml.rst
index d4959953989..28465219a1a 100644
--- a/Doc/library/xml.rst
+++ b/Doc/library/xml.rst
@@ -15,12 +15,10 @@ XML Processing Modules
Python's interfaces for processing XML are grouped in the ``xml`` package.
-.. warning::
+.. note::
- The XML modules are not secure against erroneous or maliciously
- constructed data. If you need to parse untrusted or
- unauthenticated data see the :ref:`xml-vulnerabilities` and
- :ref:`defusedxml-package` sections.
+ If you need to parse untrusted or unauthenticated data, see
+ :ref:`xml-security`.
It is important to note that modules in the :mod:`xml` package require that
there be at least one SAX-compliant XML parser available. The Expat parser is
@@ -47,46 +45,22 @@ The XML handling submodules are:
* :mod:`xml.parsers.expat`: the Expat parser binding
+.. _xml-security:
.. _xml-vulnerabilities:
-XML vulnerabilities
--------------------
+XML security
+------------
-The XML processing modules are not secure against maliciously constructed data.
An attacker can abuse XML features to carry out denial of service attacks,
access local files, generate network connections to other machines, or
circumvent firewalls.
-The following table gives an overview of the known attacks and whether
-the various modules are vulnerable to them.
-
-========================= ================== ================== ================== ================== ==================
-kind sax etree minidom pulldom xmlrpc
-========================= ================== ================== ================== ================== ==================
-billion laughs **Vulnerable** (1) **Vulnerable** (1) **Vulnerable** (1) **Vulnerable** (1) **Vulnerable** (1)
-quadratic blowup **Vulnerable** (1) **Vulnerable** (1) **Vulnerable** (1) **Vulnerable** (1) **Vulnerable** (1)
-external entity expansion Safe (5) Safe (2) Safe (3) Safe (5) Safe (4)
-`DTD`_ retrieval Safe (5) Safe Safe Safe (5) Safe
-decompression bomb Safe Safe Safe Safe **Vulnerable**
-large tokens **Vulnerable** (6) **Vulnerable** (6) **Vulnerable** (6) **Vulnerable** (6) **Vulnerable** (6)
-========================= ================== ================== ================== ================== ==================
-
-1. Expat 2.4.1 and newer is not vulnerable to the "billion laughs" and
- "quadratic blowup" vulnerabilities. Items still listed as vulnerable due to
- potential reliance on system-provided libraries. Check
- :const:`!pyexpat.EXPAT_VERSION`.
-2. :mod:`xml.etree.ElementTree` doesn't expand external entities and raises a
- :exc:`~xml.etree.ElementTree.ParseError` when an entity occurs.
-3. :mod:`xml.dom.minidom` doesn't expand external entities and simply returns
- the unexpanded entity verbatim.
-4. :mod:`xmlrpc.client` doesn't expand external entities and omits them.
-5. Since Python 3.7.1, external general entities are no longer processed by
- default.
-6. Expat 2.6.0 and newer is not vulnerable to denial of service
- through quadratic runtime caused by parsing large tokens.
- Items still listed as vulnerable due to
- potential reliance on system-provided libraries. Check
- :const:`!pyexpat.EXPAT_VERSION`.
+Expat versions lower than 2.6.0 may be vulnerable to "billion laughs",
+"quadratic blowup" and "large tokens". Python may be vulnerable if it uses such
+older versions of Expat as a system-provided library.
+Check :const:`!pyexpat.EXPAT_VERSION`.
+
+:mod:`xmlrpc` is **vulnerable** to the "decompression bomb" attack.
billion laughs / exponential entity expansion
@@ -103,16 +77,6 @@ quadratic blowup entity expansion
efficient as the exponential case but it avoids triggering parser countermeasures
that forbid deeply nested entities.
-external entity expansion
- Entity declarations can contain more than just text for replacement. They can
- also point to external resources or local files. The XML
- parser accesses the resource and embeds the content into the XML document.
-
-`DTD`_ retrieval
- Some XML libraries like Python's :mod:`xml.dom.pulldom` retrieve document type
- definitions from remote or local locations. The feature has similar
- implications as the external entity expansion issue.
-
decompression bomb
Decompression bombs (aka `ZIP bomb`_) apply to all XML libraries
that can parse compressed XML streams such as gzipped HTTP streams or
@@ -126,21 +90,5 @@ large tokens
be used to cause denial of service in the application parsing XML.
The issue is known as :cve:`2023-52425`.
-The documentation for :pypi:`defusedxml` on PyPI has further information about
-all known attack vectors with examples and references.
-
-.. _defusedxml-package:
-
-The :mod:`!defusedxml` Package
-------------------------------
-
-:pypi:`defusedxml` is a pure Python package with modified subclasses of all stdlib
-XML parsers that prevent any potentially malicious operation. Use of this
-package is recommended for any server code that parses untrusted XML data. The
-package also ships with example exploits and extended documentation on more
-XML exploits such as XPath injection.
-
-
.. _Billion Laughs: https://en.wikipedia.org/wiki/Billion_laughs
.. _ZIP bomb: https://en.wikipedia.org/wiki/Zip_bomb
-.. _DTD: https://en.wikipedia.org/wiki/Document_type_definition
diff --git a/Doc/library/xml.sax.rst b/Doc/library/xml.sax.rst
index c60e9e505f7..5fa92645a44 100644
--- a/Doc/library/xml.sax.rst
+++ b/Doc/library/xml.sax.rst
@@ -18,11 +18,10 @@ SAX exceptions and the convenience functions which will be most used by users of
the SAX API.
-.. warning::
+.. note::
- The :mod:`xml.sax` module is not secure against maliciously
- constructed data. If you need to parse untrusted or unauthenticated data see
- :ref:`xml-vulnerabilities`.
+ If you need to parse untrusted or unauthenticated data, see
+ :ref:`xml-security`.
.. versionchanged:: 3.7.1
diff --git a/Doc/library/xmlrpc.client.rst b/Doc/library/xmlrpc.client.rst
index 654154cb43d..547cb50be78 100644
--- a/Doc/library/xmlrpc.client.rst
+++ b/Doc/library/xmlrpc.client.rst
@@ -24,8 +24,8 @@ between conformable Python objects and XML on the wire.
.. warning::
The :mod:`xmlrpc.client` module is not secure against maliciously
- constructed data. If you need to parse untrusted or unauthenticated data see
- :ref:`xml-vulnerabilities`.
+ constructed data. If you need to parse untrusted or unauthenticated data,
+ see :ref:`xml-security`.
.. versionchanged:: 3.5
diff --git a/Doc/library/xmlrpc.server.rst b/Doc/library/xmlrpc.server.rst
index 06169c7eca8..2a8f6f8d5fc 100644
--- a/Doc/library/xmlrpc.server.rst
+++ b/Doc/library/xmlrpc.server.rst
@@ -20,8 +20,8 @@ servers written in Python. Servers can either be free standing, using
.. warning::
The :mod:`xmlrpc.server` module is not secure against maliciously
- constructed data. If you need to parse untrusted or unauthenticated data see
- :ref:`xml-vulnerabilities`.
+ constructed data. If you need to parse untrusted or unauthenticated data,
+ see :ref:`xml-security`.
.. include:: ../includes/wasm-notavail.rst
diff --git a/Doc/library/zipfile.rst b/Doc/library/zipfile.rst
index 6a4fa67332e..bf9136a2139 100644
--- a/Doc/library/zipfile.rst
+++ b/Doc/library/zipfile.rst
@@ -129,14 +129,28 @@ The module defines the following items:
.. versionadded:: 3.3
+.. data:: ZIP_ZSTANDARD
+
+ The numeric constant for Zstandard compression. This requires the
+ :mod:`compression.zstd` module.
+
.. note::
- The ZIP file format specification has included support for bzip2 compression
- since 2001, and for LZMA compression since 2006. However, some tools
- (including older Python releases) do not support these compression
- methods, and may either refuse to process the ZIP file altogether,
- or fail to extract individual files.
+ In APPNOTE 6.3.7, the method ID ``20`` was assigned to Zstandard
+ compression. This was changed in APPNOTE 6.3.8 to method ID ``93`` to
+ avoid conflicts, with method ID ``20`` being deprecated. For
+ compatibility, the :mod:`!zipfile` module reads both method IDs but will
+ only write data with method ID ``93``.
+
+ .. versionadded:: 3.14
+
+.. note::
+ The ZIP file format specification has included support for bzip2 compression
+   since 2001, for LZMA compression since 2006, and for Zstandard compression
+   since 2020. However, some tools (including older Python releases) do not
+   support these compression methods, and may either refuse to process the ZIP
+   file altogether, or fail to extract individual files.
.. seealso::
@@ -176,10 +190,11 @@ ZipFile Objects
*compression* is the ZIP compression method to use when writing the archive,
and should be :const:`ZIP_STORED`, :const:`ZIP_DEFLATED`,
- :const:`ZIP_BZIP2` or :const:`ZIP_LZMA`; unrecognized
- values will cause :exc:`NotImplementedError` to be raised. If
- :const:`ZIP_DEFLATED`, :const:`ZIP_BZIP2` or :const:`ZIP_LZMA` is specified
- but the corresponding module (:mod:`zlib`, :mod:`bz2` or :mod:`lzma`) is not
+ :const:`ZIP_BZIP2`, :const:`ZIP_LZMA`, or :const:`ZIP_ZSTANDARD`;
+ unrecognized values will cause :exc:`NotImplementedError` to be raised. If
+ :const:`ZIP_DEFLATED`, :const:`ZIP_BZIP2`, :const:`ZIP_LZMA`, or
+ :const:`ZIP_ZSTANDARD` is specified but the corresponding module
+ (:mod:`zlib`, :mod:`bz2`, :mod:`lzma`, or :mod:`compression.zstd`) is not
available, :exc:`RuntimeError` is raised. The default is :const:`ZIP_STORED`.
If *allowZip64* is ``True`` (the default) zipfile will create ZIP files that
@@ -194,6 +209,10 @@ ZipFile Objects
(see :class:`zlib <zlib.compressobj>` for more information).
When using :const:`ZIP_BZIP2` integers ``1`` through ``9`` are accepted
(see :class:`bz2 <bz2.BZ2File>` for more information).
+ When using :const:`ZIP_ZSTANDARD` integers ``-131072`` through ``22`` are
+ commonly accepted (see
+ :attr:`CompressionParameter.compression_level <compression.zstd.CompressionParameter.compression_level>`
+ for more on retrieving valid values and their meaning).
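
   A sketch of writing a Zstandard compressed archive (assuming Python was
   built with the :mod:`compression.zstd` module)::

      from zipfile import ZipFile, ZIP_ZSTANDARD

      with ZipFile('archive.zip', 'w', compression=ZIP_ZSTANDARD,
                   compresslevel=10) as zf:
          zf.write('data.txt')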
The *strict_timestamps* argument, when set to ``False``, allows to
zip files older than 1980-01-01 at the cost of setting the
@@ -415,9 +434,10 @@ ZipFile Objects
read or append. *pwd* is the password used for encrypted files as a :class:`bytes`
object and, if specified, overrides the default password set with :meth:`setpassword`.
Calling :meth:`read` on a ZipFile that uses a compression method other than
- :const:`ZIP_STORED`, :const:`ZIP_DEFLATED`, :const:`ZIP_BZIP2` or
- :const:`ZIP_LZMA` will raise a :exc:`NotImplementedError`. An error will also
- be raised if the corresponding compression module is not available.
+ :const:`ZIP_STORED`, :const:`ZIP_DEFLATED`, :const:`ZIP_BZIP2`,
+ :const:`ZIP_LZMA`, or :const:`ZIP_ZSTANDARD` will raise a
+ :exc:`NotImplementedError`. An error will also be raised if the
+ corresponding compression module is not available.
.. versionchanged:: 3.6
Calling :meth:`read` on a closed ZipFile will raise a :exc:`ValueError`.
diff --git a/Doc/tools/.nitignore b/Doc/tools/.nitignore
index e3bcb968128..eb760e0fac5 100644
--- a/Doc/tools/.nitignore
+++ b/Doc/tools/.nitignore
@@ -15,7 +15,6 @@ Doc/extending/extending.rst
Doc/library/ast.rst
Doc/library/asyncio-extending.rst
Doc/library/email.charset.rst
-Doc/library/email.compat32-message.rst
Doc/library/email.parser.rst
Doc/library/exceptions.rst
Doc/library/functools.rst
@@ -73,6 +72,5 @@ Doc/whatsnew/3.5.rst
Doc/whatsnew/3.6.rst
Doc/whatsnew/3.7.rst
Doc/whatsnew/3.8.rst
-Doc/whatsnew/3.9.rst
Doc/whatsnew/3.10.rst
Doc/whatsnew/3.11.rst
diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst
index a5867b489e0..cad49e2deeb 100644
--- a/Doc/using/cmdline.rst
+++ b/Doc/using/cmdline.rst
@@ -653,7 +653,7 @@ Miscellaneous options
.. versionadded:: 3.13
* :samp:`-X thread_inherit_context={0,1}` causes :class:`~threading.Thread`
- to, by default, use a copy of context of of the caller of
+ to, by default, use a copy of context of the caller of
``Thread.start()`` when starting. Otherwise, threads will start
with an empty context. If unset, the value of this option defaults
to ``1`` on free-threaded builds and to ``0`` otherwise. See also
@@ -1284,7 +1284,7 @@ conflict.
.. envvar:: PYTHON_THREAD_INHERIT_CONTEXT
If this variable is set to ``1`` then :class:`~threading.Thread` will,
- by default, use a copy of context of of the caller of ``Thread.start()``
+ by default, use a copy of context of the caller of ``Thread.start()``
when starting. Otherwise, new threads will start with an empty context.
If unset, this variable defaults to ``1`` on free-threaded builds and to
``0`` otherwise. See also :option:`-X thread_inherit_context<-X>`.
diff --git a/Doc/using/configure.rst b/Doc/using/configure.rst
index df81a330549..e5fe3c72b1b 100644
--- a/Doc/using/configure.rst
+++ b/Doc/using/configure.rst
@@ -29,6 +29,9 @@ Features and minimum versions required to build CPython:
* Tcl/Tk 8.5.12 for the :mod:`tkinter` module.
+* `libmpdec <https://www.bytereef.org/mpdecimal/doc/libmpdec/>`_ 2.5.0
+ for the :mod:`decimal` module.
+
* Autoconf 2.72 and aclocal 1.16.5 are required to regenerate the
:file:`configure` script.
@@ -683,6 +686,13 @@ also be used to improve performance.
not compiled. This includes both the functionality to schedule code to be executed
and the functionality to receive code to be executed.
+ .. c:macro:: Py_REMOTE_DEBUG
+
+ This macro is defined by default, unless Python is configured with
+ :option:`--without-remote-debug`.
+
+ Note that even if the macro is defined, remote debugging may not be
+ available (for example, on an incompatible platform).
.. versionadded:: 3.14
diff --git a/Doc/using/ios.rst b/Doc/using/ios.rst
index 7d5c6331bef..0fb28f8c866 100644
--- a/Doc/using/ios.rst
+++ b/Doc/using/ios.rst
@@ -298,9 +298,9 @@ To add Python to an iOS Xcode project:
* Signal handlers (:c:member:`PyConfig.install_signal_handlers`) are *enabled*;
* System logging (:c:member:`PyConfig.use_system_logger`) is *enabled*
(optional, but strongly recommended; this is enabled by default);
- * ``PYTHONHOME`` for the interpreter is configured to point at the
+ * :envvar:`PYTHONHOME` for the interpreter is configured to point at the
``python`` subfolder of your app's bundle; and
- * The ``PYTHONPATH`` for the interpreter includes:
+ * The :envvar:`PYTHONPATH` for the interpreter includes:
- the ``python/lib/python3.X`` subfolder of your app's bundle,
- the ``python/lib/python3.X/lib-dynload`` subfolder of your app's bundle, and
@@ -324,7 +324,12 @@ modules in your app, some additional steps will be required:
the ``lib-dynload`` folder can be copied and adapted for this purpose.
* If you're using a separate folder for third-party packages, ensure that folder
- is included as part of the ``PYTHONPATH`` configuration in step 10.
+ is included as part of the :envvar:`PYTHONPATH` configuration in step 10.
+
+* If any of the folders containing third-party packages will contain ``.pth``
+  files, you should add that folder as a *site directory* (using
+  :func:`site.addsitedir`), rather than adding to :envvar:`PYTHONPATH` or
+  :data:`sys.path` directly, as sketched below.
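
A sketch of doing this at interpreter start-up (the package folder path is
hypothetical; substitute your app's actual bundle layout)::

   import site

   # Hypothetical location of a third-party package folder inside the
   # app bundle; any .pth files in it are processed by addsitedir().
   site.addsitedir('/path/to/app/python/app_packages')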
Testing a Python package
------------------------
diff --git a/Doc/whatsnew/3.12.rst b/Doc/whatsnew/3.12.rst
index a65f59c0a72..7cfdc287b7f 100644
--- a/Doc/whatsnew/3.12.rst
+++ b/Doc/whatsnew/3.12.rst
@@ -2233,6 +2233,8 @@ Deprecated
.. include:: ../deprecations/c-api-pending-removal-in-3.15.rst
+.. include:: ../deprecations/c-api-pending-removal-in-3.16.rst
+
.. include:: ../deprecations/c-api-pending-removal-in-future.rst
Removed
diff --git a/Doc/whatsnew/3.13.rst b/Doc/whatsnew/3.13.rst
index 580a3d8154d..0a3b3b30e01 100644
--- a/Doc/whatsnew/3.13.rst
+++ b/Doc/whatsnew/3.13.rst
@@ -1996,7 +1996,7 @@ New Deprecations
(Contributed by Alex Waygood in :gh:`105566` and :gh:`105570`.)
* Deprecate the :func:`typing.no_type_check_decorator` decorator function,
- to be removed in in Python 3.15.
+ to be removed in Python 3.15.
After eight years in the :mod:`typing` module,
it has yet to be supported by any major type checker.
(Contributed by Alex Waygood in :gh:`106309`.)
@@ -2546,6 +2546,8 @@ Deprecated C APIs
.. include:: ../deprecations/c-api-pending-removal-in-3.15.rst
+.. include:: ../deprecations/c-api-pending-removal-in-3.16.rst
+
.. include:: ../deprecations/c-api-pending-removal-in-3.18.rst
.. include:: ../deprecations/c-api-pending-removal-in-future.rst
@@ -2592,7 +2594,7 @@ Build Changes
* The :file:`configure` option :option:`--with-system-libmpdec`
now defaults to ``yes``.
- The bundled copy of ``libmpdecimal`` will be removed in Python 3.15.
+ The bundled copy of ``libmpdec`` will be removed in Python 3.16.
* Python built with :file:`configure` :option:`--with-trace-refs`
(tracing references) is now ABI compatible with the Python release build
diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst
index a74d414ae4b..c108a94692d 100644
--- a/Doc/whatsnew/3.14.rst
+++ b/Doc/whatsnew/3.14.rst
@@ -2640,7 +2640,9 @@ pty
sqlite3
-------
-* Remove :data:`!version` and :data:`!version_info` from :mod:`sqlite3`.
+* Remove :data:`!version` and :data:`!version_info` from :mod:`sqlite3`;
+ use :data:`~sqlite3.sqlite_version` and :data:`~sqlite3.sqlite_version_info`
+ for the actual version number of the runtime SQLite library.
(Contributed by Hugo van Kemenade in :gh:`118924`.)
* Disallow using a sequence of parameters with named placeholders.
@@ -3051,6 +3053,8 @@ Deprecated
.. include:: ../deprecations/c-api-pending-removal-in-3.15.rst
+.. include:: ../deprecations/c-api-pending-removal-in-3.16.rst
+
.. include:: ../deprecations/c-api-pending-removal-in-3.18.rst
.. include:: ../deprecations/c-api-pending-removal-in-future.rst
diff --git a/Doc/whatsnew/3.15.rst b/Doc/whatsnew/3.15.rst
index 9f327cf904d..706a816f888 100644
--- a/Doc/whatsnew/3.15.rst
+++ b/Doc/whatsnew/3.15.rst
@@ -115,6 +115,9 @@ math
* Add :func:`math.isnormal` and :func:`math.issubnormal` functions.
(Contributed by Sergey B Kirpichev in :gh:`132908`.)
+* Add :func:`math.signbit` function.
+ (Contributed by Bénédikt Tran in :gh:`135853`.)
+
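For context, a minimal sketch of how the new function might be used, assuming the usual C99 ``signbit()`` semantics (the exact behaviour is defined by the change itself):

```python
import math

# Requires Python 3.15+; assumes signbit() reports whether the sign bit is
# set, which distinguishes -0.0 from 0.0 unlike a plain `x < 0` test.
print(math.signbit(-3.5))   # True
print(math.signbit(0.0))    # False
print(math.signbit(-0.0))   # True
```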
os.path
-------
@@ -137,8 +140,15 @@ shelve
sqlite3
-------
-* Support SQL keyword completion in the :mod:`sqlite3` command-line interface.
- (Contributed by Long Tan in :gh:`133393`.)
+* The :ref:`command-line interface <sqlite3-cli>` has several new features:
+
+ * SQL keyword completion on <tab>.
+ (Contributed by Long Tan in :gh:`133393`.)
+
+ * Prompts, error messages, and help text are now colored.
+ This is enabled by default; see :ref:`using-on-controlling-color` for
+ details.
+ (Contributed by Stan Ulbrych and Łukasz Langa in :gh:`133461`.)
ssl
@@ -242,6 +252,13 @@ platform
(Contributed by Alexey Makridenko in :gh:`133604`.)
+sre_*
+-----
+
+* Removed the :mod:`!sre_compile`, :mod:`!sre_constants` and :mod:`!sre_parse` modules.
+ (Contributed by Stan Ulbrych in :gh:`135994`.)
+
+
sysconfig
---------
@@ -274,6 +291,15 @@ typing
(Contributed by Bénédikt Tran in :gh:`133823`.)
+unittest
+--------
+
+* :meth:`unittest.TestCase.assertLogs` now accepts a formatter
+ to control how captured messages are formatted.
+ (Contributed by Garry Cairns in :gh:`134567`.)
+
+
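A hedged sketch of the new ``assertLogs`` behaviour; the keyword name ``formatter`` and the exact rendering of ``cm.output`` are assumptions based on the entry above, not taken from the patch:

```python
import logging
import unittest

class AssertLogsFormatterExample(unittest.TestCase):
    def test_formatter(self):
        # Assumes the new argument is a keyword named `formatter` and that
        # captured records in cm.output are rendered with it.
        fmt = logging.Formatter("%(levelname)s %(name)s :: %(message)s")
        with self.assertLogs("demo", level="INFO", formatter=fmt) as cm:
            logging.getLogger("demo").warning("something happened")
        self.assertEqual(cm.output, ["WARNING demo :: something happened"])

if __name__ == "__main__":
    unittest.main()
```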
wave
----
@@ -293,6 +319,11 @@ that may require changes to your code.
Build changes
=============
+* Removed the implicit fallback to the bundled copy of the ``libmpdec`` library.
+ Use of the bundled copy must now be explicitly requested with
+ :option:`--with-system-libmpdec` set to ``no`` or with
+ :option:`!--without-system-libmpdec`.
+ (Contributed by Sergey B Kirpichev in :gh:`115119`.)
+
C API changes
=============
@@ -324,6 +355,13 @@ Porting to Python 3.15
(Contributed by Serhiy Storchaka in :gh:`133595`.)
+* Private functions promoted to public C APIs:
+
+ * ``PyMutex_IsLocked()``: :c:func:`PyMutex_IsLocked`
+
+ The |pythoncapi_compat_project| can be used to get most of these new
+ functions on Python 3.14 and older.
+
Deprecated C APIs
-----------------
diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst
index 896e8f4a489..7fd9e6ac66e 100644
--- a/Doc/whatsnew/3.9.rst
+++ b/Doc/whatsnew/3.9.rst
@@ -423,8 +423,8 @@ digests. It skips MD5 on platforms that block MD5 digest.
fcntl
-----
-Added constants :const:`~fcntl.F_OFD_GETLK`, :const:`~fcntl.F_OFD_SETLK`
-and :const:`~fcntl.F_OFD_SETLKW`.
+Added constants :const:`!fcntl.F_OFD_GETLK`, :const:`!fcntl.F_OFD_SETLK`
+and :const:`!fcntl.F_OFD_SETLKW`.
(Contributed by Donghee Na in :issue:`38602`.)
ftplib
@@ -644,7 +644,7 @@ attribute.
random
------
-Added a new :attr:`random.Random.randbytes` method: generate random bytes.
+Added a new :meth:`random.Random.randbytes` method: generate random bytes.
(Contributed by Victor Stinner in :issue:`40286`.)
signal
@@ -776,7 +776,7 @@ Optimizations
:pep:`590` vectorcall protocol.
(Contributed by Donghee Na, Mark Shannon, Jeroen Demeyer and Petr Viktorin in :issue:`37207`.)
-* Optimized :func:`~set.difference_update` for the case when the other set
+* Optimized :meth:`!set.difference_update` for the case when the other set
is much larger than the base set.
(Suggested by Evgeny Kapun with code contributed by Michele Orrù in :issue:`8425`.)
@@ -1139,7 +1139,7 @@ Changes in the Python API
(Contributed by Christian Heimes in :issue:`36384`).
* :func:`codecs.lookup` now normalizes the encoding name the same way as
- :func:`encodings.normalize_encoding`, except that :func:`codecs.lookup` also
+ :func:`!encodings.normalize_encoding`, except that :func:`codecs.lookup` also
converts the name to lower case. For example, ``"latex+latin1"`` encoding
name is now normalized to ``"latex_latin1"``.
(Contributed by Jordon Xu in :issue:`37751`.)
diff --git a/Grammar/python.gram b/Grammar/python.gram
index a5ab769910b..d1af7704e9b 100644
--- a/Grammar/python.gram
+++ b/Grammar/python.gram
@@ -184,8 +184,9 @@ return_stmt[stmt_ty]:
| 'return' a=[star_expressions] { _PyAST_Return(a, EXTRA) }
raise_stmt[stmt_ty]:
+ | 'raise' a=expression 'from' b=expression { _PyAST_Raise(a, b, EXTRA) }
| invalid_raise_stmt
- | 'raise' a=expression b=['from' z=expression { z }] { _PyAST_Raise(a, b, EXTRA) }
+ | 'raise' a=expression { _PyAST_Raise(a, NULL, EXTRA) }
| 'raise' { _PyAST_Raise(NULL, NULL, EXTRA) }
pass_stmt[stmt_ty]:
diff --git a/Include/cpython/lock.h b/Include/cpython/lock.h
index 8ee03e82f74..63886fca28e 100644
--- a/Include/cpython/lock.h
+++ b/Include/cpython/lock.h
@@ -36,6 +36,9 @@ PyAPI_FUNC(void) PyMutex_Lock(PyMutex *m);
// exported function for unlocking the mutex
PyAPI_FUNC(void) PyMutex_Unlock(PyMutex *m);
+// exported function for checking if the mutex is locked
+PyAPI_FUNC(int) PyMutex_IsLocked(PyMutex *m);
+
// Locks the mutex.
//
// If the mutex is currently locked, the calling thread will be parked until
@@ -61,3 +64,11 @@ _PyMutex_Unlock(PyMutex *m)
}
}
#define PyMutex_Unlock _PyMutex_Unlock
+
+// Checks if the mutex is currently locked.
+static inline int
+_PyMutex_IsLocked(PyMutex *m)
+{
+ return (_Py_atomic_load_uint8(&m->_bits) & _Py_LOCKED) != 0;
+}
+#define PyMutex_IsLocked _PyMutex_IsLocked
diff --git a/Include/internal/mimalloc/mimalloc/types.h b/Include/internal/mimalloc/mimalloc/types.h
index a04169f7fb8..a17f637fe68 100644
--- a/Include/internal/mimalloc/mimalloc/types.h
+++ b/Include/internal/mimalloc/mimalloc/types.h
@@ -705,7 +705,7 @@ void _mi_stat_counter_increase(mi_stat_counter_t* stat, size_t amount);
// Thread Local data
// ------------------------------------------------------
-// A "span" is is an available range of slices. The span queues keep
+// A "span" is an available range of slices. The span queues keep
// track of slice spans of at most the given `slice_count` (but more than the previous size class).
typedef struct mi_span_queue_s {
mi_slice_t* first;
diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h
index e118b86db50..c461bc1786d 100644
--- a/Include/internal/pycore_global_objects_fini_generated.h
+++ b/Include/internal/pycore_global_objects_fini_generated.h
@@ -1136,6 +1136,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) {
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(offset_src));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(on_type_read));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(onceregistry));
+ _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(only_active_thread));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(only_keys));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(oparg));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(opcode));
diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h
index 36f3d23d095..72c2051bd97 100644
--- a/Include/internal/pycore_global_strings.h
+++ b/Include/internal/pycore_global_strings.h
@@ -627,6 +627,7 @@ struct _Py_global_strings {
STRUCT_FOR_ID(offset_src)
STRUCT_FOR_ID(on_type_read)
STRUCT_FOR_ID(onceregistry)
+ STRUCT_FOR_ID(only_active_thread)
STRUCT_FOR_ID(only_keys)
STRUCT_FOR_ID(oparg)
STRUCT_FOR_ID(opcode)
diff --git a/Include/internal/pycore_lock.h b/Include/internal/pycore_lock.h
index 32b60cc33a2..585120108cf 100644
--- a/Include/internal/pycore_lock.h
+++ b/Include/internal/pycore_lock.h
@@ -25,13 +25,6 @@ PyMutex_LockFast(PyMutex *m)
return _Py_atomic_compare_exchange_uint8(lock_bits, &expected, _Py_LOCKED);
}
-// Checks if the mutex is currently locked.
-static inline int
-PyMutex_IsLocked(PyMutex *m)
-{
- return (_Py_atomic_load_uint8(&m->_bits) & _Py_LOCKED) != 0;
-}
-
// Re-initializes the mutex after a fork to the unlocked state.
static inline void
_PyMutex_at_fork_reinit(PyMutex *m)
@@ -51,6 +44,11 @@ typedef enum _PyLockFlags {
// Fail if interrupted by a signal while waiting on the lock.
_PY_FAIL_IF_INTERRUPTED = 4,
+
+ // Locking & unlocking this lock requires an attached thread state.
+ // If locking returns PY_LOCK_FAILURE, a Python exception *may* be raised.
+ // (Intended for use with _PY_LOCK_HANDLE_SIGNALS and _PY_LOCK_DETACH.)
+ _PY_LOCK_PYTHONLOCK = 8,
} _PyLockFlags;
// Lock a mutex with an optional timeout and additional options. See
diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h
index 50807e68e9a..40f8ca68c00 100644
--- a/Include/internal/pycore_object.h
+++ b/Include/internal/pycore_object.h
@@ -614,7 +614,7 @@ static inline PyObject *
_Py_XGetRef(PyObject **ptr)
{
for (;;) {
- PyObject *value = _Py_atomic_load_ptr(ptr);
+ PyObject *value = _PyObject_CAST(_Py_atomic_load_ptr(ptr));
if (value == NULL) {
return value;
}
@@ -629,7 +629,7 @@ _Py_XGetRef(PyObject **ptr)
static inline PyObject *
_Py_TryXGetRef(PyObject **ptr)
{
- PyObject *value = _Py_atomic_load_ptr(ptr);
+ PyObject *value = _PyObject_CAST(_Py_atomic_load_ptr(ptr));
if (value == NULL) {
return value;
}
@@ -918,7 +918,7 @@ extern PyObject *_PyType_LookupRefAndVersion(PyTypeObject *, PyObject *,
extern unsigned int
_PyType_LookupStackRefAndVersion(PyTypeObject *type, PyObject *name, _PyStackRef *out);
-extern int _PyObject_GetMethodStackRef(PyThreadState *ts, PyObject *obj,
+PyAPI_FUNC(int) _PyObject_GetMethodStackRef(PyThreadState *ts, PyObject *obj,
PyObject *name, _PyStackRef *method);
// Cache the provided init method in the specialization cache of type if the
@@ -1033,6 +1033,20 @@ enum _PyAnnotateFormat {
int _PyObject_SetDict(PyObject *obj, PyObject *value);
+#ifndef Py_GIL_DISABLED
+static inline Py_ALWAYS_INLINE void _Py_INCREF_MORTAL(PyObject *op)
+{
+ assert(!_Py_IsStaticImmortal(op));
+ op->ob_refcnt++;
+ _Py_INCREF_STAT_INC();
+#if defined(Py_REF_DEBUG) && !defined(Py_LIMITED_API)
+ if (!_Py_IsImmortal(op)) {
+ _Py_INCREF_IncRefTotal();
+ }
+#endif
+}
+#endif
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/internal/pycore_optimizer.h b/Include/internal/pycore_optimizer.h
index 576c2794782..8b7f12bf03d 100644
--- a/Include/internal/pycore_optimizer.h
+++ b/Include/internal/pycore_optimizer.h
@@ -10,7 +10,7 @@ extern "C" {
#include "pycore_typedefs.h" // _PyInterpreterFrame
#include "pycore_uop_ids.h"
-#include "pycore_stackref.h"
+#include "pycore_stackref.h" // _PyStackRef
#include <stdbool.h>
@@ -316,6 +316,9 @@ extern JitOptRef _Py_uop_sym_new_type(
JitOptContext *ctx, PyTypeObject *typ);
extern JitOptRef _Py_uop_sym_new_const(JitOptContext *ctx, PyObject *const_val);
+extern JitOptRef _Py_uop_sym_new_const_steal(JitOptContext *ctx, PyObject *const_val);
+bool _Py_uop_sym_is_safe_const(JitOptContext *ctx, JitOptRef sym);
+_PyStackRef _Py_uop_sym_get_const_as_stackref(JitOptContext *ctx, JitOptRef sym);
extern JitOptRef _Py_uop_sym_new_null(JitOptContext *ctx);
extern bool _Py_uop_sym_has_type(JitOptRef sym);
extern bool _Py_uop_sym_matches_type(JitOptRef sym, PyTypeObject *typ);
diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h
index 633e5cf77db..ea3dfbd2eef 100644
--- a/Include/internal/pycore_pystate.h
+++ b/Include/internal/pycore_pystate.h
@@ -8,6 +8,7 @@ extern "C" {
# error "this header requires Py_BUILD_CORE define"
#endif
+#include "pycore_pythonrun.h" // _PyOS_STACK_MARGIN_SHIFT
#include "pycore_typedefs.h" // _PyRuntimeState
#include "pycore_tstate.h"
@@ -325,7 +326,7 @@ _Py_RecursionLimit_GetMargin(PyThreadState *tstate)
_PyThreadStateImpl *_tstate = (_PyThreadStateImpl *)tstate;
assert(_tstate->c_stack_hard_limit != 0);
intptr_t here_addr = _Py_get_machine_stack_pointer();
- return Py_ARITHMETIC_RIGHT_SHIFT(intptr_t, here_addr - (intptr_t)_tstate->c_stack_soft_limit, PYOS_STACK_MARGIN_SHIFT);
+ return Py_ARITHMETIC_RIGHT_SHIFT(intptr_t, here_addr - (intptr_t)_tstate->c_stack_soft_limit, _PyOS_STACK_MARGIN_SHIFT);
}
#ifdef __cplusplus
diff --git a/Include/internal/pycore_pythonrun.h b/Include/internal/pycore_pythonrun.h
index 7daed1326af..c2832098ddb 100644
--- a/Include/internal/pycore_pythonrun.h
+++ b/Include/internal/pycore_pythonrun.h
@@ -33,6 +33,28 @@ extern const char* _Py_SourceAsString(
PyCompilerFlags *cf,
PyObject **cmd_copy);
+
+/* Stack size, in "pointers". This must be large enough, so
+ * no two calls to check recursion depth are more than this far
+ * apart. In practice, that means it must be larger than the C
+ * stack consumption of PyEval_EvalDefault */
+#if defined(_Py_ADDRESS_SANITIZER) || defined(_Py_THREAD_SANITIZER)
+# define _PyOS_LOG2_STACK_MARGIN 12
+#elif defined(Py_DEBUG) && defined(WIN32)
+# define _PyOS_LOG2_STACK_MARGIN 12
+#else
+# define _PyOS_LOG2_STACK_MARGIN 11
+#endif
+#define _PyOS_STACK_MARGIN (1 << _PyOS_LOG2_STACK_MARGIN)
+#define _PyOS_STACK_MARGIN_BYTES (_PyOS_STACK_MARGIN * sizeof(void *))
+
+#if SIZEOF_VOID_P == 8
+# define _PyOS_STACK_MARGIN_SHIFT (_PyOS_LOG2_STACK_MARGIN + 3)
+#else
+# define _PyOS_STACK_MARGIN_SHIFT (_PyOS_LOG2_STACK_MARGIN + 2)
+#endif
+
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h
index d172cc1485d..d378fcae26c 100644
--- a/Include/internal/pycore_runtime_init_generated.h
+++ b/Include/internal/pycore_runtime_init_generated.h
@@ -1134,6 +1134,7 @@ extern "C" {
INIT_ID(offset_src), \
INIT_ID(on_type_read), \
INIT_ID(onceregistry), \
+ INIT_ID(only_active_thread), \
INIT_ID(only_keys), \
INIT_ID(oparg), \
INIT_ID(opcode), \
diff --git a/Include/internal/pycore_stackref.h b/Include/internal/pycore_stackref.h
index 10e7199269e..6bf82d8322f 100644
--- a/Include/internal/pycore_stackref.h
+++ b/Include/internal/pycore_stackref.h
@@ -626,7 +626,7 @@ _PyStackRef_FromPyObjectNew(PyObject *obj)
if (_Py_IsImmortal(obj)) {
return (_PyStackRef){ .bits = ((uintptr_t)obj) | Py_TAG_REFCNT};
}
- Py_INCREF_MORTAL(obj);
+ _Py_INCREF_MORTAL(obj);
_PyStackRef ref = (_PyStackRef){ .bits = (uintptr_t)obj };
PyStackRef_CheckValid(ref);
return ref;
@@ -637,7 +637,7 @@ static inline _PyStackRef
_PyStackRef_FromPyObjectNewMortal(PyObject *obj)
{
assert(obj != NULL);
- Py_INCREF_MORTAL(obj);
+ _Py_INCREF_MORTAL(obj);
_PyStackRef ref = (_PyStackRef){ .bits = (uintptr_t)obj };
PyStackRef_CheckValid(ref);
return ref;
@@ -654,14 +654,14 @@ PyStackRef_FromPyObjectBorrow(PyObject *obj)
/* WARNING: This macro evaluates its argument more than once */
#ifdef _WIN32
#define PyStackRef_DUP(REF) \
- (PyStackRef_RefcountOnObject(REF) ? (Py_INCREF_MORTAL(BITS_TO_PTR(REF)), (REF)) : (REF))
+ (PyStackRef_RefcountOnObject(REF) ? (_Py_INCREF_MORTAL(BITS_TO_PTR(REF)), (REF)) : (REF))
#else
static inline _PyStackRef
PyStackRef_DUP(_PyStackRef ref)
{
assert(!PyStackRef_IsNull(ref));
if (PyStackRef_RefcountOnObject(ref)) {
- Py_INCREF_MORTAL(BITS_TO_PTR(ref));
+ _Py_INCREF_MORTAL(BITS_TO_PTR(ref));
}
return ref;
}
@@ -829,7 +829,7 @@ _Py_TryIncrefCompareStackRef(PyObject **src, PyObject *op, _PyStackRef *out)
static inline int
_Py_TryXGetStackRef(PyObject **src, _PyStackRef *out)
{
- PyObject *op = _Py_atomic_load_ptr_relaxed(src);
+ PyObject *op = _PyObject_CAST(_Py_atomic_load_ptr_relaxed(src));
if (op == NULL) {
*out = PyStackRef_NULL;
return 1;
diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h
index 0a9be4e41ac..e516211f6c6 100644
--- a/Include/internal/pycore_unicodeobject_generated.h
+++ b/Include/internal/pycore_unicodeobject_generated.h
@@ -2296,6 +2296,10 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) {
_PyUnicode_InternStatic(interp, &string);
assert(_PyUnicode_CheckConsistency(string, 1));
assert(PyUnicode_GET_LENGTH(string) != 1);
+ string = &_Py_ID(only_active_thread);
+ _PyUnicode_InternStatic(interp, &string);
+ assert(_PyUnicode_CheckConsistency(string, 1));
+ assert(PyUnicode_GET_LENGTH(string) != 1);
string = &_Py_ID(only_keys);
_PyUnicode_InternStatic(interp, &string);
assert(_PyUnicode_CheckConsistency(string, 1));
diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h
index 52cbc2fffe4..ff7e800aa9b 100644
--- a/Include/internal/pycore_uop_metadata.h
+++ b/Include/internal/pycore_uop_metadata.h
@@ -106,7 +106,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_BINARY_OP_ADD_UNICODE] = HAS_ERROR_FLAG | HAS_PURE_FLAG,
[_BINARY_OP_INPLACE_ADD_UNICODE] = HAS_LOCAL_FLAG | HAS_DEOPT_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_GUARD_BINARY_OP_EXTEND] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG,
- [_BINARY_OP_EXTEND] = HAS_ESCAPES_FLAG | HAS_PURE_FLAG,
+ [_BINARY_OP_EXTEND] = HAS_ESCAPES_FLAG,
[_BINARY_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_STORE_SLICE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_BINARY_OP_SUBSCR_LIST_INT] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG,
diff --git a/Include/internal/pycore_weakref.h b/Include/internal/pycore_weakref.h
index 950aa0af290..4ed8928c0b9 100644
--- a/Include/internal/pycore_weakref.h
+++ b/Include/internal/pycore_weakref.h
@@ -29,6 +29,12 @@ extern "C" {
PyMutex_LockFlags(wr->weakrefs_lock, _Py_LOCK_DONT_DETACH)
#define UNLOCK_WEAKREFS_FOR_WR(wr) PyMutex_Unlock(wr->weakrefs_lock)
+#define FT_CLEAR_WEAKREFS(obj, weakref_list) \
+ do { \
+ assert(Py_REFCNT(obj) == 0); \
+ PyObject_ClearWeakRefs(obj); \
+ } while (0)
+
#else
#define LOCK_WEAKREFS(obj)
@@ -37,6 +43,14 @@ extern "C" {
#define LOCK_WEAKREFS_FOR_WR(wr)
#define UNLOCK_WEAKREFS_FOR_WR(wr)
+#define FT_CLEAR_WEAKREFS(obj, weakref_list) \
+ do { \
+ assert(Py_REFCNT(obj) == 0); \
+ if (weakref_list != NULL) { \
+ PyObject_ClearWeakRefs(obj); \
+ } \
+ } while (0)
+
#endif
static inline int _is_dead(PyObject *obj)
diff --git a/Include/pyport.h b/Include/pyport.h
index 0675294d5bc..89829373be2 100644
--- a/Include/pyport.h
+++ b/Include/pyport.h
@@ -667,25 +667,6 @@ extern "C" {
#endif
-// _Py_NO_SANITIZE_UNDEFINED(): Disable Undefined Behavior sanitizer (UBsan)
-// on a function.
-//
-// Clang and GCC 9.0+ use __attribute__((no_sanitize("undefined"))).
-// GCC 4.9+ uses __attribute__((no_sanitize_undefined)).
-#if defined(__has_feature)
-# if __has_feature(undefined_behavior_sanitizer)
-# define _Py_NO_SANITIZE_UNDEFINED __attribute__((no_sanitize("undefined")))
-# endif
-#endif
-#if !defined(_Py_NO_SANITIZE_UNDEFINED) && defined(__GNUC__) \
- && ((__GNUC__ >= 5) || (__GNUC__ == 4) && (__GNUC_MINOR__ >= 9))
-# define _Py_NO_SANITIZE_UNDEFINED __attribute__((no_sanitize_undefined))
-#endif
-#ifndef _Py_NO_SANITIZE_UNDEFINED
-# define _Py_NO_SANITIZE_UNDEFINED
-#endif
-
-
// _Py_NONSTRING: The nonstring variable attribute specifies that an object or
// member declaration with type array of char, signed char, or unsigned char,
// or pointer to such a type is intended to store character arrays that do not
diff --git a/Include/pythonrun.h b/Include/pythonrun.h
index 716c4ab3c81..92b50aa807b 100644
--- a/Include/pythonrun.h
+++ b/Include/pythonrun.h
@@ -21,36 +21,15 @@ PyAPI_FUNC(void) PyErr_DisplayException(PyObject *);
/* Stuff with no proper home (yet) */
PyAPI_DATA(int) (*PyOS_InputHook)(void);
-/* Stack size, in "pointers". This must be large enough, so
- * no two calls to check recursion depth are more than this far
- * apart. In practice, that means it must be larger than the C
- * stack consumption of PyEval_EvalDefault */
-#if defined(_Py_ADDRESS_SANITIZER) || defined(_Py_THREAD_SANITIZER)
-# define PYOS_LOG2_STACK_MARGIN 12
-#elif defined(Py_DEBUG) && defined(WIN32)
-# define PYOS_LOG2_STACK_MARGIN 12
-#else
-# define PYOS_LOG2_STACK_MARGIN 11
-#endif
-#define PYOS_STACK_MARGIN (1 << PYOS_LOG2_STACK_MARGIN)
-#define PYOS_STACK_MARGIN_BYTES (PYOS_STACK_MARGIN * sizeof(void *))
-
-#if SIZEOF_VOID_P == 8
-#define PYOS_STACK_MARGIN_SHIFT (PYOS_LOG2_STACK_MARGIN + 3)
-#else
-#define PYOS_STACK_MARGIN_SHIFT (PYOS_LOG2_STACK_MARGIN + 2)
-#endif
-
-
#if defined(WIN32)
-#define USE_STACKCHECK
+# define USE_STACKCHECK
#endif
-
#ifdef USE_STACKCHECK
/* Check that we aren't overflowing our stack */
PyAPI_FUNC(int) PyOS_CheckStack(void);
#endif
+
#ifndef Py_LIMITED_API
# define Py_CPYTHON_PYTHONRUN_H
# include "cpython/pythonrun.h"
diff --git a/Include/refcount.h b/Include/refcount.h
index 034c453f449..457972b6dcf 100644
--- a/Include/refcount.h
+++ b/Include/refcount.h
@@ -30,7 +30,7 @@ increase and decrease the objects reference count.
In order to offer sufficient resilience to C extensions using the stable ABI
compiled against 3.11 or earlier, we set the initial value near the
-middle of the range (2**31, 2**32). That way the the refcount can be
+middle of the range (2**31, 2**32). That way the refcount can be
off by ~1 billion without affecting immortality.
Reference count increases will use saturated arithmetic, taking advantage of
@@ -244,20 +244,6 @@ PyAPI_FUNC(void) Py_DecRef(PyObject *);
PyAPI_FUNC(void) _Py_IncRef(PyObject *);
PyAPI_FUNC(void) _Py_DecRef(PyObject *);
-#ifndef Py_GIL_DISABLED
-static inline Py_ALWAYS_INLINE void Py_INCREF_MORTAL(PyObject *op)
-{
- assert(!_Py_IsStaticImmortal(op));
- op->ob_refcnt++;
- _Py_INCREF_STAT_INC();
-#if defined(Py_REF_DEBUG) && !defined(Py_LIMITED_API)
- if (!_Py_IsImmortal(op)) {
- _Py_INCREF_IncRefTotal();
- }
-#endif
-}
-#endif
-
static inline Py_ALWAYS_INLINE void Py_INCREF(PyObject *op)
{
#if defined(Py_LIMITED_API) && (Py_LIMITED_API+0 >= 0x030c0000 || defined(Py_REF_DEBUG))
diff --git a/InternalDocs/garbage_collector.md b/InternalDocs/garbage_collector.md
index 4da6cd47dc8..9c35684c945 100644
--- a/InternalDocs/garbage_collector.md
+++ b/InternalDocs/garbage_collector.md
@@ -286,7 +286,7 @@ object, the GC does not process it twice.
Notice that an object that was marked as "tentatively unreachable" and was later
moved back to the reachable list will be visited again by the garbage collector
-as now all the references that that object has need to be processed as well. This
+as now all the references that the object has need to be processed as well. This
process is really a breadth first search over the object graph. Once all the objects
are scanned, the GC knows that all container objects in the tentatively unreachable
list are really unreachable and can thus be garbage collected.
diff --git a/Lib/_pydecimal.py b/Lib/_pydecimal.py
index 781b38ec26b..9b8e42a2342 100644
--- a/Lib/_pydecimal.py
+++ b/Lib/_pydecimal.py
@@ -6122,7 +6122,11 @@ _parse_format_specifier_regex = re.compile(r"""\A
(?P<zeropad>0)?
(?P<minimumwidth>\d+)?
(?P<thousands_sep>[,_])?
-(?:\.(?P<precision>\d+))?
+(?:\.
+ (?=[\d,_]) # lookahead for digit or separator
+ (?P<precision>\d+)?
+ (?P<frac_separators>[,_])?
+)?
(?P<type>[eEfFgGn%])?
\z
""", re.VERBOSE|re.DOTALL)
@@ -6215,6 +6219,9 @@ def _parse_format_specifier(format_spec, _localeconv=None):
format_dict['grouping'] = [3, 0]
format_dict['decimal_point'] = '.'
+ if format_dict['frac_separators'] is None:
+ format_dict['frac_separators'] = ''
+
return format_dict
def _format_align(sign, body, spec):
@@ -6334,6 +6341,11 @@ def _format_number(is_negative, intpart, fracpart, exp, spec):
sign = _format_sign(is_negative, spec)
+ frac_sep = spec['frac_separators']
+ if fracpart and frac_sep:
+ fracpart = frac_sep.join(fracpart[pos:pos + 3]
+ for pos in range(0, len(fracpart), 3))
+
if fracpart or spec['alt']:
fracpart = spec['decimal_point'] + fracpart
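
For illustration, a short sketch of the format-spec extension that the regex and ``_format_number()`` changes above implement; the shown outputs are what the grouping logic is expected to produce, not copied from tests:

```python
from decimal import Decimal

d = Decimal("1234.5678901")
# ',' or '_' after the precision groups the *fractional* digits in threes,
# mirroring the existing thousands separator on the integer part.
print(f"{d:,.7_f}")   # expected: 1,234.567_890_1
print(f"{d:.4,f}")    # expected: 1234.567,9
```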
diff --git a/Lib/_pyio.py b/Lib/_pyio.py
index fb2a6d049ca..5db8ce9244b 100644
--- a/Lib/_pyio.py
+++ b/Lib/_pyio.py
@@ -876,16 +876,28 @@ class BytesIO(BufferedIOBase):
_buffer = None
def __init__(self, initial_bytes=None):
+ # Used to keep self._buffer and self._pos consistent.
+ self._lock = Lock()
+
buf = bytearray()
if initial_bytes is not None:
buf += initial_bytes
- self._buffer = buf
- self._pos = 0
+
+ with self._lock:
+ self._buffer = buf
+ self._pos = 0
def __getstate__(self):
if self.closed:
raise ValueError("__getstate__ on closed file")
- return self.__dict__.copy()
+ with self._lock:
+ state = self.__dict__.copy()
+ del state['_lock']
+ return state
+
+ def __setstate__(self, state):
+ self.__dict__.update(state)
+ self._lock = Lock()
def getvalue(self):
"""Return the bytes value (contents) of the buffer
@@ -918,14 +930,16 @@ class BytesIO(BufferedIOBase):
raise TypeError(f"{size!r} is not an integer")
else:
size = size_index()
- if size < 0:
- size = len(self._buffer)
- if len(self._buffer) <= self._pos:
- return b""
- newpos = min(len(self._buffer), self._pos + size)
- b = self._buffer[self._pos : newpos]
- self._pos = newpos
- return bytes(b)
+
+ with self._lock:
+ if size < 0:
+ size = len(self._buffer)
+ if len(self._buffer) <= self._pos:
+ return b""
+ newpos = min(len(self._buffer), self._pos + size)
+ b = self._buffer[self._pos : newpos]
+ self._pos = newpos
+ return bytes(b)
def read1(self, size=-1):
"""This is the same as read.
@@ -941,12 +955,14 @@ class BytesIO(BufferedIOBase):
n = view.nbytes # Size of any bytes-like object
if n == 0:
return 0
- pos = self._pos
- if pos > len(self._buffer):
- # Pad buffer to pos with null bytes.
- self._buffer.resize(pos)
- self._buffer[pos:pos + n] = b
- self._pos += n
+
+ with self._lock:
+ pos = self._pos
+ if pos > len(self._buffer):
+ # Pad buffer to pos with null bytes.
+ self._buffer.resize(pos)
+ self._buffer[pos:pos + n] = b
+ self._pos += n
return n
def seek(self, pos, whence=0):
@@ -963,9 +979,11 @@ class BytesIO(BufferedIOBase):
raise ValueError("negative seek position %r" % (pos,))
self._pos = pos
elif whence == 1:
- self._pos = max(0, self._pos + pos)
+ with self._lock:
+ self._pos = max(0, self._pos + pos)
elif whence == 2:
- self._pos = max(0, len(self._buffer) + pos)
+ with self._lock:
+ self._pos = max(0, len(self._buffer) + pos)
else:
raise ValueError("unsupported whence value")
return self._pos
@@ -978,18 +996,20 @@ class BytesIO(BufferedIOBase):
def truncate(self, pos=None):
if self.closed:
raise ValueError("truncate on closed file")
- if pos is None:
- pos = self._pos
- else:
- try:
- pos_index = pos.__index__
- except AttributeError:
- raise TypeError(f"{pos!r} is not an integer")
+
+ with self._lock:
+ if pos is None:
+ pos = self._pos
else:
- pos = pos_index()
- if pos < 0:
- raise ValueError("negative truncate position %r" % (pos,))
- del self._buffer[pos:]
+ try:
+ pos_index = pos.__index__
+ except AttributeError:
+ raise TypeError(f"{pos!r} is not an integer")
+ else:
+ pos = pos_index()
+ if pos < 0:
+ raise ValueError("negative truncate position %r" % (pos,))
+ del self._buffer[pos:]
return pos
def readable(self):
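
A rough illustration of the interleaving the new lock is meant to serialize: several threads mutating the same pure-Python ``BytesIO`` buffer and position:

```python
import _pyio
import threading

buf = _pyio.BytesIO()

def writer():
    for _ in range(1000):
        buf.write(b"x")

threads = [threading.Thread(target=writer) for _ in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()

# With writes serialized by the internal lock, no bytes are lost.
print(len(buf.getvalue()))   # 4000
```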
diff --git a/Lib/_strptime.py b/Lib/_strptime.py
index ae67949626d..cdc55e8daaf 100644
--- a/Lib/_strptime.py
+++ b/Lib/_strptime.py
@@ -14,6 +14,7 @@ import os
import time
import locale
import calendar
+import re
from re import compile as re_compile
from re import sub as re_sub
from re import IGNORECASE
@@ -41,6 +42,29 @@ def _findall(haystack, needle):
yield i
i += len(needle)
+def _fixmonths(months):
+ yield from months
+ # The lower case of 'İ' ('\u0130') is 'i\u0307'.
+ # The re module only supports 1-to-1 character matching in
+ # case-insensitive mode.
+ for s in months:
+ if 'i\u0307' in s:
+ yield s.replace('i\u0307', '\u0130')
+
+lzh_TW_alt_digits = (
+ # 〇:一:二:三:四:五:六:七:八:九
+ '\u3007', '\u4e00', '\u4e8c', '\u4e09', '\u56db',
+ '\u4e94', '\u516d', '\u4e03', '\u516b', '\u4e5d',
+ # 十:十一:十二:十三:十四:十五:十六:十七:十八:十九
+ '\u5341', '\u5341\u4e00', '\u5341\u4e8c', '\u5341\u4e09', '\u5341\u56db',
+ '\u5341\u4e94', '\u5341\u516d', '\u5341\u4e03', '\u5341\u516b', '\u5341\u4e5d',
+ # 廿:廿一:廿二:廿三:廿四:廿五:廿六:廿七:廿八:廿九
+ '\u5eff', '\u5eff\u4e00', '\u5eff\u4e8c', '\u5eff\u4e09', '\u5eff\u56db',
+ '\u5eff\u4e94', '\u5eff\u516d', '\u5eff\u4e03', '\u5eff\u516b', '\u5eff\u4e5d',
+ # 卅:卅一
+ '\u5345', '\u5345\u4e00')
+
+
class LocaleTime(object):
"""Stores and handles locale-specific information related to time.
@@ -84,6 +108,7 @@ class LocaleTime(object):
self.__calc_weekday()
self.__calc_month()
self.__calc_am_pm()
+ self.__calc_alt_digits()
self.__calc_timezone()
self.__calc_date_time()
if _getlang() != self.lang:
@@ -119,9 +144,43 @@ class LocaleTime(object):
am_pm.append(time.strftime("%p", time_tuple).lower().strip())
self.am_pm = am_pm
+ def __calc_alt_digits(self):
+ # Set self.LC_alt_digits by using time.strftime().
+
+ # The magic data should contain all decimal digits.
+ time_tuple = time.struct_time((1998, 1, 27, 10, 43, 56, 1, 27, 0))
+ s = time.strftime("%x%X", time_tuple)
+ if s.isascii():
+ # Fast path -- all digits are ASCII.
+ self.LC_alt_digits = ()
+ return
+
+ digits = ''.join(sorted(set(re.findall(r'\d', s))))
+ if len(digits) == 10 and ord(digits[-1]) == ord(digits[0]) + 9:
+ # All 10 decimal digits from the same set.
+ if digits.isascii():
+ # All digits are ASCII.
+ self.LC_alt_digits = ()
+ return
+
+ self.LC_alt_digits = [a + b for a in digits for b in digits]
+ # Test whether the numbers contain a leading zero.
+ time_tuple2 = time.struct_time((2000, 1, 1, 1, 1, 1, 5, 1, 0))
+ if self.LC_alt_digits[1] not in time.strftime("%x %X", time_tuple2):
+ self.LC_alt_digits[:10] = digits
+ return
+
+ # Either non-Gregorian calendar or non-decimal numbers.
+ if {'\u4e00', '\u4e03', '\u4e5d', '\u5341', '\u5eff'}.issubset(s):
+ # lzh_TW
+ self.LC_alt_digits = lzh_TW_alt_digits
+ return
+
+ self.LC_alt_digits = None
+
def __calc_date_time(self):
- # Set self.date_time, self.date, & self.time by using
- # time.strftime().
+ # Set self.LC_date_time, self.LC_date, self.LC_time and
+ # self.LC_time_ampm by using time.strftime().
# Use (1999,3,17,22,44,55,2,76,0) for magic date because the amount of
# overloaded numbers is minimized. The order in which searches for
@@ -129,26 +188,32 @@ class LocaleTime(object):
# possible ambiguity for what something represents.
time_tuple = time.struct_time((1999,3,17,22,44,55,2,76,0))
time_tuple2 = time.struct_time((1999,1,3,1,1,1,6,3,0))
- replacement_pairs = [
+ replacement_pairs = []
+
+ # Non-ASCII digits
+ if self.LC_alt_digits or self.LC_alt_digits is None:
+ for n, d in [(19, '%OC'), (99, '%Oy'), (22, '%OH'),
+ (44, '%OM'), (55, '%OS'), (17, '%Od'),
+ (3, '%Om'), (2, '%Ow'), (10, '%OI')]:
+ if self.LC_alt_digits is None:
+ s = chr(0x660 + n // 10) + chr(0x660 + n % 10)
+ replacement_pairs.append((s, d))
+ if n < 10:
+ replacement_pairs.append((s[1], d))
+ elif len(self.LC_alt_digits) > n:
+ replacement_pairs.append((self.LC_alt_digits[n], d))
+ else:
+ replacement_pairs.append((time.strftime(d, time_tuple), d))
+ replacement_pairs += [
('1999', '%Y'), ('99', '%y'), ('22', '%H'),
('44', '%M'), ('55', '%S'), ('76', '%j'),
('17', '%d'), ('03', '%m'), ('3', '%m'),
# '3' needed for when no leading zero.
('2', '%w'), ('10', '%I'),
- # Non-ASCII digits
- ('\u0661\u0669\u0669\u0669', '%Y'),
- ('\u0669\u0669', '%Oy'),
- ('\u0662\u0662', '%OH'),
- ('\u0664\u0664', '%OM'),
- ('\u0665\u0665', '%OS'),
- ('\u0661\u0667', '%Od'),
- ('\u0660\u0663', '%Om'),
- ('\u0663', '%Om'),
- ('\u0662', '%Ow'),
- ('\u0661\u0660', '%OI'),
]
+
date_time = []
- for directive in ('%c', '%x', '%X'):
+ for directive in ('%c', '%x', '%X', '%r'):
current_format = time.strftime(directive, time_tuple).lower()
current_format = current_format.replace('%', '%%')
# The month and the day of the week formats are treated specially
@@ -172,9 +237,10 @@ class LocaleTime(object):
if tz:
current_format = current_format.replace(tz, "%Z")
# Transform all non-ASCII digits to digits in range U+0660 to U+0669.
- current_format = re_sub(r'\d(?<![0-9])',
- lambda m: chr(0x0660 + int(m[0])),
- current_format)
+ if not current_format.isascii() and self.LC_alt_digits is None:
+ current_format = re_sub(r'\d(?<![0-9])',
+ lambda m: chr(0x0660 + int(m[0])),
+ current_format)
for old, new in replacement_pairs:
current_format = current_format.replace(old, new)
# If %W is used, then Sunday, 2005-01-03 will fall on week 0 since
@@ -189,6 +255,7 @@ class LocaleTime(object):
self.LC_date_time = date_time[0]
self.LC_date = date_time[1]
self.LC_time = date_time[2]
+ self.LC_time_ampm = date_time[3]
def __find_month_format(self, directive):
"""Find the month format appropriate for the current locale.
@@ -213,7 +280,7 @@ class LocaleTime(object):
full_indices &= indices
indices = set(_findall(datetime, self.a_month[m]))
if abbr_indices is None:
- abbr_indices = indices
+ abbr_indices = set(indices)
else:
abbr_indices &= indices
if not full_indices and not abbr_indices:
@@ -241,7 +308,7 @@ class LocaleTime(object):
if self.f_weekday[wd] != self.a_weekday[wd]:
indices = set(_findall(datetime, self.a_weekday[wd]))
if abbr_indices is None:
- abbr_indices = indices
+ abbr_indices = set(indices)
else:
abbr_indices &= indices
if not full_indices and not abbr_indices:
@@ -288,8 +355,10 @@ class TimeRE(dict):
# The " [1-9]" part of the regex is to make %c from ANSI C work
'd': r"(?P<d>3[0-1]|[1-2]\d|0[1-9]|[1-9]| [1-9])",
'f': r"(?P<f>[0-9]{1,6})",
- 'H': r"(?P<H>2[0-3]|[0-1]\d|\d)",
+ 'H': r"(?P<H>2[0-3]|[0-1]\d|\d| \d)",
+ 'k': r"(?P<H>2[0-3]|[0-1]\d|\d| \d)",
'I': r"(?P<I>1[0-2]|0[1-9]|[1-9]| [1-9])",
+ 'l': r"(?P<I>1[0-2]|0[1-9]|[1-9]| [1-9])",
'G': r"(?P<G>\d\d\d\d)",
'j': r"(?P<j>36[0-6]|3[0-5]\d|[1-2]\d\d|0[1-9]\d|00[1-9]|[1-9]\d|0[1-9]|[1-9])",
'm': r"(?P<m>1[0-2]|0[1-9]|[1-9])",
@@ -305,23 +374,56 @@ class TimeRE(dict):
'z': r"(?P<z>([+-]\d\d:?[0-5]\d(:?[0-5]\d(\.\d{1,6})?)?)|(?-i:Z))?",
'A': self.__seqToRE(self.locale_time.f_weekday, 'A'),
'a': self.__seqToRE(self.locale_time.a_weekday, 'a'),
- 'B': self.__seqToRE(self.locale_time.f_month[1:], 'B'),
- 'b': self.__seqToRE(self.locale_time.a_month[1:], 'b'),
+ 'B': self.__seqToRE(_fixmonths(self.locale_time.f_month[1:]), 'B'),
+ 'b': self.__seqToRE(_fixmonths(self.locale_time.a_month[1:]), 'b'),
'p': self.__seqToRE(self.locale_time.am_pm, 'p'),
'Z': self.__seqToRE((tz for tz_names in self.locale_time.timezone
for tz in tz_names),
'Z'),
'%': '%'}
- for d in 'dmyHIMS':
- mapping['O' + d] = r'(?P<%s>\d\d|\d| \d)' % d
- mapping['Ow'] = r'(?P<w>\d)'
+ if self.locale_time.LC_alt_digits is None:
+ for d in 'dmyCHIMS':
+ mapping['O' + d] = r'(?P<%s>\d\d|\d| \d)' % d
+ mapping['Ow'] = r'(?P<w>\d)'
+ else:
+ mapping.update({
+ 'Od': self.__seqToRE(self.locale_time.LC_alt_digits[1:32], 'd',
+ '3[0-1]|[1-2][0-9]|0[1-9]|[1-9]'),
+ 'Om': self.__seqToRE(self.locale_time.LC_alt_digits[1:13], 'm',
+ '1[0-2]|0[1-9]|[1-9]'),
+ 'Ow': self.__seqToRE(self.locale_time.LC_alt_digits[:7], 'w',
+ '[0-6]'),
+ 'Oy': self.__seqToRE(self.locale_time.LC_alt_digits, 'y',
+ '[0-9][0-9]'),
+ 'OC': self.__seqToRE(self.locale_time.LC_alt_digits, 'C',
+ '[0-9][0-9]'),
+ 'OH': self.__seqToRE(self.locale_time.LC_alt_digits[:24], 'H',
+ '2[0-3]|[0-1][0-9]|[0-9]'),
+ 'OI': self.__seqToRE(self.locale_time.LC_alt_digits[1:13], 'I',
+ '1[0-2]|0[1-9]|[1-9]'),
+ 'OM': self.__seqToRE(self.locale_time.LC_alt_digits[:60], 'M',
+ '[0-5][0-9]|[0-9]'),
+ 'OS': self.__seqToRE(self.locale_time.LC_alt_digits[:62], 'S',
+ '6[0-1]|[0-5][0-9]|[0-9]'),
+ })
+ mapping.update({
+ 'e': mapping['d'],
+ 'Oe': mapping['Od'],
+ 'P': mapping['p'],
+ 'Op': mapping['p'],
+ 'W': mapping['U'].replace('U', 'W'),
+ })
mapping['W'] = mapping['U'].replace('U', 'W')
+
base.__init__(mapping)
+ base.__setitem__('T', self.pattern('%H:%M:%S'))
+ base.__setitem__('R', self.pattern('%H:%M'))
+ base.__setitem__('r', self.pattern(self.locale_time.LC_time_ampm))
base.__setitem__('X', self.pattern(self.locale_time.LC_time))
base.__setitem__('x', self.pattern(self.locale_time.LC_date))
base.__setitem__('c', self.pattern(self.locale_time.LC_date_time))
- def __seqToRE(self, to_convert, directive):
+ def __seqToRE(self, to_convert, directive, altregex=None):
"""Convert a list to a regex string for matching a directive.
Want possible matching values to be from longest to shortest. This
@@ -337,8 +439,9 @@ class TimeRE(dict):
else:
return ''
regex = '|'.join(re_escape(stuff) for stuff in to_convert)
- regex = '(?P<%s>%s' % (directive, regex)
- return '%s)' % regex
+ if altregex is not None:
+ regex += '|' + altregex
+ return '(?P<%s>%s)' % (directive, regex)
def pattern(self, format):
"""Return regex pattern for the format string.
@@ -365,7 +468,7 @@ class TimeRE(dict):
nonlocal day_of_month_in_format
day_of_month_in_format = True
return self[format_char]
- format = re_sub(r'%([OE]?\\?.?)', repl, format)
+ format = re_sub(r'%[-_0^#]*[0-9]*([OE]?\\?.?)', repl, format)
if day_of_month_in_format and not year_in_format:
import warnings
warnings.warn("""\
@@ -467,6 +570,15 @@ def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
# values
weekday = julian = None
found_dict = found.groupdict()
+ if locale_time.LC_alt_digits:
+ def parse_int(s):
+ try:
+ return locale_time.LC_alt_digits.index(s)
+ except ValueError:
+ return int(s)
+ else:
+ parse_int = int
+
for group_key in found_dict.keys():
# Directives not explicitly handled below:
# c, x, X
@@ -474,30 +586,34 @@ def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
# U, W
# worthless without day of the week
if group_key == 'y':
- year = int(found_dict['y'])
- # Open Group specification for strptime() states that a %y
- #value in the range of [00, 68] is in the century 2000, while
- #[69,99] is in the century 1900
- if year <= 68:
- year += 2000
+ year = parse_int(found_dict['y'])
+ if 'C' in found_dict:
+ century = parse_int(found_dict['C'])
+ year += century * 100
else:
- year += 1900
+ # Open Group specification for strptime() states that a %y
+ #value in the range of [00, 68] is in the century 2000, while
+ #[69,99] is in the century 1900
+ if year <= 68:
+ year += 2000
+ else:
+ year += 1900
elif group_key == 'Y':
year = int(found_dict['Y'])
elif group_key == 'G':
iso_year = int(found_dict['G'])
elif group_key == 'm':
- month = int(found_dict['m'])
+ month = parse_int(found_dict['m'])
elif group_key == 'B':
month = locale_time.f_month.index(found_dict['B'].lower())
elif group_key == 'b':
month = locale_time.a_month.index(found_dict['b'].lower())
elif group_key == 'd':
- day = int(found_dict['d'])
+ day = parse_int(found_dict['d'])
elif group_key == 'H':
- hour = int(found_dict['H'])
+ hour = parse_int(found_dict['H'])
elif group_key == 'I':
- hour = int(found_dict['I'])
+ hour = parse_int(found_dict['I'])
ampm = found_dict.get('p', '').lower()
# If there was no AM/PM indicator, we'll treat this like AM
if ampm in ('', locale_time.am_pm[0]):
@@ -513,9 +629,9 @@ def _strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
if hour != 12:
hour += 12
elif group_key == 'M':
- minute = int(found_dict['M'])
+ minute = parse_int(found_dict['M'])
elif group_key == 'S':
- second = int(found_dict['S'])
+ second = parse_int(found_dict['S'])
elif group_key == 'f':
s = found_dict['f']
# Pad to always return microseconds.
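
One of the smaller additions above is a set of new directive mappings (``%T``, ``%R``, ``%r``, ``%e``, ``%k``, ``%l``); a minimal, locale-independent check of ``%T`` on a build with this patch applied might look like:

```python
import time

# %T is mapped to the same pattern as %H:%M:%S by the TimeRE changes above.
t = time.strptime("2025-06-01 13:05:07", "%Y-%m-%d %T")
print(t.tm_hour, t.tm_min, t.tm_sec)   # 13 5 7
```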
diff --git a/Lib/asyncio/base_events.py b/Lib/asyncio/base_events.py
index 04fb961e998..520d4b39854 100644
--- a/Lib/asyncio/base_events.py
+++ b/Lib/asyncio/base_events.py
@@ -1016,38 +1016,43 @@ class BaseEventLoop(events.AbstractEventLoop):
family, type_, proto, _, address = addr_info
sock = None
try:
- sock = socket.socket(family=family, type=type_, proto=proto)
- sock.setblocking(False)
- if local_addr_infos is not None:
- for lfamily, _, _, _, laddr in local_addr_infos:
- # skip local addresses of different family
- if lfamily != family:
- continue
- try:
- sock.bind(laddr)
- break
- except OSError as exc:
- msg = (
- f'error while attempting to bind on '
- f'address {laddr!r}: {str(exc).lower()}'
- )
- exc = OSError(exc.errno, msg)
- my_exceptions.append(exc)
- else: # all bind attempts failed
- if my_exceptions:
- raise my_exceptions.pop()
- else:
- raise OSError(f"no matching local address with {family=} found")
- await self.sock_connect(sock, address)
- return sock
- except OSError as exc:
- my_exceptions.append(exc)
- if sock is not None:
- sock.close()
- raise
+ try:
+ sock = socket.socket(family=family, type=type_, proto=proto)
+ sock.setblocking(False)
+ if local_addr_infos is not None:
+ for lfamily, _, _, _, laddr in local_addr_infos:
+ # skip local addresses of different family
+ if lfamily != family:
+ continue
+ try:
+ sock.bind(laddr)
+ break
+ except OSError as exc:
+ msg = (
+ f'error while attempting to bind on '
+ f'address {laddr!r}: {str(exc).lower()}'
+ )
+ exc = OSError(exc.errno, msg)
+ my_exceptions.append(exc)
+ else: # all bind attempts failed
+ if my_exceptions:
+ raise my_exceptions.pop()
+ else:
+ raise OSError(f"no matching local address with {family=} found")
+ await self.sock_connect(sock, address)
+ return sock
+ except OSError as exc:
+ my_exceptions.append(exc)
+ raise
except:
if sock is not None:
- sock.close()
+ try:
+ sock.close()
+ except OSError:
+ # An error when closing a newly created socket is not
+ # important, but it could mask a more important non-OSError
+ # error, so ignore it.
+ pass
raise
finally:
exceptions = my_exceptions = None
@@ -1161,7 +1166,7 @@ class BaseEventLoop(events.AbstractEventLoop):
raise ExceptionGroup("create_connection failed", exceptions)
if len(exceptions) == 1:
raise exceptions[0]
- else:
+ elif exceptions:
# If they all have the same str(), raise one.
model = str(exceptions[0])
if all(str(exc) == model for exc in exceptions):
@@ -1170,6 +1175,9 @@ class BaseEventLoop(events.AbstractEventLoop):
# the various error messages.
raise OSError('Multiple exceptions: {}'.format(
', '.join(str(exc) for exc in exceptions)))
+ else:
+ # No exceptions were collected, raise a timeout error
+ raise TimeoutError('create_connection failed')
finally:
exceptions = None
diff --git a/Lib/concurrent/interpreters/__init__.py b/Lib/concurrent/interpreters/__init__.py
index 0fd661249a2..aa46a2b37a4 100644
--- a/Lib/concurrent/interpreters/__init__.py
+++ b/Lib/concurrent/interpreters/__init__.py
@@ -146,12 +146,8 @@ class Interpreter:
self._decref()
# for pickling:
- def __getnewargs__(self):
- return (self._id,)
-
- # for pickling:
- def __getstate__(self):
- return None
+ def __reduce__(self):
+ return (type(self), (self._id,))
def _decref(self):
if not self._ownsref:
diff --git a/Lib/concurrent/interpreters/_queues.py b/Lib/concurrent/interpreters/_queues.py
index 99987f2f692..9c12b2c8c24 100644
--- a/Lib/concurrent/interpreters/_queues.py
+++ b/Lib/concurrent/interpreters/_queues.py
@@ -129,12 +129,8 @@ class Queue:
return hash(self._id)
# for pickling:
- def __getnewargs__(self):
- return (self._id,)
-
- # for pickling:
- def __getstate__(self):
- return None
+ def __reduce__(self):
+ return (type(self), (self._id,))
def _set_unbound(self, op, items=None):
assert not hasattr(self, '_unbound')
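
A rough illustration of what replacing ``__getnewargs__``/``__getstate__`` with ``__reduce__`` enables: a pickled ``Interpreter`` (or ``Queue``) is reconstructed from its id on unpickling:

```python
import pickle
from concurrent import interpreters

interp = interpreters.create()
clone = pickle.loads(pickle.dumps(interp))   # rebuilt as type(interp)(id)
assert clone.id == interp.id
```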
diff --git a/Lib/difflib.py b/Lib/difflib.py
index 18801a9b19e..487936dbf47 100644
--- a/Lib/difflib.py
+++ b/Lib/difflib.py
@@ -78,8 +78,8 @@ class SequenceMatcher:
sequences. As a rule of thumb, a .ratio() value over 0.6 means the
sequences are close matches:
- >>> print(round(s.ratio(), 3))
- 0.866
+ >>> print(round(s.ratio(), 2))
+ 0.87
>>>
If you're only interested in where the sequences match,
diff --git a/Lib/encodings/idna.py b/Lib/encodings/idna.py
index 60a8d5eb227..0c90b4c9fe1 100644
--- a/Lib/encodings/idna.py
+++ b/Lib/encodings/idna.py
@@ -316,7 +316,7 @@ class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
def _buffer_decode(self, input, errors, final):
if errors != 'strict':
- raise UnicodeError("Unsupported error handling: {errors}")
+ raise UnicodeError(f"Unsupported error handling: {errors}")
if not input:
return ("", 0)
diff --git a/Lib/encodings/palmos.py b/Lib/encodings/palmos.py
index c506d654523..df164ca5b95 100644
--- a/Lib/encodings/palmos.py
+++ b/Lib/encodings/palmos.py
@@ -201,7 +201,7 @@ decoding_table = (
'\u02dc' # 0x98 -> SMALL TILDE
'\u2122' # 0x99 -> TRADE MARK SIGN
'\u0161' # 0x9A -> LATIN SMALL LETTER S WITH CARON
- '\x9b' # 0x9B -> <control>
+ '\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
'\u0153' # 0x9C -> LATIN SMALL LIGATURE OE
'\x9d' # 0x9D -> <control>
'\x9e' # 0x9E -> <control>
diff --git a/Lib/fractions.py b/Lib/fractions.py
index cb05ae7c200..c1b12e7a1c0 100644
--- a/Lib/fractions.py
+++ b/Lib/fractions.py
@@ -170,7 +170,11 @@ _FLOAT_FORMAT_SPECIFICATION_MATCHER = re.compile(r"""
(?P<zeropad>0(?=[0-9]))?
(?P<minimumwidth>[0-9]+)?
(?P<thousands_sep>[,_])?
- (?:\.(?P<precision>[0-9]+))?
+ (?:\.
+ (?=[,_0-9]) # lookahead for digit or separator
+ (?P<precision>[0-9]+)?
+ (?P<frac_separators>[,_])?
+ )?
(?P<presentation_type>[eEfFgG%])
""", re.DOTALL | re.VERBOSE).fullmatch
@@ -499,11 +503,15 @@ class Fraction(numbers.Rational):
minimumwidth = int(match["minimumwidth"] or "0")
thousands_sep = match["thousands_sep"]
precision = int(match["precision"] or "6")
+ frac_sep = match["frac_separators"] or ""
presentation_type = match["presentation_type"]
trim_zeros = presentation_type in "gG" and not alternate_form
trim_point = not alternate_form
exponent_indicator = "E" if presentation_type in "EFG" else "e"
+ if align == '=' and fill == '0':
+ zeropad = True
+
# Round to get the digits we need, figure out where to place the point,
# and decide whether to use scientific notation. 'point_pos' is the
# relative to the _end_ of the digit string: that is, it's the number
@@ -552,6 +560,9 @@ class Fraction(numbers.Rational):
if trim_zeros:
frac_part = frac_part.rstrip("0")
separator = "" if trim_point and not frac_part else "."
+ if frac_sep:
+ frac_part = frac_sep.join(frac_part[pos:pos + 3]
+ for pos in range(0, len(frac_part), 3))
trailing = separator + frac_part + suffix
# Do zero padding if required.
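
The same fractional-separator field is wired into ``Fraction.__format__`` here; a short sketch, with the output being the expected grouping rather than a verified result:

```python
from fractions import Fraction

x = Fraction(22, 7)
print(f"{x:.9_f}")   # expected: 3.142_857_143
```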
diff --git a/Lib/html/parser.py b/Lib/html/parser.py
index ba416e7fa6e..9b4f0959913 100644
--- a/Lib/html/parser.py
+++ b/Lib/html/parser.py
@@ -29,17 +29,46 @@ attr_charref = re.compile(r'&(#[0-9]+|#[xX][0-9a-fA-F]+|[a-zA-Z][a-zA-Z0-9]*)[;=
starttagopen = re.compile('<[a-zA-Z]')
endtagopen = re.compile('</[a-zA-Z]')
piclose = re.compile('>')
-commentclose = re.compile(r'--\s*>')
+commentclose = re.compile(r'--!?>')
+commentabruptclose = re.compile(r'-?>')
# Note:
-# 1) if you change tagfind/attrfind remember to update locatestarttagend too;
-# 2) if you change tagfind/attrfind and/or locatestarttagend the parser will
+# 1) if you change tagfind/attrfind remember to update locatetagend too;
+# 2) if you change tagfind/attrfind and/or locatetagend the parser will
# explode, so don't do it.
-# see http://www.w3.org/TR/html5/tokenization.html#tag-open-state
-# and http://www.w3.org/TR/html5/tokenization.html#tag-name-state
-tagfind_tolerant = re.compile(r'([a-zA-Z][^\t\n\r\f />\x00]*)(?:\s|/(?!>))*')
-attrfind_tolerant = re.compile(
- r'((?<=[\'"\s/])[^\s/>][^\s/=>]*)(\s*=+\s*'
- r'(\'[^\']*\'|"[^"]*"|(?![\'"])[^>\s]*))?(?:\s|/(?!>))*')
+# see the HTML5 specs section "13.2.5.6 Tag open state",
+# "13.2.5.8 Tag name state" and "13.2.5.33 Attribute name state".
+# https://html.spec.whatwg.org/multipage/parsing.html#tag-open-state
+# https://html.spec.whatwg.org/multipage/parsing.html#tag-name-state
+# https://html.spec.whatwg.org/multipage/parsing.html#attribute-name-state
+tagfind_tolerant = re.compile(r'([a-zA-Z][^\t\n\r\f />]*)(?:[\t\n\r\f ]|/(?!>))*')
+attrfind_tolerant = re.compile(r"""
+ (
+ (?<=['"\t\n\r\f /])[^\t\n\r\f />][^\t\n\r\f /=>]* # attribute name
+ )
+ (= # value indicator
+ ('[^']*' # LITA-enclosed value
+ |"[^"]*" # LIT-enclosed value
+ |(?!['"])[^>\t\n\r\f ]* # bare value
+ )
+ )?
+ (?:[\t\n\r\f ]|/(?!>))* # possibly followed by a space
+""", re.VERBOSE)
+locatetagend = re.compile(r"""
+ [a-zA-Z][^\t\n\r\f />]* # tag name
+ [\t\n\r\f /]* # optional whitespace before attribute name
+ (?:(?<=['"\t\n\r\f /])[^\t\n\r\f />][^\t\n\r\f /=>]* # attribute name
+ (?:= # value indicator
+ (?:'[^']*' # LITA-enclosed value
+ |"[^"]*" # LIT-enclosed value
+ |(?!['"])[^>\t\n\r\f ]* # bare value
+ )
+ )?
+ [\t\n\r\f /]* # possibly followed by a space
+ )*
+ >?
+""", re.VERBOSE)
+# The following variables are not used, but are temporarily left for
+# backward compatibility.
locatestarttagend_tolerant = re.compile(r"""
<[a-zA-Z][^\t\n\r\f />\x00]* # tag name
(?:[\s/]* # optional whitespace before attribute name
@@ -56,8 +85,6 @@ locatestarttagend_tolerant = re.compile(r"""
\s* # trailing whitespace
""", re.VERBOSE)
endendtag = re.compile('>')
-# the HTML 5 spec, section 8.1.2.2, doesn't allow spaces between
-# </ and the tag name, so maybe this should be fixed
endtagfind = re.compile(r'</\s*([a-zA-Z][-.a-zA-Z0-9:_]*)\s*>')
# Character reference processing logic specific to attribute values
@@ -141,7 +168,8 @@ class HTMLParser(_markupbase.ParserBase):
def set_cdata_mode(self, elem):
self.cdata_elem = elem.lower()
- self.interesting = re.compile(r'</\s*%s\s*>' % self.cdata_elem, re.I)
+ self.interesting = re.compile(r'</%s(?=[\t\n\r\f />])' % self.cdata_elem,
+ re.IGNORECASE|re.ASCII)
def clear_cdata_mode(self):
self.interesting = interesting_normal
@@ -166,7 +194,7 @@ class HTMLParser(_markupbase.ParserBase):
# & near the end and see if it's followed by a space or ;.
amppos = rawdata.rfind('&', max(i, n-34))
if (amppos >= 0 and
- not re.compile(r'[\s;]').search(rawdata, amppos)):
+ not re.compile(r'[\t\n\r\f ;]').search(rawdata, amppos)):
break # wait till we get all the text
j = n
else:
@@ -309,8 +337,23 @@ class HTMLParser(_markupbase.ParserBase):
else:
return self.parse_bogus_comment(i)
+ # Internal -- parse comment, return length or -1 if not terminated
+ # see https://html.spec.whatwg.org/multipage/parsing.html#comment-start-state
+ def parse_comment(self, i, report=True):
+ rawdata = self.rawdata
+ assert rawdata.startswith('<!--', i), 'unexpected call to parse_comment()'
+ match = commentclose.search(rawdata, i+4)
+ if not match:
+ match = commentabruptclose.match(rawdata, i+4)
+ if not match:
+ return -1
+ if report:
+ j = match.start()
+ self.handle_comment(rawdata[i+4: j])
+ return match.end()
+
# Internal -- parse bogus comment, return length or -1 if not terminated
- # see http://www.w3.org/TR/html5/tokenization.html#bogus-comment-state
+ # see https://html.spec.whatwg.org/multipage/parsing.html#bogus-comment-state
def parse_bogus_comment(self, i, report=1):
rawdata = self.rawdata
assert rawdata[i:i+2] in ('<!', '</'), ('unexpected call to '
@@ -336,6 +379,8 @@ class HTMLParser(_markupbase.ParserBase):
# Internal -- handle starttag, return end or -1 if not terminated
def parse_starttag(self, i):
+ # See the HTML5 specs section "13.2.5.8 Tag name state"
+ # https://html.spec.whatwg.org/multipage/parsing.html#tag-name-state
self.__starttag_text = None
endpos = self.check_for_whole_start_tag(i)
if endpos < 0:
@@ -381,76 +426,42 @@ class HTMLParser(_markupbase.ParserBase):
# or -1 if incomplete.
def check_for_whole_start_tag(self, i):
rawdata = self.rawdata
- m = locatestarttagend_tolerant.match(rawdata, i)
- if m:
- j = m.end()
- next = rawdata[j:j+1]
- if next == ">":
- return j + 1
- if next == "/":
- if rawdata.startswith("/>", j):
- return j + 2
- if rawdata.startswith("/", j):
- # buffer boundary
- return -1
- # else bogus input
- if j > i:
- return j
- else:
- return i + 1
- if next == "":
- # end of input
- return -1
- if next in ("abcdefghijklmnopqrstuvwxyz=/"
- "ABCDEFGHIJKLMNOPQRSTUVWXYZ"):
- # end of input in or before attribute value, or we have the
- # '/' from a '/>' ending
- return -1
- if j > i:
- return j
- else:
- return i + 1
- raise AssertionError("we should not get here!")
+ match = locatetagend.match(rawdata, i+1)
+ assert match
+ j = match.end()
+ if rawdata[j-1] != ">":
+ return -1
+ return j
# Internal -- parse endtag, return end or -1 if incomplete
def parse_endtag(self, i):
+ # See the HTML5 specs section "13.2.5.7 End tag open state"
+ # https://html.spec.whatwg.org/multipage/parsing.html#end-tag-open-state
rawdata = self.rawdata
assert rawdata[i:i+2] == "</", "unexpected call to parse_endtag"
- match = endendtag.search(rawdata, i+1) # >
- if not match:
+ if rawdata.find('>', i+2) < 0: # fast check
return -1
- gtpos = match.end()
- match = endtagfind.match(rawdata, i) # </ + tag + >
- if not match:
- if self.cdata_elem is not None:
- self.handle_data(rawdata[i:gtpos])
- return gtpos
- # find the name: w3.org/TR/html5/tokenization.html#tag-name-state
- namematch = tagfind_tolerant.match(rawdata, i+2)
- if not namematch:
- # w3.org/TR/html5/tokenization.html#end-tag-open-state
- if rawdata[i:i+3] == '</>':
- return i+3
- else:
- return self.parse_bogus_comment(i)
- tagname = namematch.group(1).lower()
- # consume and ignore other stuff between the name and the >
- # Note: this is not 100% correct, since we might have things like
- # </tag attr=">">, but looking for > after the name should cover
- # most of the cases and is much simpler
- gtpos = rawdata.find('>', namematch.end())
- self.handle_endtag(tagname)
- return gtpos+1
+ if not endtagopen.match(rawdata, i): # </ + letter
+ if rawdata[i+2:i+3] == '>': # </> is ignored
+ # "missing-end-tag-name" parser error
+ return i+3
+ else:
+ return self.parse_bogus_comment(i)
- elem = match.group(1).lower() # script or style
- if self.cdata_elem is not None:
- if elem != self.cdata_elem:
- self.handle_data(rawdata[i:gtpos])
- return gtpos
+ match = locatetagend.match(rawdata, i+2)
+ assert match
+ j = match.end()
+ if rawdata[j-1] != ">":
+ return -1
- self.handle_endtag(elem)
+ # find the name: "13.2.5.8 Tag name state"
+ # https://html.spec.whatwg.org/multipage/parsing.html#tag-name-state
+ match = tagfind_tolerant.match(rawdata, i+2)
+ assert match
+ tag = match.group(1).lower()
+ self.handle_endtag(tag)
self.clear_cdata_mode()
- return gtpos
+ return j
# Overridable -- finish processing of start+end tag: <tag.../>
def handle_startendtag(self, tag, attrs):
diff --git a/Lib/idlelib/configdialog.py b/Lib/idlelib/configdialog.py
index 4d2adb48570..e618ef07a90 100644
--- a/Lib/idlelib/configdialog.py
+++ b/Lib/idlelib/configdialog.py
@@ -435,7 +435,7 @@ class FontPage(Frame):
self.font_name.set(font.lower())
def set_samples(self, event=None):
- """Update update both screen samples with the font settings.
+ """Update both screen samples with the font settings.
Called on font initialization and change events.
Accesses font_name, font_size, and font_bold Variables.
diff --git a/Lib/idlelib/debugger.py b/Lib/idlelib/debugger.py
index d90dbcd11f9..1fae1d4b0ad 100644
--- a/Lib/idlelib/debugger.py
+++ b/Lib/idlelib/debugger.py
@@ -1,6 +1,6 @@
"""Debug user code with a GUI interface to a subclass of bdb.Bdb.
-The Idb idb and Debugger gui instances each need a reference to each
+The Idb instance 'idb' and Debugger instance 'gui' need references to each
other or to an rpc proxy for each other.
If IDLE is started with '-n', so that user code and idb both run in the
diff --git a/Lib/idlelib/editor.py b/Lib/idlelib/editor.py
index c76db20c587..17b498f63ba 100644
--- a/Lib/idlelib/editor.py
+++ b/Lib/idlelib/editor.py
@@ -1649,7 +1649,7 @@ class IndentSearcher:
self.finished = 1
def run(self):
- """Return 2 lines containing block opener and and indent.
+ """Return 2 lines containing block opener and indent.
Either the indent line or both may be None.
"""
diff --git a/Lib/idlelib/idle_test/htest.py b/Lib/idlelib/idle_test/htest.py
index a7293774eec..b63ff9ec287 100644
--- a/Lib/idlelib/idle_test/htest.py
+++ b/Lib/idlelib/idle_test/htest.py
@@ -337,7 +337,7 @@ _tree_widget_spec = {
'file': 'tree',
'kwds': {},
'msg': "The canvas is scrollable.\n"
- "Click on folders up to to the lowest level."
+ "Click on folders up to the lowest level."
}
_undo_delegator_spec = {
diff --git a/Lib/os.py b/Lib/os.py
index 643a7b2f581..12926c832f5 100644
--- a/Lib/os.py
+++ b/Lib/os.py
@@ -10,7 +10,7 @@ This exports:
- os.extsep is the extension separator (always '.')
- os.altsep is the alternate pathname separator (None or '/')
- os.pathsep is the component separator used in $PATH etc
- - os.linesep is the line separator in text files ('\r' or '\n' or '\r\n')
+ - os.linesep is the line separator in text files ('\n' or '\r\n')
- os.defpath is the default search path for executables
- os.devnull is the file path of the null device ('/dev/null', etc.)
diff --git a/Lib/platform.py b/Lib/platform.py
index e7f180fc5ac..da15bb4717b 100644
--- a/Lib/platform.py
+++ b/Lib/platform.py
@@ -612,6 +612,9 @@ def system_alias(system, release, version):
### Various internal helpers
+# Table for cleaning up characters in filenames.
+_SIMPLE_SUBSTITUTIONS = str.maketrans(r' /\:;"()', r'_-------')
+
def _platform(*args):
""" Helper to format the platform string in a filename
@@ -621,28 +624,13 @@ def _platform(*args):
platform = '-'.join(x.strip() for x in filter(len, args))
# Cleanup some possible filename obstacles...
- platform = platform.replace(' ', '_')
- platform = platform.replace('/', '-')
- platform = platform.replace('\\', '-')
- platform = platform.replace(':', '-')
- platform = platform.replace(';', '-')
- platform = platform.replace('"', '-')
- platform = platform.replace('(', '-')
- platform = platform.replace(')', '-')
+ platform = platform.translate(_SIMPLE_SUBSTITUTIONS)
# No need to report 'unknown' information...
platform = platform.replace('unknown', '')
# Fold '--'s and remove trailing '-'
- while True:
- cleaned = platform.replace('--', '-')
- if cleaned == platform:
- break
- platform = cleaned
- while platform and platform[-1] == '-':
- platform = platform[:-1]
-
- return platform
+ return re.sub(r'-{2,}', '-', platform).rstrip('-')
def _node(default=''):
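For illustration only, a standalone sketch of the cleanup strategy the rewritten _platform() body now uses: one translation table plus one regular expression in place of the chained str.replace() calls. The function name below is hypothetical.

    import re

    # Same mapping as the new module-level table: space -> underscore,
    # the other filename-hostile characters -> hyphen.
    _SUBSTITUTIONS = str.maketrans(r' /\:;"()', r'_-------')

    def clean_platform_string(*parts):
        text = '-'.join(p.strip() for p in parts if p)
        text = text.translate(_SUBSTITUTIONS)
        text = text.replace('unknown', '')
        # Collapse runs of hyphens and drop trailing ones in a single pass.
        return re.sub(r'-{2,}', '-', text).rstrip('-')

    print(clean_platform_string('Linux', '', 'unknown', '5.15.0'))   # Linux-5.15.0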
diff --git a/Lib/sre_compile.py b/Lib/sre_compile.py
deleted file mode 100644
index f9da61e6487..00000000000
--- a/Lib/sre_compile.py
+++ /dev/null
@@ -1,7 +0,0 @@
-import warnings
-warnings.warn(f"module {__name__!r} is deprecated",
- DeprecationWarning,
- stacklevel=2)
-
-from re import _compiler as _
-globals().update({k: v for k, v in vars(_).items() if k[:2] != '__'})
diff --git a/Lib/sre_constants.py b/Lib/sre_constants.py
deleted file mode 100644
index fa09d044292..00000000000
--- a/Lib/sre_constants.py
+++ /dev/null
@@ -1,7 +0,0 @@
-import warnings
-warnings.warn(f"module {__name__!r} is deprecated",
- DeprecationWarning,
- stacklevel=2)
-
-from re import _constants as _
-globals().update({k: v for k, v in vars(_).items() if k[:2] != '__'})
diff --git a/Lib/sre_parse.py b/Lib/sre_parse.py
deleted file mode 100644
index 25a3f557d44..00000000000
--- a/Lib/sre_parse.py
+++ /dev/null
@@ -1,7 +0,0 @@
-import warnings
-warnings.warn(f"module {__name__!r} is deprecated",
- DeprecationWarning,
- stacklevel=2)
-
-from re import _parser as _
-globals().update({k: v for k, v in vars(_).items() if k[:2] != '__'})
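All three deleted files were identical seven-line shims. As a hedged migration note: code that still imports them now gets ImportError rather than a DeprecationWarning plus a re-export of re's private submodules, and the public re API is the supported replacement.

    # Importing a removed shim now fails outright instead of warning.
    try:
        import sre_compile   # deleted by this change
    except ImportError:
        sre_compile = None

    # Use the public re module instead of the private internals the shims exposed.
    import re
    print(re.compile(r"[a-z]+").fullmatch("abc") is not None)   # True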
diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py
index 51c0ce11e82..fd39d3f7c95 100644
--- a/Lib/test/support/__init__.py
+++ b/Lib/test/support/__init__.py
@@ -2333,6 +2333,7 @@ def check_disallow_instantiation(testcase, tp, *args, **kwds):
qualname = f"{name}"
msg = f"cannot create '{re.escape(qualname)}' instances"
testcase.assertRaisesRegex(TypeError, msg, tp, *args, **kwds)
+ testcase.assertRaisesRegex(TypeError, msg, tp.__new__, tp, *args, **kwds)
def get_recursion_depth():
"""Get the recursion depth of the caller function.
diff --git a/Lib/test/support/channels.py b/Lib/test/support/channels.py
index b2de24d9d3e..fab1797659b 100644
--- a/Lib/test/support/channels.py
+++ b/Lib/test/support/channels.py
@@ -105,12 +105,8 @@ class _ChannelEnd:
return other._id == self._id
# for pickling:
- def __getnewargs__(self):
- return (int(self._id),)
-
- # for pickling:
- def __getstate__(self):
- return None
+ def __reduce__(self):
+ return (type(self), (int(self._id),))
@property
def id(self):
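A small self-contained sketch of the pickling idiom the channel end switches to: a single __reduce__ returning (callable, args) replaces the __getnewargs__/__getstate__ pair and behaves the same under every pickle protocol. The Handle class is illustrative, not the real _ChannelEnd.

    import pickle

    class Handle:
        def __init__(self, id):
            self._id = int(id)

        def __eq__(self, other):
            return isinstance(other, Handle) and other._id == self._id

        # One hook covers reconstruction: unpickling calls Handle(self._id).
        def __reduce__(self):
            return (type(self), (int(self._id),))

    h = Handle(7)
    for proto in range(pickle.HIGHEST_PROTOCOL + 1):
        assert pickle.loads(pickle.dumps(h, proto)) == h
    print("round-trips under all pickle protocols")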
diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py
index 2ca5c4c6719..22ae0ef3581 100644
--- a/Lib/test/test_asyncio/test_base_events.py
+++ b/Lib/test/test_asyncio/test_base_events.py
@@ -24,6 +24,10 @@ import warnings
MOCK_ANY = mock.ANY
+class CustomError(Exception):
+ pass
+
+
def tearDownModule():
asyncio._set_event_loop_policy(None)
@@ -146,6 +150,29 @@ class BaseEventTests(test_utils.TestCase):
socket.SOCK_STREAM,
socket.IPPROTO_TCP))
+ def test_interleave_addrinfos(self):
+ self.maxDiff = None
+ SIX_A = (socket.AF_INET6, 0, 0, '', ('2001:db8::1', 1))
+ SIX_B = (socket.AF_INET6, 0, 0, '', ('2001:db8::2', 2))
+ SIX_C = (socket.AF_INET6, 0, 0, '', ('2001:db8::3', 3))
+ SIX_D = (socket.AF_INET6, 0, 0, '', ('2001:db8::4', 4))
+ FOUR_A = (socket.AF_INET, 0, 0, '', ('192.0.2.1', 5))
+ FOUR_B = (socket.AF_INET, 0, 0, '', ('192.0.2.2', 6))
+ FOUR_C = (socket.AF_INET, 0, 0, '', ('192.0.2.3', 7))
+ FOUR_D = (socket.AF_INET, 0, 0, '', ('192.0.2.4', 8))
+
+ addrinfos = [SIX_A, SIX_B, SIX_C, FOUR_A, FOUR_B, FOUR_C, FOUR_D, SIX_D]
+ expected = [SIX_A, FOUR_A, SIX_B, FOUR_B, SIX_C, FOUR_C, SIX_D, FOUR_D]
+
+ self.assertEqual(expected, base_events._interleave_addrinfos(addrinfos))
+
+ expected_fafc_2 = [SIX_A, SIX_B, FOUR_A, SIX_C, FOUR_B, SIX_D, FOUR_C, FOUR_D]
+ self.assertEqual(
+ expected_fafc_2,
+ base_events._interleave_addrinfos(addrinfos, first_address_family_count=2),
+ )
+
+
class BaseEventLoopTests(test_utils.TestCase):
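To make the orderings asserted in test_interleave_addrinfos concrete, here is a hedged, standalone re-implementation of the grouping that asyncio's private _interleave_addrinfos performs (reorder_addrinfos is a made-up name). It reproduces both expected sequences from the test.

    import itertools

    def reorder_addrinfos(addrinfos, first_address_family_count=1):
        """Interleave getaddrinfo() results by address family, optionally
        letting the first family contribute extra addresses up front."""
        groups = {}                      # family -> addresses, family order preserved
        for info in addrinfos:
            groups.setdefault(info[0], []).append(info)
        reordered = []
        if first_address_family_count > 1:
            first = next(iter(groups.values()))
            reordered.extend(first[:first_address_family_count - 1])
            del first[:first_address_family_count - 1]
        # Round-robin across the families, skipping exhausted ones.
        for batch in itertools.zip_longest(*groups.values()):
            reordered.extend(a for a in batch if a is not None)
        return reordered

    six = [('AF_INET6', 0, 0, '', (f'2001:db8::{i}', i)) for i in (1, 2, 3, 4)]
    four = [('AF_INET', 0, 0, '', (f'192.0.2.{i}', i + 4)) for i in (1, 2, 3, 4)]
    mixed = six[:3] + four + six[3:]
    print(reorder_addrinfos(mixed))                                  # strict alternation
    print(reorder_addrinfos(mixed, first_address_family_count=2))    # IPv6 keeps a head start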
@@ -1049,6 +1076,71 @@ class BaseEventLoopTests(test_utils.TestCase):
test_utils.run_briefly(self.loop)
self.assertTrue(status['finalized'])
+ @unittest.skipUnless(socket_helper.IPV6_ENABLED, 'no IPv6 support')
+ @patch_socket
+ def test_create_connection_happy_eyeballs(self, m_socket):
+
+ class MyProto(asyncio.Protocol):
+ pass
+
+ async def getaddrinfo(*args, **kw):
+ return [(socket.AF_INET6, 0, 0, '', ('2001:db8::1', 1)),
+ (socket.AF_INET, 0, 0, '', ('192.0.2.1', 5))]
+
+ async def sock_connect(sock, address):
+ if address[0] == '2001:db8::1':
+ await asyncio.sleep(1)
+ sock.connect(address)
+
+ loop = asyncio.new_event_loop()
+ loop._add_writer = mock.Mock()
+ loop._add_writer = mock.Mock()
+ loop._add_reader = mock.Mock()
+ loop.getaddrinfo = getaddrinfo
+ loop.sock_connect = sock_connect
+
+ coro = loop.create_connection(MyProto, 'example.com', 80, happy_eyeballs_delay=0.3)
+ transport, protocol = loop.run_until_complete(coro)
+ try:
+ sock = transport._sock
+ sock.connect.assert_called_with(('192.0.2.1', 5))
+ finally:
+ transport.close()
+ test_utils.run_briefly(loop) # allow transport to close
+ loop.close()
+
+ @patch_socket
+ def test_create_connection_happy_eyeballs_ipv4_only(self, m_socket):
+
+ class MyProto(asyncio.Protocol):
+ pass
+
+ async def getaddrinfo(*args, **kw):
+ return [(socket.AF_INET, 0, 0, '', ('192.0.2.1', 5)),
+ (socket.AF_INET, 0, 0, '', ('192.0.2.2', 6))]
+
+ async def sock_connect(sock, address):
+ if address[0] == '192.0.2.1':
+ await asyncio.sleep(1)
+ sock.connect(address)
+
+ loop = asyncio.new_event_loop()
+ loop._add_writer = mock.Mock()
+ loop._add_writer = mock.Mock()
+ loop._add_reader = mock.Mock()
+ loop.getaddrinfo = getaddrinfo
+ loop.sock_connect = sock_connect
+
+ coro = loop.create_connection(MyProto, 'example.com', 80, happy_eyeballs_delay=0.3)
+ transport, protocol = loop.run_until_complete(coro)
+ try:
+ sock = transport._sock
+ sock.connect.assert_called_with(('192.0.2.2', 6))
+ finally:
+ transport.close()
+ test_utils.run_briefly(loop) # allow transport to close
+ loop.close()
+
class MyProto(asyncio.Protocol):
done = None
@@ -1190,6 +1282,36 @@ class BaseEventLoopWithSelectorTests(test_utils.TestCase):
self.loop.run_until_complete(coro)
self.assertTrue(sock.close.called)
+ @patch_socket
+ def test_create_connection_happy_eyeballs_empty_exceptions(self, m_socket):
+ # See gh-135836: Fix IndexError when the Happy Eyeballs algorithm
+ # results in an empty exceptions list
+
+ async def getaddrinfo(*args, **kw):
+ return [(socket.AF_INET, socket.SOCK_STREAM, 0, '', ('127.0.0.1', 80)),
+ (socket.AF_INET6, socket.SOCK_STREAM, 0, '', ('::1', 80))]
+
+ def getaddrinfo_task(*args, **kwds):
+ return self.loop.create_task(getaddrinfo(*args, **kwds))
+
+ self.loop.getaddrinfo = getaddrinfo_task
+
+ # Mock staggered_race to return an empty exceptions list.
+ # This simulates the scenario where the Happy Eyeballs algorithm
+ # cancels all attempts but doesn't properly collect exceptions.
+ with mock.patch('asyncio.staggered.staggered_race') as mock_staggered:
+ # Return (None, []) - no winner, empty exceptions list
+ async def mock_race(coro_fns, delay, loop):
+ return None, []
+ mock_staggered.side_effect = mock_race
+
+ coro = self.loop.create_connection(
+ MyProto, 'example.com', 80, happy_eyeballs_delay=0.1)
+
+ # Should raise TimeoutError instead of IndexError
+ with self.assertRaisesRegex(TimeoutError, "create_connection failed"):
+ self.loop.run_until_complete(coro)
+
def test_create_connection_host_port_sock(self):
coro = self.loop.create_connection(
MyProto, 'example.com', 80, sock=object())
@@ -1296,6 +1418,31 @@ class BaseEventLoopWithSelectorTests(test_utils.TestCase):
self.assertEqual(len(cm.exception.exceptions), 1)
self.assertIsInstance(cm.exception.exceptions[0], OSError)
+ @patch_socket
+ def test_create_connection_connect_non_os_err_close_err(self, m_socket):
+ # Test the case when sock_connect() raises non-OSError exception
+ # and sock.close() raises OSError.
+ async def getaddrinfo(*args, **kw):
+ return [(2, 1, 6, '', ('107.6.106.82', 80))]
+
+ def getaddrinfo_task(*args, **kwds):
+ return self.loop.create_task(getaddrinfo(*args, **kwds))
+
+ self.loop.getaddrinfo = getaddrinfo_task
+ self.loop.sock_connect = mock.Mock()
+ self.loop.sock_connect.side_effect = CustomError
+ sock = mock.Mock()
+ m_socket.socket.return_value = sock
+ sock.close.side_effect = OSError
+
+ coro = self.loop.create_connection(MyProto, 'example.com', 80)
+ self.assertRaises(
+ CustomError, self.loop.run_until_complete, coro)
+
+ coro = self.loop.create_connection(MyProto, 'example.com', 80, all_errors=True)
+ self.assertRaises(
+ CustomError, self.loop.run_until_complete, coro)
+
def test_create_connection_multiple(self):
async def getaddrinfo(*args, **kw):
return [(2, 1, 6, '', ('0.0.0.1', 80)),
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
index d221aa5e1d9..14fe3355239 100644
--- a/Lib/test/test_builtin.py
+++ b/Lib/test/test_builtin.py
@@ -2991,7 +2991,8 @@ class TestType(unittest.TestCase):
def load_tests(loader, tests, pattern):
from doctest import DocTestSuite
- tests.addTest(DocTestSuite(builtins))
+ if sys.float_repr_style == 'short':
+ tests.addTest(DocTestSuite(builtins))
return tests
if __name__ == "__main__":
diff --git a/Lib/test/test_capi/test_abstract.py b/Lib/test/test_capi/test_abstract.py
index 7d548ae87c0..3a2ed9f5db8 100644
--- a/Lib/test/test_capi/test_abstract.py
+++ b/Lib/test/test_capi/test_abstract.py
@@ -1077,6 +1077,31 @@ class CAPITest(unittest.TestCase):
with self.assertRaisesRegex(TypeError, regex):
PyIter_NextItem(10)
+ def test_object_setattr_null_exc(self):
+ class Obj:
+ pass
+ obj = Obj()
+ obj.attr = 123
+
+ exc = ValueError("error")
+ with self.assertRaises(SystemError) as cm:
+ _testcapi.object_setattr_null_exc(obj, 'attr', exc)
+ self.assertIs(cm.exception.__context__, exc)
+ self.assertIsNone(cm.exception.__cause__)
+ self.assertHasAttr(obj, 'attr')
+
+ with self.assertRaises(SystemError) as cm:
+ _testcapi.object_setattrstring_null_exc(obj, 'attr', exc)
+ self.assertIs(cm.exception.__context__, exc)
+ self.assertIsNone(cm.exception.__cause__)
+ self.assertHasAttr(obj, 'attr')
+
+ with self.assertRaises(SystemError) as cm:
+ # undecodable name
+ _testcapi.object_setattrstring_null_exc(obj, b'\xff', exc)
+ self.assertIs(cm.exception.__context__, exc)
+ self.assertIsNone(cm.exception.__cause__)
+
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_capi/test_opt.py b/Lib/test/test_capi/test_opt.py
index e4c9a463855..7be1c9eebb3 100644
--- a/Lib/test/test_capi/test_opt.py
+++ b/Lib/test/test_capi/test_opt.py
@@ -2451,6 +2451,21 @@ class TestUopsOptimization(unittest.TestCase):
self.assertNotIn("_GUARD_TOS_FLOAT", uops)
self.assertNotIn("_GUARD_NOS_FLOAT", uops)
+ def test_binary_op_constant_evaluate(self):
+ def testfunc(n):
+ for _ in range(n):
+ 2 ** 65
+
+ testfunc(TIER2_THRESHOLD)
+
+ ex = get_first_executor(testfunc)
+ self.assertIsNotNone(ex)
+ uops = get_opnames(ex)
+
+ # For now... until we constant propagate it away.
+ self.assertIn("_BINARY_OP", uops)
+
+
def global_identity(x):
return x
diff --git a/Lib/test/test_configparser.py b/Lib/test/test_configparser.py
index 23904d17d32..e7364e18742 100644
--- a/Lib/test/test_configparser.py
+++ b/Lib/test/test_configparser.py
@@ -986,12 +986,12 @@ class ConfigParserTestCase(BasicTestCase, unittest.TestCase):
def test_defaults_keyword(self):
"""bpo-23835 fix for ConfigParser"""
- cf = self.newconfig(defaults={1: 2.4})
- self.assertEqual(cf[self.default_section]['1'], '2.4')
- self.assertAlmostEqual(cf[self.default_section].getfloat('1'), 2.4)
- cf = self.newconfig(defaults={"A": 5.2})
- self.assertEqual(cf[self.default_section]['a'], '5.2')
- self.assertAlmostEqual(cf[self.default_section].getfloat('a'), 5.2)
+ cf = self.newconfig(defaults={1: 2.5})
+ self.assertEqual(cf[self.default_section]['1'], '2.5')
+ self.assertAlmostEqual(cf[self.default_section].getfloat('1'), 2.5)
+ cf = self.newconfig(defaults={"A": 5.25})
+ self.assertEqual(cf[self.default_section]['a'], '5.25')
+ self.assertAlmostEqual(cf[self.default_section].getfloat('a'), 5.25)
class ConfigParserTestCaseNoInterpolation(BasicTestCase, unittest.TestCase):
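The constants change from 2.4/5.2 to 2.5/5.25 simply so the expected strings stay exact regardless of float repr style; the defaults behaviour itself is unchanged. A brief sketch, using only the public configparser API, of what the keyword does:

    import configparser

    # Non-string keys and values passed via defaults= are stringified into DEFAULT.
    cf = configparser.ConfigParser(defaults={1: 2.5, "A": 5.25})
    assert cf[configparser.DEFAULTSECT]["1"] == "2.5"
    assert cf["DEFAULT"]["a"] == "5.25"            # option names are lower-cased
    assert cf["DEFAULT"].getfloat("a") == 5.25
    print("defaults coerced to strings as expected")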
diff --git a/Lib/test/test_ctypes/test_parameters.py b/Lib/test/test_ctypes/test_parameters.py
index f89521cf8b3..46f8ff93efa 100644
--- a/Lib/test/test_ctypes/test_parameters.py
+++ b/Lib/test/test_ctypes/test_parameters.py
@@ -1,3 +1,4 @@
+import sys
import unittest
import test.support
from ctypes import (CDLL, PyDLL, ArgumentError,
@@ -240,7 +241,8 @@ class SimpleTypesTestCase(unittest.TestCase):
self.assertRegex(repr(c_ulonglong.from_param(20000)), r"^<cparam '[LIQ]' \(20000\)>$")
self.assertEqual(repr(c_float.from_param(1.5)), "<cparam 'f' (1.5)>")
self.assertEqual(repr(c_double.from_param(1.5)), "<cparam 'd' (1.5)>")
- self.assertEqual(repr(c_double.from_param(1e300)), "<cparam 'd' (1e+300)>")
+ if sys.float_repr_style == 'short':
+ self.assertEqual(repr(c_double.from_param(1e300)), "<cparam 'd' (1e+300)>")
self.assertRegex(repr(c_longdouble.from_param(1.5)), r"^<cparam ('d' \(1.5\)|'g' at 0x[A-Fa-f0-9]+)>$")
self.assertRegex(repr(c_char_p.from_param(b'hihi')), r"^<cparam 'z' \(0x[A-Fa-f0-9]+\)>$")
self.assertRegex(repr(c_wchar_p.from_param('hihi')), r"^<cparam 'Z' \(0x[A-Fa-f0-9]+\)>$")
diff --git a/Lib/test/test_dbm.py b/Lib/test/test_dbm.py
index 7e8d78b8940..ae9faabd536 100644
--- a/Lib/test/test_dbm.py
+++ b/Lib/test/test_dbm.py
@@ -274,7 +274,8 @@ class WhichDBTestCase(unittest.TestCase):
@unittest.skipUnless(ndbm, reason='Test requires ndbm')
def test_whichdb_ndbm(self):
# Issue 17198: check that ndbm which is referenced in whichdb is defined
- with open(_fname + '.db', 'wb'): pass
+ with open(_fname + '.db', 'wb') as f:
+ f.write(b'spam')
_bytes_fname = os.fsencode(_fname)
fnames = [_fname, os_helper.FakePath(_fname),
_bytes_fname, os_helper.FakePath(_bytes_fname)]
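For context, dbm.whichdb() guesses the backend from what is actually on disk, which is why the test now puts a few bytes into the fake '.db' file instead of leaving it empty. A hedged sketch of the probe using only the public dbm API (paths are illustrative):

    import dbm
    import os
    import tempfile

    with tempfile.TemporaryDirectory() as tmp:
        base = os.path.join(tmp, "spam")
        with dbm.open(base, "c") as db:    # whichever backend this build provides
            db[b"key"] = b"value"
        print(sorted(os.listdir(tmp)))     # the files whichdb() will sniff
        print(dbm.whichdb(base))           # e.g. 'dbm.sqlite3', 'dbm.gnu' or 'dbm.dumb'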
diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py
index ef64b878805..08a8f4c3b36 100644
--- a/Lib/test/test_decimal.py
+++ b/Lib/test/test_decimal.py
@@ -1089,6 +1089,15 @@ class FormatTest:
('07_', '1234.56', '1_234.56'),
('_', '1.23456789', '1.23456789'),
('_%', '123.456789', '12_345.6789%'),
+ # and now for something completely different...
+ ('.,', '1.23456789', '1.234,567,89'),
+ ('._', '1.23456789', '1.234_567_89'),
+ ('.6_f', '12345.23456789', '12345.234_568'),
+ (',._%', '123.456789', '12,345.678_9%'),
+ (',._e', '123456', '1.234_56e+5'),
+ (',.4_e', '123456', '1.234_6e+5'),
+ (',.3_e', '123456', '1.235e+5'),
+ (',._E', '123456', '1.234_56E+5'),
# negative zero: default behavior
('.1f', '-0', '-0.0'),
@@ -1162,6 +1171,10 @@ class FormatTest:
# bytes format argument
self.assertRaises(TypeError, Decimal(1).__format__, b'-020')
+ # precision or fractional part separator should follow after dot
+ self.assertRaises(ValueError, format, Decimal(1), '.f')
+ self.assertRaises(ValueError, format, Decimal(1), '._6f')
+
def test_negative_zero_format_directed_rounding(self):
with self.decimal.localcontext() as ctx:
ctx.rounding = ROUND_CEILING
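The added rows exercise the format-spec extension where a ',' or '_' placed after the '.' (and after the precision, if one is given) groups the fractional digits; the new ValueError cases reject putting the separator before the precision. A short sketch mirroring those expected strings:

    from decimal import Decimal

    print(format(Decimal("1.23456789"), "._"))        # 1.234_567_89
    print(format(Decimal("1.23456789"), ".,"))        # 1.234,567,89
    print(format(Decimal("12345.23456789"), ".6_f"))  # 12345.234_568
    print(format(Decimal("123456"), ",.4_e"))         # 1.234_6e+5

    # The separator may not precede the precision.
    try:
        format(Decimal(1), "._6f")
    except ValueError as exc:
        print("rejected:", exc)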
diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py
index 221f9db7763..bbc7630fa83 100644
--- a/Lib/test/test_enum.py
+++ b/Lib/test/test_enum.py
@@ -36,7 +36,7 @@ def load_tests(loader, tests, ignore):
optionflags=doctest.ELLIPSIS|doctest.NORMALIZE_WHITESPACE,
))
howto_tests = os.path.join(REPO_ROOT, 'Doc/howto/enum.rst')
- if os.path.exists(howto_tests):
+ if os.path.exists(howto_tests) and sys.float_repr_style == 'short':
tests.addTests(doctest.DocFileSuite(
howto_tests,
module_relative=False,
diff --git a/Lib/test/test_external_inspection.py b/Lib/test/test_external_inspection.py
index 90214e814f2..0f31c225e68 100644
--- a/Lib/test/test_external_inspection.py
+++ b/Lib/test/test_external_inspection.py
@@ -7,7 +7,7 @@ import socket
import threading
from asyncio import staggered, taskgroups, base_events, tasks
from unittest.mock import ANY
-from test.support import os_helper, SHORT_TIMEOUT, busy_retry
+from test.support import os_helper, SHORT_TIMEOUT, busy_retry, requires_gil_enabled
from test.support.script_helper import make_script
from test.support.socket_helper import find_unused_port
@@ -876,6 +876,126 @@ class TestGetStackTrace(unittest.TestCase):
],
)
+ @skip_if_not_supported
+ @unittest.skipIf(
+ sys.platform == "linux" and not PROCESS_VM_READV_SUPPORTED,
+ "Test only runs on Linux with process_vm_readv support",
+ )
+ @requires_gil_enabled("Free threaded builds don't have an 'active thread'")
+ def test_only_active_thread(self):
+ # Test that only_active_thread parameter works correctly
+ port = find_unused_port()
+ script = textwrap.dedent(
+ f"""\
+ import time, sys, socket, threading
+
+ # Connect to the test process
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.connect(('localhost', {port}))
+
+ def worker_thread(name, barrier, ready_event):
+ barrier.wait() # Synchronize thread start
+ ready_event.wait() # Wait for main thread signal
+ # Sleep to keep thread alive
+ time.sleep(10_000)
+
+ def main_work():
+ # Do busy work to hold the GIL
+ sock.sendall(b"working\\n")
+ count = 0
+ while count < 100000000:
+ count += 1
+ if count % 10000000 == 0:
+ pass # Keep main thread busy
+ sock.sendall(b"done\\n")
+
+ # Create synchronization primitives
+ num_threads = 3
+ barrier = threading.Barrier(num_threads + 1) # +1 for main thread
+ ready_event = threading.Event()
+
+ # Start worker threads
+ threads = []
+ for i in range(num_threads):
+ t = threading.Thread(target=worker_thread, args=(f"Worker-{{i}}", barrier, ready_event))
+ t.start()
+ threads.append(t)
+
+ # Wait for all threads to be ready
+ barrier.wait()
+
+ # Signal ready to parent process
+ sock.sendall(b"ready\\n")
+
+ # Signal threads to start waiting
+ ready_event.set()
+
+ # Give threads time to start sleeping
+ time.sleep(0.1)
+
+ # Now do busy work to hold the GIL
+ main_work()
+ """
+ )
+
+ with os_helper.temp_dir() as work_dir:
+ script_dir = os.path.join(work_dir, "script_pkg")
+ os.mkdir(script_dir)
+
+ # Create a socket server to communicate with the target process
+ server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ server_socket.bind(("localhost", port))
+ server_socket.settimeout(SHORT_TIMEOUT)
+ server_socket.listen(1)
+
+ script_name = _make_test_script(script_dir, "script", script)
+ client_socket = None
+ try:
+ p = subprocess.Popen([sys.executable, script_name])
+ client_socket, _ = server_socket.accept()
+ server_socket.close()
+
+ # Wait for ready signal
+ response = b""
+ while b"ready" not in response:
+ response += client_socket.recv(1024)
+
+ # Wait for the main thread to start its busy work
+ while b"working" not in response:
+ response += client_socket.recv(1024)
+
+ # Get stack trace with all threads
+ unwinder_all = RemoteUnwinder(p.pid, all_threads=True)
+ all_traces = unwinder_all.get_stack_trace()
+
+ # Get stack trace with only GIL holder
+ unwinder_gil = RemoteUnwinder(p.pid, only_active_thread=True)
+ gil_traces = unwinder_gil.get_stack_trace()
+
+ except PermissionError:
+ self.skipTest(
+ "Insufficient permissions to read the stack trace"
+ )
+ finally:
+ if client_socket is not None:
+ client_socket.close()
+ p.kill()
+ p.terminate()
+ p.wait(timeout=SHORT_TIMEOUT)
+
+ # Verify we got multiple threads in all_traces
+ self.assertGreater(len(all_traces), 1, "Should have multiple threads")
+
+ # Verify we got exactly one thread in gil_traces
+ self.assertEqual(len(gil_traces), 1, "Should have exactly one GIL holder")
+
+ # The GIL holder should be in the all_traces list
+ gil_thread_id = gil_traces[0][0]
+ all_thread_ids = [trace[0] for trace in all_traces]
+ self.assertIn(gil_thread_id, all_thread_ids,
+ "GIL holder should be among all threads")
+
if __name__ == "__main__":
unittest.main()
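A hedged sketch of the new only_active_thread knob in isolation. The constructor keywords and get_stack_trace() calls are taken verbatim from the test above; the import path is an assumption (the private _remote_debugging module this test suite relies on), and attaching generally needs the same permissions the test skips on.

    import os
    import sys

    # Assumption: RemoteUnwinder is provided by the private _remote_debugging module.
    from _remote_debugging import RemoteUnwinder

    pid = int(sys.argv[1]) if len(sys.argv) > 1 else os.getpid()

    try:
        all_traces = RemoteUnwinder(pid, all_threads=True).get_stack_trace()
        gil_traces = RemoteUnwinder(pid, only_active_thread=True).get_stack_trace()
    except PermissionError:
        raise SystemExit("insufficient permissions to read the target's stack")

    print(f"{len(all_traces)} thread(s) sampled; GIL currently held by thread", gil_traces[0][0])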
diff --git a/Lib/test/test_fileio.py b/Lib/test/test_fileio.py
index 5a0f033ebb8..e3d54f6315a 100644
--- a/Lib/test/test_fileio.py
+++ b/Lib/test/test_fileio.py
@@ -591,7 +591,7 @@ class OtherFileTests:
try:
f.write(b"abc")
f.close()
- with open(TESTFN_ASCII, "rb") as f:
+ with self.open(TESTFN_ASCII, "rb") as f:
self.assertEqual(f.read(), b"abc")
finally:
os.unlink(TESTFN_ASCII)
@@ -608,7 +608,7 @@ class OtherFileTests:
try:
f.write(b"abc")
f.close()
- with open(TESTFN_UNICODE, "rb") as f:
+ with self.open(TESTFN_UNICODE, "rb") as f:
self.assertEqual(f.read(), b"abc")
finally:
os.unlink(TESTFN_UNICODE)
@@ -692,13 +692,13 @@ class OtherFileTests:
def testAppend(self):
try:
- f = open(TESTFN, 'wb')
+ f = self.FileIO(TESTFN, 'wb')
f.write(b'spam')
f.close()
- f = open(TESTFN, 'ab')
+ f = self.FileIO(TESTFN, 'ab')
f.write(b'eggs')
f.close()
- f = open(TESTFN, 'rb')
+ f = self.FileIO(TESTFN, 'rb')
d = f.read()
f.close()
self.assertEqual(d, b'spameggs')
@@ -734,6 +734,7 @@ class OtherFileTests:
class COtherFileTests(OtherFileTests, unittest.TestCase):
FileIO = _io.FileIO
modulename = '_io'
+ open = _io.open
@cpython_only
def testInvalidFd_overflow(self):
@@ -755,6 +756,7 @@ class COtherFileTests(OtherFileTests, unittest.TestCase):
class PyOtherFileTests(OtherFileTests, unittest.TestCase):
FileIO = _pyio.FileIO
modulename = '_pyio'
+ open = _pyio.open
def test_open_code(self):
# Check that the default behaviour of open_code matches
diff --git a/Lib/test/test_float.py b/Lib/test/test_float.py
index 237d7b5d35e..00518abcb11 100644
--- a/Lib/test/test_float.py
+++ b/Lib/test/test_float.py
@@ -795,6 +795,8 @@ class FormatTestCase(unittest.TestCase):
self.assertRaises(ValueError, format, x, '.6,n')
@support.requires_IEEE_754
+ @unittest.skipUnless(sys.float_repr_style == 'short',
+ "applies only when using short float repr style")
def test_format_testfile(self):
with open(format_testfile, encoding="utf-8") as testfile:
for line in testfile:
diff --git a/Lib/test/test_format.py b/Lib/test/test_format.py
index c7cc32e0949..1f626d87fa6 100644
--- a/Lib/test/test_format.py
+++ b/Lib/test/test_format.py
@@ -346,12 +346,12 @@ class FormatTest(unittest.TestCase):
testcommon(b"%s", memoryview(b"abc"), b"abc")
# %a will give the equivalent of
# repr(some_obj).encode('ascii', 'backslashreplace')
- testcommon(b"%a", 3.14, b"3.14")
+ testcommon(b"%a", 3.25, b"3.25")
testcommon(b"%a", b"ghi", b"b'ghi'")
testcommon(b"%a", "jkl", b"'jkl'")
testcommon(b"%a", "\u0544", b"'\\u0544'")
# %r is an alias for %a
- testcommon(b"%r", 3.14, b"3.14")
+ testcommon(b"%r", 3.25, b"3.25")
testcommon(b"%r", b"ghi", b"b'ghi'")
testcommon(b"%r", "jkl", b"'jkl'")
testcommon(b"%r", "\u0544", b"'\\u0544'")
@@ -407,19 +407,19 @@ class FormatTest(unittest.TestCase):
self.assertEqual(format("abc", "\u2007<5"), "abc\u2007\u2007")
self.assertEqual(format(123, "\u2007<5"), "123\u2007\u2007")
- self.assertEqual(format(12.3, "\u2007<6"), "12.3\u2007\u2007")
+ self.assertEqual(format(12.5, "\u2007<6"), "12.5\u2007\u2007")
self.assertEqual(format(0j, "\u2007<4"), "0j\u2007\u2007")
self.assertEqual(format(1+2j, "\u2007<8"), "(1+2j)\u2007\u2007")
self.assertEqual(format("abc", "\u2007>5"), "\u2007\u2007abc")
self.assertEqual(format(123, "\u2007>5"), "\u2007\u2007123")
- self.assertEqual(format(12.3, "\u2007>6"), "\u2007\u200712.3")
+ self.assertEqual(format(12.5, "\u2007>6"), "\u2007\u200712.5")
self.assertEqual(format(1+2j, "\u2007>8"), "\u2007\u2007(1+2j)")
self.assertEqual(format(0j, "\u2007>4"), "\u2007\u20070j")
self.assertEqual(format("abc", "\u2007^5"), "\u2007abc\u2007")
self.assertEqual(format(123, "\u2007^5"), "\u2007123\u2007")
- self.assertEqual(format(12.3, "\u2007^6"), "\u200712.3\u2007")
+ self.assertEqual(format(12.5, "\u2007^6"), "\u200712.5\u2007")
self.assertEqual(format(1+2j, "\u2007^8"), "\u2007(1+2j)\u2007")
self.assertEqual(format(0j, "\u2007^4"), "\u20070j\u2007")
diff --git a/Lib/test/test_fractions.py b/Lib/test/test_fractions.py
index d1d2739856c..cf42b86358d 100644
--- a/Lib/test/test_fractions.py
+++ b/Lib/test/test_fractions.py
@@ -1322,6 +1322,8 @@ class FractionTest(unittest.TestCase):
# Thousands separators
(F('1234567.123456'), ',.5e', '1.23457e+06'),
(F('123.123456'), '012_.2e', '0_001.23e+02'),
+ # Thousands separators for fractional part (or for integral too)
+ (F('1234567.123456'), '.5_e', '1.234_57e+06'),
# z flag is legal, but never makes a difference to the output
(F(-1, 7**100), 'z.6e', '-3.091690e-85'),
]
@@ -1447,6 +1449,12 @@ class FractionTest(unittest.TestCase):
(F('1234567'), ',.2f', '1,234,567.00'),
(F('12345678'), ',.2f', '12,345,678.00'),
(F('12345678'), ',f', '12,345,678.000000'),
+ # Thousands separators for fractional part (or for integral too)
+ (F('123456.789123123'), '._f', '123456.789_123'),
+ (F('123456.789123123'), '.7_f', '123456.789_123_1'),
+ (F('123456.789123123'), '.9_f', '123456.789_123_123'),
+ (F('123456.789123123'), '.,f', '123456.789,123'),
+ (F('123456.789123123'), '_.,f', '123_456.789,123'),
# Underscore as thousands separator
(F(2, 3), '_.2f', '0.67'),
(F(2, 3), '_.7f', '0.6666667'),
@@ -1480,11 +1488,8 @@ class FractionTest(unittest.TestCase):
(F('-1234.5678'), '08,.0f', '-001,235'),
(F('-1234.5678'), '09,.0f', '-0,001,235'),
# Corner-case - zero-padding specified through fill and align
- # instead of the zero-pad character - in this case, treat '0' as a
- # regular fill character and don't attempt to insert commas into
- # the filled portion. This differs from the int and float
- # behaviour.
- (F('1234.5678'), '0=12,.2f', '00001,234.57'),
+ # instead of the zero-pad character.
+ (F('1234.5678'), '0=12,.2f', '0,001,234.57'),
# Corner case where it's not clear whether the '0' indicates zero
# padding or gives the minimum width, but there's still an obvious
# answer to give. We want this to work in case the minimum width
@@ -1623,6 +1628,11 @@ class FractionTest(unittest.TestCase):
'.f',
'.g',
'.%',
+ # Thousands separators before precision
+ '._6e',
+ '._6f',
+ '._6g',
+ '._6%',
# Z instead of z for negative zero suppression
'Z.2f'
# z flag not supported for general formatting
diff --git a/Lib/test/test_free_threading/test_io.py b/Lib/test/test_free_threading/test_io.py
index f9bec740ddf..41d89e04da8 100644
--- a/Lib/test/test_free_threading/test_io.py
+++ b/Lib/test/test_free_threading/test_io.py
@@ -1,12 +1,13 @@
+import io
+import _pyio as pyio
import threading
from unittest import TestCase
from test.support import threading_helper
from random import randint
-from io import BytesIO
from sys import getsizeof
-class TestBytesIO(TestCase):
+class ThreadSafetyMixin:
# Test pretty much everything that can break under free-threading.
# Non-deterministic, but at least one of these things will fail if
# BytesIO object is not free-thread safe.
@@ -90,20 +91,27 @@ class TestBytesIO(TestCase):
barrier.wait()
getsizeof(b)
- self.check([write] * 10, BytesIO())
- self.check([writelines] * 10, BytesIO())
- self.check([write] * 10 + [truncate] * 10, BytesIO())
- self.check([truncate] + [read] * 10, BytesIO(b'0\n'*204800))
- self.check([truncate] + [read1] * 10, BytesIO(b'0\n'*204800))
- self.check([truncate] + [readline] * 10, BytesIO(b'0\n'*20480))
- self.check([truncate] + [readlines] * 10, BytesIO(b'0\n'*20480))
- self.check([truncate] + [readinto] * 10, BytesIO(b'0\n'*204800), bytearray(b'0\n'*204800))
- self.check([close] + [write] * 10, BytesIO())
- self.check([truncate] + [getvalue] * 10, BytesIO(b'0\n'*204800))
- self.check([truncate] + [getbuffer] * 10, BytesIO(b'0\n'*204800))
- self.check([truncate] + [iter] * 10, BytesIO(b'0\n'*20480))
- self.check([truncate] + [getstate] * 10, BytesIO(b'0\n'*204800))
- self.check([truncate] + [setstate] * 10, BytesIO(b'0\n'*204800), (b'123', 0, None))
- self.check([truncate] + [sizeof] * 10, BytesIO(b'0\n'*204800))
+ self.check([write] * 10, self.ioclass())
+ self.check([writelines] * 10, self.ioclass())
+ self.check([write] * 10 + [truncate] * 10, self.ioclass())
+ self.check([truncate] + [read] * 10, self.ioclass(b'0\n'*204800))
+ self.check([truncate] + [read1] * 10, self.ioclass(b'0\n'*204800))
+ self.check([truncate] + [readline] * 10, self.ioclass(b'0\n'*20480))
+ self.check([truncate] + [readlines] * 10, self.ioclass(b'0\n'*20480))
+ self.check([truncate] + [readinto] * 10, self.ioclass(b'0\n'*204800), bytearray(b'0\n'*204800))
+ self.check([close] + [write] * 10, self.ioclass())
+ self.check([truncate] + [getvalue] * 10, self.ioclass(b'0\n'*204800))
+ self.check([truncate] + [getbuffer] * 10, self.ioclass(b'0\n'*204800))
+ self.check([truncate] + [iter] * 10, self.ioclass(b'0\n'*20480))
+ self.check([truncate] + [getstate] * 10, self.ioclass(b'0\n'*204800))
+ state = self.ioclass(b'123').__getstate__()
+ self.check([truncate] + [setstate] * 10, self.ioclass(b'0\n'*204800), state)
+ self.check([truncate] + [sizeof] * 10, self.ioclass(b'0\n'*204800))
# no tests for seek or tell because they don't break anything
+
+class CBytesIOTest(ThreadSafetyMixin, TestCase):
+ ioclass = io.BytesIO
+
+class PyBytesIOTest(ThreadSafetyMixin, TestCase):
+ ioclass = pyio.BytesIO
diff --git a/Lib/test/test_free_threading/test_itertools.py b/Lib/test/test_free_threading/test_itertools.py
index b8663ade1d4..9d366041917 100644
--- a/Lib/test/test_free_threading/test_itertools.py
+++ b/Lib/test/test_free_threading/test_itertools.py
@@ -1,6 +1,6 @@
import unittest
from threading import Thread, Barrier
-from itertools import batched, cycle
+from itertools import batched, chain, cycle
from test.support import threading_helper
@@ -17,7 +17,7 @@ class ItertoolsThreading(unittest.TestCase):
barrier.wait()
while True:
try:
- _ = next(it)
+ next(it)
except StopIteration:
break
@@ -62,6 +62,34 @@ class ItertoolsThreading(unittest.TestCase):
barrier.reset()
+ @threading_helper.reap_threads
+ def test_chain(self):
+ number_of_threads = 6
+ number_of_iterations = 20
+
+ barrier = Barrier(number_of_threads)
+ def work(it):
+ barrier.wait()
+ while True:
+ try:
+ next(it)
+ except StopIteration:
+ break
+
+ data = [(1, )] * 200
+ for it in range(number_of_iterations):
+ chain_iterator = chain(*data)
+ worker_threads = []
+ for ii in range(number_of_threads):
+ worker_threads.append(
+ Thread(target=work, args=[chain_iterator]))
+
+ with threading_helper.start_threads(worker_threads):
+ pass
+
+ barrier.reset()
+
+
if __name__ == "__main__":
unittest.main()
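A compact sketch of the pattern these free-threading tests rely on: several threads drain one shared iterator, started together by a Barrier so they contend from the first next() call. Names are illustrative; on a free-threaded build this is exactly the shared state the chain fix protects.

    from itertools import chain
    from threading import Barrier, Thread

    def drain(shared_iterator, barrier):
        barrier.wait()               # start every worker at the same moment
        for _ in shared_iterator:    # each next() may race with the other threads
            pass

    num_threads = 6
    barrier = Barrier(num_threads)
    shared = chain.from_iterable([(1,)] * 200)

    threads = [Thread(target=drain, args=(shared, barrier)) for _ in range(num_threads)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    print("iterator fully consumed:", next(shared, None) is None)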
diff --git a/Lib/test/test_free_threading/test_itertools_combinatoric.py b/Lib/test/test_free_threading/test_itertools_combinatoric.py
new file mode 100644
index 00000000000..5b3b88deedd
--- /dev/null
+++ b/Lib/test/test_free_threading/test_itertools_combinatoric.py
@@ -0,0 +1,51 @@
+import unittest
+from threading import Thread, Barrier
+from itertools import combinations, product
+from test.support import threading_helper
+
+
+threading_helper.requires_working_threading(module=True)
+
+def test_concurrent_iteration(iterator, number_of_threads):
+ barrier = Barrier(number_of_threads)
+ def iterator_worker(it):
+ barrier.wait()
+ while True:
+ try:
+ _ = next(it)
+ except StopIteration:
+ return
+
+ worker_threads = []
+ for ii in range(number_of_threads):
+ worker_threads.append(
+ Thread(target=iterator_worker, args=[iterator]))
+
+ with threading_helper.start_threads(worker_threads):
+ pass
+
+ barrier.reset()
+
+class ItertoolsThreading(unittest.TestCase):
+
+ @threading_helper.reap_threads
+ def test_combinations(self):
+ number_of_threads = 10
+ number_of_iterations = 24
+
+ for it in range(number_of_iterations):
+ iterator = combinations((1, 2, 3, 4, 5), 2)
+ test_concurrent_iteration(iterator, number_of_threads)
+
+ @threading_helper.reap_threads
+ def test_product(self):
+ number_of_threads = 10
+ number_of_iterations = 24
+
+ for it in range(number_of_iterations):
+ iterator = product((1, 2, 3, 4, 5), (10, 20, 30))
+ test_concurrent_iteration(iterator, number_of_threads)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py
index f5455705678..58a30c8e6ac 100644
--- a/Lib/test/test_fstring.py
+++ b/Lib/test/test_fstring.py
@@ -1336,9 +1336,9 @@ x = (
def test_conversions(self):
self.assertEqual(f'{3.14:10.10}', ' 3.14')
- self.assertEqual(f'{3.14!s:10.10}', '3.14 ')
- self.assertEqual(f'{3.14!r:10.10}', '3.14 ')
- self.assertEqual(f'{3.14!a:10.10}', '3.14 ')
+ self.assertEqual(f'{1.25!s:10.10}', '1.25 ')
+ self.assertEqual(f'{1.25!r:10.10}', '1.25 ')
+ self.assertEqual(f'{1.25!a:10.10}', '1.25 ')
self.assertEqual(f'{"a"}', 'a')
self.assertEqual(f'{"a"!r}', "'a'")
@@ -1347,7 +1347,7 @@ x = (
# Conversions can have trailing whitespace after them since it
# does not provide any significance
self.assertEqual(f"{3!s }", "3")
- self.assertEqual(f'{3.14!s :10.10}', '3.14 ')
+ self.assertEqual(f'{1.25!s :10.10}', '1.25 ')
# Not a conversion.
self.assertEqual(f'{"a!r"}', "a!r")
diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py
index 9e0fd1218f2..81d4e39f5be 100644
--- a/Lib/test/test_generated_cases.py
+++ b/Lib/test/test_generated_cases.py
@@ -2224,5 +2224,249 @@ class TestGeneratedAbstractCases(unittest.TestCase):
"Inputs must have equal sizes"):
self.run_cases_test(input, input2, output)
+ def test_pure_uop_body_copied_in(self):
+ # Note: any non-escaping call works.
+ # In this case, we use PyStackRef_IsNone.
+ input = """
+ pure op(OP, (foo -- res)) {
+ res = PyStackRef_IsNone(foo);
+ }
+ """
+ input2 = """
+ op(OP, (foo -- res)) {
+ REPLACE_OPCODE_IF_EVALUATES_PURE(foo);
+ res = sym_new_known(ctx, foo);
+ }
+ """
+ output = """
+ case OP: {
+ JitOptRef foo;
+ JitOptRef res;
+ foo = stack_pointer[-1];
+ if (
+ sym_is_safe_const(ctx, foo)
+ ) {
+ JitOptRef foo_sym = foo;
+ _PyStackRef foo = sym_get_const_as_stackref(ctx, foo_sym);
+ _PyStackRef res_stackref;
+ /* Start of uop copied from bytecodes for constant evaluation */
+ res_stackref = PyStackRef_IsNone(foo);
+ /* End of uop copied from bytecodes for constant evaluation */
+ res = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(res_stackref));
+ stack_pointer[-1] = res;
+ break;
+ }
+ res = sym_new_known(ctx, foo);
+ stack_pointer[-1] = res;
+ break;
+ }
+ """
+ self.run_cases_test(input, input2, output)
+
+ def test_pure_uop_body_copied_in_deopt(self):
+ # Note: any non-escaping call works.
+ # In this case, we use PyStackRef_IsNone.
+ input = """
+ pure op(OP, (foo -- res)) {
+ DEOPT_IF(PyStackRef_IsNull(foo));
+ res = foo;
+ }
+ """
+ input2 = """
+ op(OP, (foo -- res)) {
+ REPLACE_OPCODE_IF_EVALUATES_PURE(foo);
+ res = foo;
+ }
+ """
+ output = """
+ case OP: {
+ JitOptRef foo;
+ JitOptRef res;
+ foo = stack_pointer[-1];
+ if (
+ sym_is_safe_const(ctx, foo)
+ ) {
+ JitOptRef foo_sym = foo;
+ _PyStackRef foo = sym_get_const_as_stackref(ctx, foo_sym);
+ _PyStackRef res_stackref;
+ /* Start of uop copied from bytecodes for constant evaluation */
+ if (PyStackRef_IsNull(foo)) {
+ ctx->done = true;
+ break;
+ }
+ res_stackref = foo;
+ /* End of uop copied from bytecodes for constant evaluation */
+ res = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(res_stackref));
+ stack_pointer[-1] = res;
+ break;
+ }
+ res = foo;
+ stack_pointer[-1] = res;
+ break;
+ }
+ """
+ self.run_cases_test(input, input2, output)
+
+ def test_pure_uop_body_copied_in_error_if(self):
+ # Note: any non-escaping call works.
+ # In this case, we use PyStackRef_IsNone.
+ input = """
+ pure op(OP, (foo -- res)) {
+ ERROR_IF(PyStackRef_IsNull(foo));
+ res = foo;
+ }
+ """
+ input2 = """
+ op(OP, (foo -- res)) {
+ REPLACE_OPCODE_IF_EVALUATES_PURE(foo);
+ res = foo;
+ }
+ """
+ output = """
+ case OP: {
+ JitOptRef foo;
+ JitOptRef res;
+ foo = stack_pointer[-1];
+ if (
+ sym_is_safe_const(ctx, foo)
+ ) {
+ JitOptRef foo_sym = foo;
+ _PyStackRef foo = sym_get_const_as_stackref(ctx, foo_sym);
+ _PyStackRef res_stackref;
+ /* Start of uop copied from bytecodes for constant evaluation */
+ if (PyStackRef_IsNull(foo)) {
+ goto error;
+ }
+ res_stackref = foo;
+ /* End of uop copied from bytecodes for constant evaluation */
+ res = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(res_stackref));
+ stack_pointer[-1] = res;
+ break;
+ }
+ res = foo;
+ stack_pointer[-1] = res;
+ break;
+ }
+ """
+ self.run_cases_test(input, input2, output)
+
+
+ def test_replace_opcode_uop_body_copied_in_complex(self):
+ input = """
+ pure op(OP, (foo -- res)) {
+ if (foo) {
+ res = PyStackRef_IsNone(foo);
+ }
+ else {
+ res = 1;
+ }
+ }
+ """
+ input2 = """
+ op(OP, (foo -- res)) {
+ REPLACE_OPCODE_IF_EVALUATES_PURE(foo);
+ res = sym_new_known(ctx, foo);
+ }
+ """
+ output = """
+ case OP: {
+ JitOptRef foo;
+ JitOptRef res;
+ foo = stack_pointer[-1];
+ if (
+ sym_is_safe_const(ctx, foo)
+ ) {
+ JitOptRef foo_sym = foo;
+ _PyStackRef foo = sym_get_const_as_stackref(ctx, foo_sym);
+ _PyStackRef res_stackref;
+ /* Start of uop copied from bytecodes for constant evaluation */
+ if (foo) {
+ res_stackref = PyStackRef_IsNone(foo);
+ }
+ else {
+ res_stackref = 1;
+ }
+ /* End of uop copied from bytecodes for constant evaluation */
+ res = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(res_stackref));
+ stack_pointer[-1] = res;
+ break;
+ }
+ res = sym_new_known(ctx, foo);
+ stack_pointer[-1] = res;
+ break;
+ }
+ """
+ self.run_cases_test(input, input2, output)
+
+ def test_replace_opcode_escaping_uop_body_copied_in_complex(self):
+ input = """
+ pure op(OP, (foo -- res)) {
+ if (foo) {
+ res = ESCAPING_CODE(foo);
+ }
+ else {
+ res = 1;
+ }
+ }
+ """
+ input2 = """
+ op(OP, (foo -- res)) {
+ REPLACE_OPCODE_IF_EVALUATES_PURE(foo);
+ res = sym_new_known(ctx, foo);
+ }
+ """
+ output = """
+ case OP: {
+ JitOptRef foo;
+ JitOptRef res;
+ foo = stack_pointer[-1];
+ if (
+ sym_is_safe_const(ctx, foo)
+ ) {
+ JitOptRef foo_sym = foo;
+ _PyStackRef foo = sym_get_const_as_stackref(ctx, foo_sym);
+ _PyStackRef res_stackref;
+ /* Start of uop copied from bytecodes for constant evaluation */
+ if (foo) {
+ res_stackref = ESCAPING_CODE(foo);
+ }
+ else {
+ res_stackref = 1;
+ }
+ /* End of uop copied from bytecodes for constant evaluation */
+ res = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(res_stackref));
+ stack_pointer[-1] = res;
+ break;
+ }
+ res = sym_new_known(ctx, foo);
+ stack_pointer[-1] = res;
+ break;
+ }
+ """
+ self.run_cases_test(input, input2, output)
+
+ def test_replace_opcode_uop_reject_array_effects(self):
+ input = """
+ pure op(OP, (foo[2] -- res)) {
+ if (foo) {
+ res = PyStackRef_IsNone(foo);
+ }
+ else {
+ res = 1;
+ }
+ }
+ """
+ input2 = """
+ op(OP, (foo[2] -- res)) {
+ REPLACE_OPCODE_IF_EVALUATES_PURE(foo);
+ res = sym_new_unknown(ctx);
+ }
+ """
+ output = """
+ """
+ with self.assertRaisesRegex(SyntaxError,
+ "Pure evaluation cannot take array-like inputs"):
+ self.run_cases_test(input, input2, output)
+
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_getpath.py b/Lib/test/test_getpath.py
index f86df9d0d03..83f09f34955 100644
--- a/Lib/test/test_getpath.py
+++ b/Lib/test/test_getpath.py
@@ -354,6 +354,27 @@ class MockGetPathTests(unittest.TestCase):
actual = getpath(ns, expected)
self.assertEqual(expected, actual)
+ def test_venv_posix_without_home_key(self):
+ ns = MockPosixNamespace(
+ argv0="/venv/bin/python3",
+ PREFIX="/usr",
+ ENV_PATH="/usr/bin",
+ )
+ # Setup the bare minimum venv
+ ns.add_known_xfile("/usr/bin/python3")
+ ns.add_known_xfile("/venv/bin/python3")
+ ns.add_known_link("/venv/bin/python3", "/usr/bin/python3")
+ ns.add_known_file("/venv/pyvenv.cfg", [
+ # home = key intentionally omitted
+ ])
+ expected = dict(
+ executable="/venv/bin/python3",
+ prefix="/venv",
+ base_prefix="/usr",
+ )
+ actual = getpath(ns, expected)
+ self.assertEqual(expected, actual)
+
def test_venv_changed_name_posix(self):
"Test a venv layout on *nix."
ns = MockPosixNamespace(
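For reference, a pyvenv.cfg written by python -m venv normally carries a home key naming the base interpreter's bin directory, along the lines of the illustrative snippet below; the new test feeds getpath a venv whose config omits that key entirely and asserts that prefix still resolves to /venv while base_prefix falls back to /usr via the symlinked executable.

    home = /usr/bin
    include-system-site-packages = false
    version = 3.15.0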
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
index 7b378c45e71..5bad483ae9d 100644
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -279,7 +279,10 @@ class HashLibTestCase(unittest.TestCase):
with self.assertWarnsRegex(DeprecationWarning,
DEPRECATED_STRING_PARAMETER):
hashlib.new(digest_name, string=b'')
- if self._hashlib:
+ # Make sure that _hashlib actually provides this constructor
+ # before testing it, since a build may mix libcrypto-backed and
+ # interned hash implementations.
+ if self._hashlib and digest_name in self._hashlib._constructors:
self._hashlib.new(digest_name, b'')
self._hashlib.new(digest_name, data=b'')
with self.assertWarnsRegex(DeprecationWarning,
@@ -333,7 +336,8 @@ class HashLibTestCase(unittest.TestCase):
with self.subTest(digest_name, args=args, kwds=kwds):
with self.assertRaisesRegex(TypeError, errmsg):
hashlib.new(digest_name, *args, **kwds)
- if self._hashlib:
+ if (self._hashlib and
+ digest_name in self._hashlib._constructors):
with self.assertRaisesRegex(TypeError, errmsg):
self._hashlib.new(digest_name, *args, **kwds)
diff --git a/Lib/test/test_htmlparser.py b/Lib/test/test_htmlparser.py
index 65a4bee72b9..15cad061889 100644
--- a/Lib/test/test_htmlparser.py
+++ b/Lib/test/test_htmlparser.py
@@ -81,6 +81,13 @@ class EventCollectorCharrefs(EventCollector):
self.fail('This should never be called with convert_charrefs=True')
+# The normal event collector normalizes the events in get_events,
+# so we override it to return the original list of events.
+class EventCollectorNoNormalize(EventCollector):
+ def get_events(self):
+ return self.events
+
+
class TestCaseBase(unittest.TestCase):
def get_collector(self):
@@ -265,8 +272,7 @@ text
("starttag", "foo:bar", [("one", "1"), ("two", "2")]),
("starttag_text", s)])
- def test_cdata_content(self):
- contents = [
+ @support.subTests('content', [
'<!-- not a comment --> &not-an-entity-ref;',
"<not a='start tag'>",
'<a href="" /> <p> <span></span>',
@@ -279,70 +285,127 @@ text
'src="http://www.example.org/r=\'+new '
'Date().getTime()+\'"><\\/s\'+\'cript>\');\n//]]>'),
'\n<!-- //\nvar foo = 3.14;\n// -->\n',
- 'foo = "</sty" + "le>";',
'<!-- \u2603 -->',
- # these two should be invalid according to the HTML 5 spec,
- # section 8.1.2.2
- #'foo = </\nscript>',
- #'foo = </ script>',
- ]
- elements = ['script', 'style', 'SCRIPT', 'STYLE', 'Script', 'Style']
- for content in contents:
- for element in elements:
- element_lower = element.lower()
- s = '<{element}>{content}</{element}>'.format(element=element,
- content=content)
- self._run_check(s, [("starttag", element_lower, []),
- ("data", content),
- ("endtag", element_lower)])
-
- def test_cdata_with_closing_tags(self):
+ 'foo = "</ script>"',
+ 'foo = "</scripture>"',
+ 'foo = "</script\v>"',
+ 'foo = "</script\xa0>"',
+ 'foo = "</ſcript>"',
+ 'foo = "</scrıpt>"',
+ ])
+ def test_script_content(self, content):
+ s = f'<script>{content}</script>'
+ self._run_check(s, [("starttag", "script", []),
+ ("data", content),
+ ("endtag", "script")])
+
+ @support.subTests('content', [
+ 'a::before { content: "<!-- not a comment -->"; }',
+ 'a::before { content: "&not-an-entity-ref;"; }',
+ 'a::before { content: "<not a=\'start tag\'>"; }',
+ 'a::before { content: "\u2603"; }',
+ 'a::before { content: "< /style>"; }',
+ 'a::before { content: "</ style>"; }',
+ 'a::before { content: "</styled>"; }',
+ 'a::before { content: "</style\v>"; }',
+ 'a::before { content: "</style\xa0>"; }',
+ 'a::before { content: "</ſtyle>"; }',
+ ])
+ def test_style_content(self, content):
+ s = f'<style>{content}</style>'
+ self._run_check(s, [("starttag", "style", []),
+ ("data", content),
+ ("endtag", "style")])
+
+ @support.subTests('endtag', ['script', 'SCRIPT', 'script ', 'script\n',
+ 'script/', 'script foo=bar', 'script foo=">"'])
+ def test_script_closing_tag(self, endtag):
# see issue #13358
# make sure that HTMLParser calls handle_data only once for each CDATA.
- # The normal event collector normalizes the events in get_events,
- # so we override it to return the original list of events.
- class Collector(EventCollector):
- def get_events(self):
- return self.events
-
- content = """<!-- not a comment --> &not-an-entity-ref;
- <a href="" /> </p><p> <span></span></style>
- '</script' + '>'"""
- for element in [' script', 'script ', ' script ',
- '\nscript', 'script\n', '\nscript\n']:
- element_lower = element.lower().strip()
- s = '<script>{content}</{element}>'.format(element=element,
- content=content)
- self._run_check(s, [("starttag", element_lower, []),
- ("data", content),
- ("endtag", element_lower)],
- collector=Collector(convert_charrefs=False))
-
- def test_EOF_in_cdata(self):
content = """<!-- not a comment --> &not-an-entity-ref;
<a href="" /> </p><p> <span></span></style>
'</script' + '>'"""
- s = f'<script>{content}'
- self._run_check(s, [
- ("starttag", 'script', []),
- ("data", content)
- ])
+ s = f'<ScrIPt>{content}</{endtag}>'
+ self._run_check(s, [("starttag", "script", []),
+ ("data", content),
+ ("endtag", "script")],
+ collector=EventCollectorNoNormalize(convert_charrefs=False))
+
+ @support.subTests('endtag', ['style', 'STYLE', 'style ', 'style\n',
+ 'style/', 'style foo=bar', 'style foo=">"'])
+ def test_style_closing_tag(self, endtag):
+ content = """
+ b::before { content: "<!-- not a comment -->"; }
+ p::before { content: "&not-an-entity-ref;"; }
+ a::before { content: "<i>"; }
+ a::after { content: "</i>"; }
+ """
+ s = f'<StyLE>{content}</{endtag}>'
+ self._run_check(s, [("starttag", "style", []),
+ ("data", content),
+ ("endtag", "style")],
+ collector=EventCollectorNoNormalize(convert_charrefs=False))
+
+ @support.subTests('tail,end', [
+ ('', False),
+ ('<', False),
+ ('</', False),
+ ('</s', False),
+ ('</script', False),
+ ('</script ', True),
+ ('</script foo=bar', True),
+ ('</script foo=">', True),
+ ])
+ def test_eof_in_script(self, tail, end):
+ content = "a = 123"
+ s = f'<ScrIPt>{content}{tail}'
+ self._run_check(s, [("starttag", "script", []),
+ ("data", content if end else content + tail)],
+ collector=EventCollectorNoNormalize(convert_charrefs=False))
def test_comments(self):
html = ("<!-- I'm a valid comment -->"
'<!--me too!-->'
'<!------>'
+ '<!----->'
'<!---->'
+ # abrupt-closing-of-empty-comment
+ '<!--->'
+ '<!-->'
'<!----I have many hyphens---->'
'<!-- I have a > in the middle -->'
- '<!-- and I have -- in the middle! -->')
+ '<!-- and I have -- in the middle! -->'
+ '<!--incorrectly-closed-comment--!>'
+ '<!----!>'
+ '<!----!-->'
+ '<!---- >-->'
+ '<!---!>-->'
+ '<!--!>-->'
+ # nested-comment
+ '<!-- <!-- nested --> -->'
+ '<!--<!-->'
+ '<!--<!--!>'
+ )
expected = [('comment', " I'm a valid comment "),
('comment', 'me too!'),
('comment', '--'),
+ ('comment', '-'),
+ ('comment', ''),
+ ('comment', ''),
('comment', ''),
('comment', '--I have many hyphens--'),
('comment', ' I have a > in the middle '),
- ('comment', ' and I have -- in the middle! ')]
+ ('comment', ' and I have -- in the middle! '),
+ ('comment', 'incorrectly-closed-comment'),
+ ('comment', ''),
+ ('comment', '--!'),
+ ('comment', '-- >'),
+ ('comment', '-!>'),
+ ('comment', '!>'),
+ ('comment', ' <!-- nested '), ('data', ' -->'),
+ ('comment', '<!'),
+ ('comment', '<!'),
+ ]
self._run_check(html, expected)
def test_condcoms(self):
@@ -443,7 +506,7 @@ text
self._run_check("</$>", [('comment', '$')])
self._run_check("</", [('data', '</')])
self._run_check("</a", [])
- self._run_check("</ a>", [('endtag', 'a')])
+ self._run_check("</ a>", [('comment', ' a')])
self._run_check("</ a", [('comment', ' a')])
self._run_check("<a<a>", [('starttag', 'a<a', [])])
self._run_check("</a<a>", [('endtag', 'a<a')])
@@ -491,6 +554,10 @@ text
]
self._run_check(html, expected)
+ def test_slashes_in_endtag(self):
+ self._run_check('</a/>', [('endtag', 'a')])
+ self._run_check('</a foo="var"/>', [('endtag', 'a')])
+
def test_declaration_junk_chars(self):
self._run_check("<!DOCTYPE foo $ >", [('decl', 'DOCTYPE foo $ ')])
@@ -525,15 +592,11 @@ text
self._run_check(html, expected)
def test_broken_invalid_end_tag(self):
- # This is technically wrong (the "> shouldn't be included in the 'data')
- # but is probably not worth fixing it (in addition to all the cases of
- # the previous test, it would require a full attribute parsing).
- # see #13993
html = '<b>This</b attr=">"> confuses the parser'
expected = [('starttag', 'b', []),
('data', 'This'),
('endtag', 'b'),
- ('data', '"> confuses the parser')]
+ ('data', ' confuses the parser')]
self._run_check(html, expected)
def test_correct_detection_of_start_tags(self):
@@ -560,7 +623,7 @@ text
html = '<div style="", foo = "bar" ><b>The <a href="some_url">rain</a>'
expected = [
- ('starttag', 'div', [('style', ''), (',', None), ('foo', 'bar')]),
+ ('starttag', 'div', [('style', ''), (',', None), ('foo', None), ('=', None), ('"bar"', None)]),
('starttag', 'b', []),
('data', 'The '),
('starttag', 'a', [('href', 'some_url')]),
@@ -749,9 +812,15 @@ class AttributesTestCase(TestCaseBase):
("starttag", "a", [("b", "v"), ("c", "v"), ("d", "v"), ("e", None)])
]
self._run_check("""<a b='v' c="v" d=v e>""", output)
- self._run_check("""<a b = 'v' c = "v" d = v e>""", output)
- self._run_check("""<a\nb\n=\n'v'\nc\n=\n"v"\nd\n=\nv\ne>""", output)
- self._run_check("""<a\tb\t=\t'v'\tc\t=\t"v"\td\t=\tv\te>""", output)
+ self._run_check("<a foo==bar>", [('starttag', 'a', [('foo', '=bar')])])
+ self._run_check("<a foo =bar>", [('starttag', 'a', [('foo', None), ('=bar', None)])])
+ self._run_check("<a foo\t=bar>", [('starttag', 'a', [('foo', None), ('=bar', None)])])
+ self._run_check("<a foo\v=bar>", [('starttag', 'a', [('foo\v', 'bar')])])
+ self._run_check("<a foo\xa0=bar>", [('starttag', 'a', [('foo\xa0', 'bar')])])
+ self._run_check("<a foo= bar>", [('starttag', 'a', [('foo', ''), ('bar', None)])])
+ self._run_check("<a foo=\tbar>", [('starttag', 'a', [('foo', ''), ('bar', None)])])
+ self._run_check("<a foo=\vbar>", [('starttag', 'a', [('foo', '\vbar')])])
+ self._run_check("<a foo=\xa0bar>", [('starttag', 'a', [('foo', '\xa0bar')])])
def test_attr_values(self):
self._run_check("""<a b='xxx\n\txxx' c="yyy\t\nyyy" d='\txyz\n'>""",
@@ -760,6 +829,10 @@ class AttributesTestCase(TestCaseBase):
("d", "\txyz\n")])])
self._run_check("""<a b='' c="">""",
[("starttag", "a", [("b", ""), ("c", "")])])
+ self._run_check("<a b=\t c=\n>",
+ [("starttag", "a", [("b", ""), ("c", "")])])
+ self._run_check("<a b=\v c=\xa0>",
+ [("starttag", "a", [("b", "\v"), ("c", "\xa0")])])
# Regression test for SF patch #669683.
self._run_check("<e a=rgb(1,2,3)>",
[("starttag", "e", [("a", "rgb(1,2,3)")])])
@@ -831,7 +904,7 @@ class AttributesTestCase(TestCaseBase):
('data', 'test - bad2'), ('endtag', 'a'),
('starttag', 'a', [('href', "test'\xa0style='color:red;bad3'")]),
('data', 'test - bad3'), ('endtag', 'a'),
- ('starttag', 'a', [('href', "test'\xa0style='color:red;bad4'")]),
+ ('starttag', 'a', [('href', None), ('=', None), ("test'&nbsp;style", 'color:red;bad4')]),
('data', 'test - bad4'), ('endtag', 'a')
]
self._run_check(html, expected)
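A short sketch of the comment tokenization the updated expectations describe, using only the public HTMLParser API; the inputs and the emitted comment data are copied from the new test rows.

    from html.parser import HTMLParser

    class CommentPrinter(HTMLParser):
        def handle_comment(self, data):
            print(repr(data))

    p = CommentPrinter()
    p.feed('<!------>')                           # '--'
    p.feed('<!--incorrectly-closed-comment--!>')  # 'incorrectly-closed-comment'
    p.feed('<!--<!-->')                           # '<!'
    p.close()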
diff --git a/Lib/test/test_interpreters/test_api.py b/Lib/test/test_interpreters/test_api.py
index 0ee4582b5d1..a34b20beaca 100644
--- a/Lib/test/test_interpreters/test_api.py
+++ b/Lib/test/test_interpreters/test_api.py
@@ -412,9 +412,11 @@ class InterpreterObjectTests(TestBase):
def test_pickle(self):
interp = interpreters.create()
- data = pickle.dumps(interp)
- unpickled = pickle.loads(data)
- self.assertEqual(unpickled, interp)
+ for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
+ with self.subTest(protocol=protocol):
+ data = pickle.dumps(interp, protocol)
+ unpickled = pickle.loads(data)
+ self.assertEqual(unpickled, interp)
class TestInterpreterIsRunning(TestBase):
diff --git a/Lib/test/test_interpreters/test_channels.py b/Lib/test/test_interpreters/test_channels.py
index 109ddf34453..52827357078 100644
--- a/Lib/test/test_interpreters/test_channels.py
+++ b/Lib/test/test_interpreters/test_channels.py
@@ -121,9 +121,11 @@ class TestRecvChannelAttrs(TestBase):
def test_pickle(self):
ch, _ = channels.create()
- data = pickle.dumps(ch)
- unpickled = pickle.loads(data)
- self.assertEqual(unpickled, ch)
+ for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
+ with self.subTest(protocol=protocol):
+ data = pickle.dumps(ch, protocol)
+ unpickled = pickle.loads(data)
+ self.assertEqual(unpickled, ch)
class TestSendChannelAttrs(TestBase):
@@ -152,9 +154,11 @@ class TestSendChannelAttrs(TestBase):
def test_pickle(self):
_, ch = channels.create()
- data = pickle.dumps(ch)
- unpickled = pickle.loads(data)
- self.assertEqual(unpickled, ch)
+ for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
+ with self.subTest(protocol=protocol):
+ data = pickle.dumps(ch, protocol)
+ unpickled = pickle.loads(data)
+ self.assertEqual(unpickled, ch)
class TestSendRecv(TestBase):
diff --git a/Lib/test/test_interpreters/test_queues.py b/Lib/test/test_interpreters/test_queues.py
index cb17340f581..5451c6654ac 100644
--- a/Lib/test/test_interpreters/test_queues.py
+++ b/Lib/test/test_interpreters/test_queues.py
@@ -188,9 +188,11 @@ class QueueTests(TestBase):
def test_pickle(self):
queue = queues.create()
- data = pickle.dumps(queue)
- unpickled = pickle.loads(data)
- self.assertEqual(unpickled, queue)
+ for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
+ with self.subTest(protocol=protocol):
+ data = pickle.dumps(queue, protocol)
+ unpickled = pickle.loads(data)
+ self.assertEqual(unpickled, queue)
class TestQueueOps(TestBase):
diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py
index 0c921ffbc25..b487bcabf01 100644
--- a/Lib/test/test_io.py
+++ b/Lib/test/test_io.py
@@ -9,6 +9,7 @@
# * test_univnewlines - tests universal newline support
# * test_largefile - tests operations on a file greater than 2**32 bytes
# (only enabled with -ulargefile)
+# * test_free_threading/test_io - tests thread safety of io objects
################################################################################
# ATTENTION TEST WRITERS!!!
diff --git a/Lib/test/test_iter.py b/Lib/test/test_iter.py
index 1b9f3cf7624..18e4b676c53 100644
--- a/Lib/test/test_iter.py
+++ b/Lib/test/test_iter.py
@@ -1147,7 +1147,7 @@ class TestCase(unittest.TestCase):
def test_exception_locations(self):
# The location of an exception raised from __init__ or
- # __next__ should should be the iterator expression
+ # __next__ should be the iterator expression
def init_raises():
try:
diff --git a/Lib/test/test_json/test_tool.py b/Lib/test/test_json/test_tool.py
index 9ea2679c77e..30f9bb33316 100644
--- a/Lib/test/test_json/test_tool.py
+++ b/Lib/test/test_json/test_tool.py
@@ -270,7 +270,7 @@ class TestMain(unittest.TestCase):
(r'" \"foo\" "', f'{t.string}" \\"foo\\" "{t.reset}'),
('"α"', f'{t.string}"\\u03b1"{t.reset}'),
('123', f'{t.number}123{t.reset}'),
- ('-1.2345e+23', f'{t.number}-1.2345e+23{t.reset}'),
+ ('-1.25e+23', f'{t.number}-1.25e+23{t.reset}'),
(r'{"\\": ""}',
f'''\
{ob}
diff --git a/Lib/test/test_listcomps.py b/Lib/test/test_listcomps.py
index cffdeeacc5d..70148dc30fc 100644
--- a/Lib/test/test_listcomps.py
+++ b/Lib/test/test_listcomps.py
@@ -716,7 +716,7 @@ class ListComprehensionTest(unittest.TestCase):
def test_exception_locations(self):
# The location of an exception raised from __init__ or
- # __next__ should should be the iterator expression
+ # __next__ should be the iterator expression
def init_raises():
try:
diff --git a/Lib/test/test_math.py b/Lib/test/test_math.py
index 384ad5c828d..46cb54647b1 100644
--- a/Lib/test/test_math.py
+++ b/Lib/test/test_math.py
@@ -475,6 +475,19 @@ class MathTests(unittest.TestCase):
# similarly, copysign(2., NAN) could be 2. or -2.
self.assertEqual(abs(math.copysign(2., NAN)), 2.)
+ def test_signbit(self):
+ self.assertRaises(TypeError, math.signbit)
+ self.assertRaises(TypeError, math.signbit, '1.0')
+
+ # C11, §7.12.3.6 requires signbit() to return a nonzero value
+ # if and only if the sign of its argument value is negative,
+ # but in practice, we are only interested in a boolean value.
+ self.assertIsInstance(math.signbit(1.0), bool)
+
+ for arg in [0., 1., INF, NAN]:
+ self.assertFalse(math.signbit(arg))
+ self.assertTrue(math.signbit(-arg))
+
def testCos(self):
self.assertRaises(TypeError, math.cos)
self.ftest('cos(-pi/2)', math.cos(-math.pi/2), 0, abs_tol=math.ulp(1))
@@ -1387,7 +1400,6 @@ class MathTests(unittest.TestCase):
args = ((-5, -5, 10), (1.5, 4611686018427387904, 2305843009213693952))
self.assertEqual(sumprod(*args), 0.0)
-
@requires_IEEE_754
@unittest.skipIf(HAVE_DOUBLE_ROUNDING,
"sumprod() accuracy not guaranteed on machines with double rounding")
@@ -2486,7 +2498,6 @@ class MathTests(unittest.TestCase):
with self.assertRaises(ValueError):
math.nextafter(1.0, INF, steps=-1)
-
@requires_IEEE_754
def test_ulp(self):
self.assertEqual(math.ulp(1.0), sys.float_info.epsilon)
diff --git a/Lib/test/test_optparse.py b/Lib/test/test_optparse.py
index e6ffd2b0ffe..e476e472780 100644
--- a/Lib/test/test_optparse.py
+++ b/Lib/test/test_optparse.py
@@ -615,9 +615,9 @@ Options:
self.parser.add_option(
"-p", "--prob",
help="blow up with probability PROB [default: %default]")
- self.parser.set_defaults(prob=0.43)
+ self.parser.set_defaults(prob=0.25)
expected_help = self.help_prefix + \
- " -p PROB, --prob=PROB blow up with probability PROB [default: 0.43]\n"
+ " -p PROB, --prob=PROB blow up with probability PROB [default: 0.25]\n"
self.assertHelp(self.parser, expected_help)
def test_alt_expand(self):
diff --git a/Lib/test/test_peepholer.py b/Lib/test/test_peepholer.py
index ef596630b93..98629df4574 100644
--- a/Lib/test/test_peepholer.py
+++ b/Lib/test/test_peepholer.py
@@ -292,6 +292,7 @@ class TestTranforms(BytecodeTestCase):
('---x', 'UNARY_NEGATIVE', None, False, None, None),
('~~~x', 'UNARY_INVERT', None, False, None, None),
('+++x', 'CALL_INTRINSIC_1', intrinsic_positive, False, None, None),
+ ('~True', 'UNARY_INVERT', None, False, None, None),
]
for (
@@ -718,9 +719,9 @@ class TestTranforms(BytecodeTestCase):
self.assertEqual(format('x = %d!', 1234), 'x = 1234!')
self.assertEqual(format('x = %x!', 1234), 'x = 4d2!')
self.assertEqual(format('x = %f!', 1234), 'x = 1234.000000!')
- self.assertEqual(format('x = %s!', 1234.5678901), 'x = 1234.5678901!')
- self.assertEqual(format('x = %f!', 1234.5678901), 'x = 1234.567890!')
- self.assertEqual(format('x = %d!', 1234.5678901), 'x = 1234!')
+ self.assertEqual(format('x = %s!', 1234.0000625), 'x = 1234.0000625!')
+ self.assertEqual(format('x = %f!', 1234.0000625), 'x = 1234.000063!')
+ self.assertEqual(format('x = %d!', 1234.0000625), 'x = 1234!')
self.assertEqual(format('x = %s%% %%%%', 1234), 'x = 1234% %%')
self.assertEqual(format('x = %s!', '%% %s'), 'x = %% %s!')
self.assertEqual(format('x = %s, y = %d', 12, 34), 'x = 12, y = 34')
diff --git a/Lib/test/test_platform.py b/Lib/test/test_platform.py
index 3688cc4267b..479649053ab 100644
--- a/Lib/test/test_platform.py
+++ b/Lib/test/test_platform.py
@@ -133,6 +133,22 @@ class PlatformTest(unittest.TestCase):
for terse in (False, True):
res = platform.platform(aliased, terse)
+ def test__platform(self):
+ for src, res in [
+ ('foo bar', 'foo_bar'),
+ (
+ '1/2\\3:4;5"6(7)8(7)6"5;4:3\\2/1',
+ '1-2-3-4-5-6-7-8-7-6-5-4-3-2-1'
+ ),
+ ('--', ''),
+ ('-f', '-f'),
+ ('-foo----', '-foo'),
+ ('--foo---', '-foo'),
+ ('---foo--', '-foo'),
+ ]:
+ with self.subTest(src=src):
+ self.assertEqual(platform._platform(src), res)
+
def test_system(self):
res = platform.system()
diff --git a/Lib/test/test_pprint.py b/Lib/test/test_pprint.py
index 0c84d3d3bfd..41c337ade7e 100644
--- a/Lib/test/test_pprint.py
+++ b/Lib/test/test_pprint.py
@@ -458,7 +458,7 @@ class QueryTestCase(unittest.TestCase):
return super().__new__(Temperature, celsius_degrees)
def __repr__(self):
kelvin_degrees = self + 273.15
- return f"{kelvin_degrees}°K"
+ return f"{kelvin_degrees:.2f}°K"
self.assertEqual(pprint.pformat(Temperature(1000)), '1273.15°K')
def test_sorted_dict(self):
diff --git a/Lib/test/test_pyclbr.py b/Lib/test/test_pyclbr.py
index 3e7b2cd0dc9..bce68e6cd7a 100644
--- a/Lib/test/test_pyclbr.py
+++ b/Lib/test/test_pyclbr.py
@@ -11,7 +11,6 @@ from types import FunctionType, MethodType, BuiltinFunctionType
import pyclbr
from unittest import TestCase, main as unittest_main
from test.test_importlib import util as test_importlib_util
-import warnings
StaticMethodType = type(staticmethod(lambda: None))
@@ -246,9 +245,6 @@ class PyclbrTest(TestCase):
# These were once some of the longest modules.
cm('random', ignore=('Random',)) # from _random import Random as CoreGenerator
cm('pickle', ignore=('partial', 'PickleBuffer'))
- with warnings.catch_warnings():
- warnings.simplefilter('ignore', DeprecationWarning)
- cm('sre_parse', ignore=('dump', 'groups', 'pos')) # from sre_constants import *; property
with temporary_main_spec():
cm(
'pdb',
diff --git a/Lib/test/test_pyexpat.py b/Lib/test/test_pyexpat.py
index 1d56ccd71cf..d4b4f60be98 100644
--- a/Lib/test/test_pyexpat.py
+++ b/Lib/test/test_pyexpat.py
@@ -9,12 +9,11 @@ import traceback
from io import BytesIO
from test import support
from test.support import os_helper
-
+from test.support import sortdict
+from unittest import mock
from xml.parsers import expat
from xml.parsers.expat import errors
-from test.support import sortdict
-
class SetAttributeTest(unittest.TestCase):
def setUp(self):
@@ -436,6 +435,19 @@ class BufferTextTest(unittest.TestCase):
"<!--abc-->", "4", "<!--def-->", "5", "</a>"],
"buffered text not properly split")
+ def test_change_character_data_handler_in_callback(self):
+ # Test that xmlparse_handler_setter() properly handles
+ # the special case "parser.CharacterDataHandler = None".
+ def handler(*args):
+ parser.CharacterDataHandler = None
+
+ handler_wrapper = mock.Mock(wraps=handler)
+ parser = expat.ParserCreate()
+ parser.CharacterDataHandler = handler_wrapper
+ parser.Parse(b"<a>1<b/>2<c></c>3<!--abc-->4<!--def-->5</a> ", True)
+ handler_wrapper.assert_called_once()
+ self.assertIsNone(parser.CharacterDataHandler)
+
# Test handling of exception from callback:
class HandlerExceptionTest(unittest.TestCase):
@@ -595,7 +607,7 @@ class ChardataBufferTest(unittest.TestCase):
def test_disabling_buffer(self):
xml1 = b"<?xml version='1.0' encoding='iso8859'?><a>" + b'a' * 512
xml2 = b'b' * 1024
- xml3 = b'c' * 1024 + b'</a>';
+ xml3 = b'c' * 1024 + b'</a>'
parser = expat.ParserCreate()
parser.CharacterDataHandler = self.counting_handler
parser.buffer_text = 1
diff --git a/Lib/test/test_re.py b/Lib/test/test_re.py
index e9128ac1d97..993652d2e88 100644
--- a/Lib/test/test_re.py
+++ b/Lib/test/test_re.py
@@ -5,7 +5,6 @@ from test.support import (gc_collect, bigmemtest, _2G,
import locale
import re
import string
-import sys
import unittest
import warnings
from re import Scanner
@@ -2927,33 +2926,6 @@ class ImplementationTest(unittest.TestCase):
pat = re.compile("")
check_disallow_instantiation(self, type(pat.scanner("")))
- def test_deprecated_modules(self):
- deprecated = {
- 'sre_compile': ['compile', 'error',
- 'SRE_FLAG_IGNORECASE', 'SUBPATTERN',
- '_compile_info'],
- 'sre_constants': ['error', 'SRE_FLAG_IGNORECASE', 'SUBPATTERN',
- '_NamedIntConstant'],
- 'sre_parse': ['SubPattern', 'parse',
- 'SRE_FLAG_IGNORECASE', 'SUBPATTERN',
- '_parse_sub'],
- }
- for name in deprecated:
- with self.subTest(module=name):
- sys.modules.pop(name, None)
- with self.assertWarns(DeprecationWarning) as w:
- __import__(name)
- self.assertEqual(str(w.warning),
- f"module {name!r} is deprecated")
- self.assertEqual(w.filename, __file__)
- self.assertIn(name, sys.modules)
- mod = sys.modules[name]
- self.assertEqual(mod.__name__, name)
- self.assertEqual(mod.__package__, '')
- for attr in deprecated[name]:
- self.assertHasAttr(mod, attr)
- del sys.modules[name]
-
@cpython_only
def test_case_helpers(self):
import _sre
diff --git a/Lib/test/test_reprlib.py b/Lib/test/test_reprlib.py
index d5631efcdb7..22a55b57c07 100644
--- a/Lib/test/test_reprlib.py
+++ b/Lib/test/test_reprlib.py
@@ -397,20 +397,20 @@ class ReprTests(unittest.TestCase):
'object': {
1: 'two',
b'three': [
- (4.5, 6.7),
+ (4.5, 6.25),
[set((8, 9)), frozenset((10, 11))],
],
},
'tests': (
(dict(indent=None), '''\
- {1: 'two', b'three': [(4.5, 6.7), [{8, 9}, frozenset({10, 11})]]}'''),
+ {1: 'two', b'three': [(4.5, 6.25), [{8, 9}, frozenset({10, 11})]]}'''),
(dict(indent=False), '''\
{
1: 'two',
b'three': [
(
4.5,
- 6.7,
+ 6.25,
),
[
{
@@ -430,7 +430,7 @@ class ReprTests(unittest.TestCase):
b'three': [
(
4.5,
- 6.7,
+ 6.25,
),
[
{
@@ -450,7 +450,7 @@ class ReprTests(unittest.TestCase):
b'three': [
(
4.5,
- 6.7,
+ 6.25,
),
[
{
@@ -470,7 +470,7 @@ class ReprTests(unittest.TestCase):
b'three': [
(
4.5,
- 6.7,
+ 6.25,
),
[
{
@@ -490,7 +490,7 @@ class ReprTests(unittest.TestCase):
b'three': [
(
4.5,
- 6.7,
+ 6.25,
),
[
{
@@ -518,7 +518,7 @@ class ReprTests(unittest.TestCase):
b'three': [
(
4.5,
- 6.7,
+ 6.25,
),
[
{
@@ -538,7 +538,7 @@ class ReprTests(unittest.TestCase):
-->b'three': [
-->-->(
-->-->-->4.5,
- -->-->-->6.7,
+ -->-->-->6.25,
-->-->),
-->-->[
-->-->-->{
@@ -558,7 +558,7 @@ class ReprTests(unittest.TestCase):
....b'three': [
........(
............4.5,
- ............6.7,
+ ............6.25,
........),
........[
............{
diff --git a/Lib/test/test_sqlite3/test_dbapi.py b/Lib/test/test_sqlite3/test_dbapi.py
index 291e0356253..3602726437d 100644
--- a/Lib/test/test_sqlite3/test_dbapi.py
+++ b/Lib/test/test_sqlite3/test_dbapi.py
@@ -31,8 +31,7 @@ import urllib.parse
import warnings
from test.support import (
- SHORT_TIMEOUT, check_disallow_instantiation, requires_subprocess,
- is_apple, is_emscripten, is_wasi
+ SHORT_TIMEOUT, check_disallow_instantiation, requires_subprocess
)
from test.support import gc_collect
from test.support import threading_helper, import_helper
@@ -641,14 +640,21 @@ class OpenTests(unittest.TestCase):
self.assertTrue(os.path.exists(path))
cx.execute(self._sql)
+ def get_undecodable_path(self):
+ path = TESTFN_UNDECODABLE
+ if not path:
+ self.skipTest("only works if there are undecodable paths")
+ try:
+ open(path, 'wb').close()
+ except OSError:
+ self.skipTest(f"can't create file with undecodable path {path!r}")
+ unlink(path)
+ return path
+
@unittest.skipIf(sys.platform == "win32", "skipped on Windows")
- @unittest.skipIf(is_apple, "skipped on Apple platforms")
- @unittest.skipIf(is_emscripten or is_wasi, "not supported on Emscripten/WASI")
- @unittest.skipUnless(TESTFN_UNDECODABLE, "only works if there are undecodable paths")
def test_open_with_undecodable_path(self):
- path = TESTFN_UNDECODABLE
+ path = self.get_undecodable_path()
self.addCleanup(unlink, path)
- self.assertFalse(os.path.exists(path))
with contextlib.closing(sqlite.connect(path)) as cx:
self.assertTrue(os.path.exists(path))
cx.execute(self._sql)
@@ -688,14 +694,10 @@ class OpenTests(unittest.TestCase):
cx.execute(self._sql)
@unittest.skipIf(sys.platform == "win32", "skipped on Windows")
- @unittest.skipIf(is_apple, "skipped on Apple platforms")
- @unittest.skipIf(is_emscripten or is_wasi, "not supported on Emscripten/WASI")
- @unittest.skipUnless(TESTFN_UNDECODABLE, "only works if there are undecodable paths")
def test_open_undecodable_uri(self):
- path = TESTFN_UNDECODABLE
+ path = self.get_undecodable_path()
self.addCleanup(unlink, path)
uri = "file:" + urllib.parse.quote(path)
- self.assertFalse(os.path.exists(path))
with contextlib.closing(sqlite.connect(uri, uri=True)) as cx:
self.assertTrue(os.path.exists(path))
cx.execute(self._sql)
diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py
index 0dd619dd7c8..8250b0aef09 100644
--- a/Lib/test/test_statistics.py
+++ b/Lib/test/test_statistics.py
@@ -3319,7 +3319,8 @@ class TestNormalDistC(unittest.TestCase, TestNormalDist):
def load_tests(loader, tests, ignore):
"""Used for doctest/unittest integration."""
tests.addTests(doctest.DocTestSuite())
- tests.addTests(doctest.DocTestSuite(statistics))
+ if sys.float_repr_style == 'short':
+ tests.addTests(doctest.DocTestSuite(statistics))
return tests
diff --git a/Lib/test/test_str.py b/Lib/test/test_str.py
index d6a7bd0da59..2584fbf72d3 100644
--- a/Lib/test/test_str.py
+++ b/Lib/test/test_str.py
@@ -1231,10 +1231,10 @@ class StrTest(string_tests.StringLikeTest,
self.assertEqual('{0:\x00^6}'.format(3), '\x00\x003\x00\x00\x00')
self.assertEqual('{0:<6}'.format(3), '3 ')
- self.assertEqual('{0:\x00<6}'.format(3.14), '3.14\x00\x00')
- self.assertEqual('{0:\x01<6}'.format(3.14), '3.14\x01\x01')
- self.assertEqual('{0:\x00^6}'.format(3.14), '\x003.14\x00')
- self.assertEqual('{0:^6}'.format(3.14), ' 3.14 ')
+ self.assertEqual('{0:\x00<6}'.format(3.25), '3.25\x00\x00')
+ self.assertEqual('{0:\x01<6}'.format(3.25), '3.25\x01\x01')
+ self.assertEqual('{0:\x00^6}'.format(3.25), '\x003.25\x00')
+ self.assertEqual('{0:^6}'.format(3.25), ' 3.25 ')
self.assertEqual('{0:\x00<12}'.format(3+2.0j), '(3+2j)\x00\x00\x00\x00\x00\x00')
self.assertEqual('{0:\x01<12}'.format(3+2.0j), '(3+2j)\x01\x01\x01\x01\x01\x01')
diff --git a/Lib/test/test_strptime.py b/Lib/test/test_strptime.py
index 268230f6da7..0241e543cd7 100644
--- a/Lib/test/test_strptime.py
+++ b/Lib/test/test_strptime.py
@@ -221,14 +221,16 @@ class StrptimeTests(unittest.TestCase):
self.assertRaises(ValueError, _strptime._strptime_time, data_string="%d",
format="%A")
for bad_format in ("%", "% ", "%\n"):
- with self.assertRaisesRegex(ValueError, "stray % in format "):
+ with (self.subTest(format=bad_format),
+ self.assertRaisesRegex(ValueError, "stray % in format ")):
_strptime._strptime_time("2005", bad_format)
- for bad_format in ("%e", "%Oe", "%O", "%O ", "%Ee", "%E", "%E ",
- "%.", "%+", "%_", "%~", "%\\",
+ for bad_format in ("%i", "%Oi", "%O", "%O ", "%Ee", "%E", "%E ",
+ "%.", "%+", "%~", "%\\",
"%O.", "%O+", "%O_", "%O~", "%O\\"):
directive = bad_format[1:].rstrip()
- with self.assertRaisesRegex(ValueError,
- f"'{re.escape(directive)}' is a bad directive in format "):
+ with (self.subTest(format=bad_format),
+ self.assertRaisesRegex(ValueError,
+ f"'{re.escape(directive)}' is a bad directive in format ")):
_strptime._strptime_time("2005", bad_format)
msg_week_no_year_or_weekday = r"ISO week directive '%V' must be used with " \
@@ -335,6 +337,15 @@ class StrptimeTests(unittest.TestCase):
self.roundtrip('%B', 1, (1900, m, 1, 0, 0, 0, 0, 1, 0))
self.roundtrip('%b', 1, (1900, m, 1, 0, 0, 0, 0, 1, 0))
+ @run_with_locales('LC_TIME', 'az_AZ', 'ber_DZ', 'ber_MA', 'crh_UA')
+ def test_month_locale2(self):
+ # Test for month directives
+ # Month name contains 'İ' ('\u0130')
+ self.roundtrip('%B', 1, (2025, 6, 1, 0, 0, 0, 6, 152, 0))
+ self.roundtrip('%b', 1, (2025, 6, 1, 0, 0, 0, 6, 152, 0))
+ self.roundtrip('%B', 1, (2025, 7, 1, 0, 0, 0, 1, 182, 0))
+ self.roundtrip('%b', 1, (2025, 7, 1, 0, 0, 0, 1, 182, 0))
+
def test_day(self):
# Test for day directives
self.roundtrip('%d %Y', 2)
@@ -480,13 +491,11 @@ class StrptimeTests(unittest.TestCase):
# * Year is not included: ha_NG.
# * Use non-Gregorian calendar: lo_LA, thai, th_TH.
# On Windows: ar_IN, ar_SA, fa_IR, ps_AF.
- #
- # BUG: Generates regexp that does not match the current date and time
- # for lzh_TW.
@run_with_locales('LC_TIME', 'C', 'en_US', 'fr_FR', 'de_DE', 'ja_JP',
'he_IL', 'eu_ES', 'ar_AE', 'mfe_MU', 'yo_NG',
'csb_PL', 'br_FR', 'gez_ET', 'brx_IN',
- 'my_MM', 'or_IN', 'shn_MM', 'az_IR')
+ 'my_MM', 'or_IN', 'shn_MM', 'az_IR',
+ 'byn_ER', 'wal_ET', 'lzh_TW')
def test_date_time_locale(self):
# Test %c directive
loc = locale.getlocale(locale.LC_TIME)[0]
@@ -525,11 +534,9 @@ class StrptimeTests(unittest.TestCase):
# NB: Does not roundtrip because use non-Gregorian calendar:
# lo_LA, thai, th_TH. On Windows: ar_IN, ar_SA, fa_IR, ps_AF.
- # BUG: Generates regexp that does not match the current date
- # for lzh_TW.
@run_with_locales('LC_TIME', 'C', 'en_US', 'fr_FR', 'de_DE', 'ja_JP',
'he_IL', 'eu_ES', 'ar_AE',
- 'az_IR', 'my_MM', 'or_IN', 'shn_MM')
+ 'az_IR', 'my_MM', 'or_IN', 'shn_MM', 'lzh_TW')
def test_date_locale(self):
# Test %x directive
now = time.time()
@@ -546,7 +553,7 @@ class StrptimeTests(unittest.TestCase):
# NB: Dates before 1969 do not roundtrip on many locales, including C.
@unittest.skipIf(support.linked_to_musl(), "musl libc issue, bpo-46390")
@run_with_locales('LC_TIME', 'en_US', 'fr_FR', 'de_DE', 'ja_JP',
- 'eu_ES', 'ar_AE', 'my_MM', 'shn_MM')
+ 'eu_ES', 'ar_AE', 'my_MM', 'shn_MM', 'lzh_TW')
def test_date_locale2(self):
# Test %x directive
loc = locale.getlocale(locale.LC_TIME)[0]
@@ -562,11 +569,11 @@ class StrptimeTests(unittest.TestCase):
# norwegian, nynorsk.
# * Hours are in 12-hour notation without AM/PM indication: hy_AM,
# ms_MY, sm_WS.
- # BUG: Generates regexp that does not match the current time for lzh_TW.
@run_with_locales('LC_TIME', 'C', 'en_US', 'fr_FR', 'de_DE', 'ja_JP',
'aa_ET', 'am_ET', 'az_IR', 'byn_ER', 'fa_IR', 'gez_ET',
'my_MM', 'om_ET', 'or_IN', 'shn_MM', 'sid_ET', 'so_SO',
- 'ti_ET', 'tig_ER', 'wal_ET')
+ 'ti_ET', 'tig_ER', 'wal_ET', 'lzh_TW',
+ 'ar_SA', 'bg_BG')
def test_time_locale(self):
# Test %X directive
loc = locale.getlocale(locale.LC_TIME)[0]
diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py
index b09e524a756..c52d2421941 100644
--- a/Lib/test/test_syntax.py
+++ b/Lib/test/test_syntax.py
@@ -2872,6 +2872,13 @@ class SyntaxErrorTestCase(unittest.TestCase):
"""
self._check_error(source, "parameter and nonlocal", lineno=3)
+ def test_raise_from_error_message(self):
+ source = """if 1:
+ raise AssertionError() from None
+ print(1,,2)
+ """
+ self._check_error(source, "invalid syntax", lineno=3)
+
def test_yield_outside_function(self):
self._check_error("if 0: yield", "outside function")
self._check_error("if 0: yield\nelse: x=1", "outside function")
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index 27524d86355..486bf10a0b5 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -869,12 +869,7 @@ class SysModuleTest(unittest.TestCase):
def assert_raise_on_new_sys_type(self, sys_attr):
# Users are intentionally prevented from creating new instances of
# sys.flags, sys.version_info, and sys.getwindowsversion.
- arg = sys_attr
- attr_type = type(sys_attr)
- with self.assertRaises(TypeError):
- attr_type(arg)
- with self.assertRaises(TypeError):
- attr_type.__new__(attr_type, arg)
+ support.check_disallow_instantiation(self, type(sys_attr), sys_attr)
def test_sys_flags_no_instantiation(self):
self.assert_raise_on_new_sys_type(sys.flags)
diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
index 13b55d0f0a2..00a3037c3e1 100644
--- a/Lib/test/test_threading.py
+++ b/Lib/test/test_threading.py
@@ -1247,6 +1247,61 @@ class ThreadTests(BaseTestCase):
self.assertEqual(err, b"")
self.assertIn(b"all clear", out)
+ @support.subTests('lock_class_name', ['Lock', 'RLock'])
+ def test_acquire_daemon_thread_lock_in_finalization(self, lock_class_name):
+ # gh-123940: Py_Finalize() prevents other threads from running Python
+ # code (and so, releasing locks), so acquiring a locked lock can not
+ # succeed.
+ # We raise an exception rather than hang.
+ code = textwrap.dedent(f"""
+ import threading
+ import time
+
+ thread_started_event = threading.Event()
+
+ lock = threading.{lock_class_name}()
+ def loop():
+ if {lock_class_name!r} == 'RLock':
+ lock.acquire()
+ with lock:
+ thread_started_event.set()
+ while True:
+ time.sleep(1)
+
+ uncontested_lock = threading.{lock_class_name}()
+
+ class Cycle:
+ def __init__(self):
+ self.self_ref = self
+ self.thr = threading.Thread(
+ target=loop, daemon=True)
+ self.thr.start()
+ thread_started_event.wait()
+
+ def __del__(self):
+ assert self.thr.is_alive()
+
+ # We *can* acquire an unlocked lock
+ uncontested_lock.acquire()
+ if {lock_class_name!r} == 'RLock':
+ uncontested_lock.acquire()
+
+ # Acquiring a locked one fails
+ try:
+ lock.acquire()
+ except PythonFinalizationError:
+ assert self.thr.is_alive()
+ print('got the correct exception!')
+
+ # Cycle holds a reference to itself, which ensures it is
+ # cleaned up during the GC that runs after daemon threads
+ # have been forced to exit during finalization.
+ Cycle()
+ """)
+ rc, out, err = assert_python_ok("-c", code)
+ self.assertEqual(err, b"")
+ self.assertIn(b"got the correct exception", out)
+
def test_start_new_thread_failed(self):
# gh-109746: if Python fails to start newly created thread
# due to failure of underlying PyThread_start_new_thread() call,
diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py
index a117413301b..fccdcc975e6 100644
--- a/Lib/test/test_types.py
+++ b/Lib/test/test_types.py
@@ -2,7 +2,7 @@
from test.support import (
run_with_locale, cpython_only, no_rerun,
- MISSING_C_DOCSTRINGS, EqualToForwardRef,
+ MISSING_C_DOCSTRINGS, EqualToForwardRef, check_disallow_instantiation,
)
from test.support.script_helper import assert_python_ok
from test.support.import_helper import import_fresh_module
@@ -21,6 +21,7 @@ import types
import unittest.mock
import weakref
import typing
+import re
c_types = import_fresh_module('types', fresh=['_types'])
py_types = import_fresh_module('types', blocked=['_types'])
@@ -517,8 +518,8 @@ class TypesTests(unittest.TestCase):
# and a number after the decimal. This is tricky, because
# a totally empty format specifier means something else.
# So, just use a sign flag
- test(1e200, '+g', '+1e+200')
- test(1e200, '+', '+1e+200')
+ test(1.25e200, '+g', '+1.25e+200')
+ test(1.25e200, '+', '+1.25e+200')
test(1.1e200, '+g', '+1.1e+200')
test(1.1e200, '+', '+1.1e+200')
@@ -1148,8 +1149,7 @@ class UnionTests(unittest.TestCase):
msg='Check for union reference leak.')
def test_instantiation(self):
- with self.assertRaises(TypeError):
- types.UnionType()
+ check_disallow_instantiation(self, types.UnionType)
self.assertIs(int, types.UnionType[int])
self.assertIs(int, types.UnionType[int, int])
self.assertEqual(int | str, types.UnionType[int, str])
@@ -1376,6 +1376,27 @@ class MappingProxyTests(unittest.TestCase):
view = self.mappingproxy(mapping)
self.assertEqual(hash(view), hash(mapping))
+ def test_richcompare(self):
+ mp1 = self.mappingproxy({'a': 1})
+ mp1_2 = self.mappingproxy({'a': 1})
+ mp2 = self.mappingproxy({'a': 2})
+
+ self.assertTrue(mp1 == mp1_2)
+ self.assertFalse(mp1 != mp1_2)
+ self.assertFalse(mp1 == mp2)
+ self.assertTrue(mp1 != mp2)
+
+ msg = "not supported between instances of 'mappingproxy' and 'mappingproxy'"
+
+ with self.assertRaisesRegex(TypeError, msg):
+ mp1 > mp2
+ with self.assertRaisesRegex(TypeError, msg):
+ mp1 < mp1_2
+ with self.assertRaisesRegex(TypeError, msg):
+ mp2 >= mp2
+ with self.assertRaisesRegex(TypeError, msg):
+ mp1_2 <= mp1
+
class ClassCreationTests(unittest.TestCase):
@@ -2010,6 +2031,24 @@ class SimpleNamespaceTests(unittest.TestCase):
self.assertEqual(ns1, ns2)
self.assertNotEqual(ns2, types.SimpleNamespace())
+ def test_richcompare_unsupported(self):
+ ns1 = types.SimpleNamespace(x=1)
+ ns2 = types.SimpleNamespace(y=2)
+
+ msg = re.escape(
+ "not supported between instances of "
+ "'types.SimpleNamespace' and 'types.SimpleNamespace'"
+ )
+
+ with self.assertRaisesRegex(TypeError, msg):
+ ns1 > ns2
+ with self.assertRaisesRegex(TypeError, msg):
+ ns1 >= ns2
+ with self.assertRaisesRegex(TypeError, msg):
+ ns1 < ns2
+ with self.assertRaisesRegex(TypeError, msg):
+ ns1 <= ns2
+
def test_nested(self):
ns1 = types.SimpleNamespace(a=1, b=2)
ns2 = types.SimpleNamespace()
diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py
index ef02e8202fc..b1615bbff38 100644
--- a/Lib/test/test_typing.py
+++ b/Lib/test/test_typing.py
@@ -1605,7 +1605,10 @@ class TypeVarTupleTests(BaseTestCase):
self.assertEqual(gth(func1), {'args': Unpack[Ts]})
def func2(*args: *tuple[int, str]): pass
- self.assertEqual(gth(func2), {'args': Unpack[tuple[int, str]]})
+ hint = gth(func2)['args']
+ self.assertIsInstance(hint, types.GenericAlias)
+ self.assertEqual(hint.__args__[0], int)
+ self.assertIs(hint.__unpacked__, True)
class CustomVariadic(Generic[*Ts]): pass
@@ -1620,7 +1623,10 @@ class TypeVarTupleTests(BaseTestCase):
{'args': Unpack[Ts]})
def func2(*args: '*tuple[int, str]'): pass
- self.assertEqual(gth(func2), {'args': Unpack[tuple[int, str]]})
+ hint = gth(func2)['args']
+ self.assertIsInstance(hint, types.GenericAlias)
+ self.assertEqual(hint.__args__[0], int)
+ self.assertIs(hint.__unpacked__, True)
class CustomVariadic(Generic[*Ts]): pass
@@ -6303,31 +6309,6 @@ class NoTypeCheckTests(BaseTestCase):
class InternalsTests(BaseTestCase):
- def test_deprecation_for_no_type_params_passed_to__evaluate(self):
- with self.assertWarnsRegex(
- DeprecationWarning,
- (
- "Failing to pass a value to the 'type_params' parameter "
- "of 'typing._eval_type' is deprecated"
- )
- ) as cm:
- self.assertEqual(typing._eval_type(list["int"], globals(), {}), list[int])
-
- self.assertEqual(cm.filename, __file__)
-
- f = ForwardRef("int")
-
- with self.assertWarnsRegex(
- DeprecationWarning,
- (
- "Failing to pass a value to the 'type_params' parameter "
- "of 'typing.ForwardRef._evaluate' is deprecated"
- )
- ) as cm:
- self.assertIs(f._evaluate(globals(), {}, recursive_guard=frozenset()), int)
-
- self.assertEqual(cm.filename, __file__)
-
def test_collect_parameters(self):
typing = import_helper.import_fresh_module("typing")
with self.assertWarnsRegex(
@@ -7114,6 +7095,24 @@ class GetTypeHintsTests(BaseTestCase):
right_hints = get_type_hints(t.add_right, globals(), locals())
self.assertEqual(right_hints['node'], Node[T])
+ def test_get_type_hints_preserve_generic_alias_subclasses(self):
+ # https://github.com/python/cpython/issues/130870
+ # A real world example of this is `collections.abc.Callable`. When parameterized,
+ # the result is a subclass of `types.GenericAlias`.
+ class MyAlias(types.GenericAlias):
+ pass
+
+ class MyClass:
+ def __class_getitem__(cls, args):
+ return MyAlias(cls, args)
+
+ # Using a forward reference is important, otherwise it works as expected.
+ # `y` tests that the `GenericAlias` subclass is preserved when stripping `Annotated`.
+ def func(x: MyClass['int'], y: MyClass[Annotated[int, ...]]): ...
+
+ assert isinstance(get_type_hints(func)['x'], MyAlias)
+ assert isinstance(get_type_hints(func)['y'], MyAlias)
+
class GetUtilitiesTestCase(TestCase):
def test_get_origin(self):
@@ -7327,6 +7326,12 @@ class EvaluateForwardRefTests(BaseTestCase):
list[EqualToForwardRef('A')],
)
+ def test_with_module(self):
+ from test.typinganndata import fwdref_module
+
+ typing.evaluate_forward_ref(
+ fwdref_module.fw,)
+
class CollectionsAbcTests(BaseTestCase):
diff --git a/Lib/test/test_unittest/test_case.py b/Lib/test/test_unittest/test_case.py
index d66cab146af..cf10e956bf2 100644
--- a/Lib/test/test_unittest/test_case.py
+++ b/Lib/test/test_unittest/test_case.py
@@ -1920,6 +1920,22 @@ test case
with self.assertLogs():
raise ZeroDivisionError("Unexpected")
+ def testAssertLogsWithFormatter(self):
+ # Check alternative formats will be respected
+ format = "[No.1: the larch] %(levelname)s:%(name)s:%(message)s"
+ formatter = logging.Formatter(format)
+ with self.assertNoStderr():
+ with self.assertLogs() as cm:
+ log_foo.info("1")
+ log_foobar.debug("2")
+ self.assertEqual(cm.output, ["INFO:foo:1"])
+ self.assertLogRecords(cm.records, [{'name': 'foo'}])
+ with self.assertLogs(formatter=formatter) as cm:
+ log_foo.info("1")
+ log_foobar.debug("2")
+ self.assertEqual(cm.output, ["[No.1: the larch] INFO:foo:1"])
+ self.assertLogRecords(cm.records, [{'name': 'foo'}])
+
def testAssertNoLogsDefault(self):
with self.assertRaises(self.failureException) as cm:
with self.assertNoLogs():
diff --git a/Lib/test/test_xml_etree.py b/Lib/test/test_xml_etree.py
index 38be2cd437f..bf6d5074fde 100644
--- a/Lib/test/test_xml_etree.py
+++ b/Lib/test/test_xml_etree.py
@@ -218,6 +218,33 @@ class ElementTreeTest(unittest.TestCase):
def serialize_check(self, elem, expected):
self.assertEqual(serialize(elem), expected)
+ def test_constructor(self):
+ # Test constructor behavior.
+
+ with self.assertRaises(TypeError):
+ tree = ET.ElementTree("")
+ with self.assertRaises(TypeError):
+ tree = ET.ElementTree(ET.ElementTree())
+
+ def test_setroot(self):
+ # Test _setroot behavior.
+
+ tree = ET.ElementTree()
+ element = ET.Element("tag")
+ tree._setroot(element)
+ self.assertEqual(tree.getroot().tag, "tag")
+ self.assertEqual(tree.getroot(), element)
+
+ # Test behavior with an invalid root element
+
+ tree = ET.ElementTree()
+ with self.assertRaises(TypeError):
+ tree._setroot("")
+ with self.assertRaises(TypeError):
+ tree._setroot(ET.ElementTree())
+ with self.assertRaises(TypeError):
+ tree._setroot(None)
+
def test_interface(self):
# Test element tree interface.
diff --git a/Lib/test/test_zoneinfo/test_zoneinfo.py b/Lib/test/test_zoneinfo/test_zoneinfo.py
index f313e394f49..44e87e71c8e 100644
--- a/Lib/test/test_zoneinfo/test_zoneinfo.py
+++ b/Lib/test/test_zoneinfo/test_zoneinfo.py
@@ -58,6 +58,10 @@ def tearDownModule():
shutil.rmtree(TEMP_DIR)
+class CustomError(Exception):
+ pass
+
+
class TzPathUserMixin:
"""
Adds a setUp() and tearDown() to make TZPATH manipulations thread-safe.
@@ -404,6 +408,25 @@ class ZoneInfoTest(TzPathUserMixin, ZoneInfoTestBase):
self.assertEqual(t.utcoffset(), offset.utcoffset)
self.assertEqual(t.dst(), offset.dst)
+ def test_cache_exception(self):
+ class Incomparable(str):
+ eq_called = False
+ def __eq__(self, other):
+ self.eq_called = True
+ raise CustomError
+ __hash__ = str.__hash__
+
+ key = "America/Los_Angeles"
+ tz1 = self.klass(key)
+ key = Incomparable(key)
+ try:
+ tz2 = self.klass(key)
+ except CustomError:
+ self.assertTrue(key.eq_called)
+ else:
+ self.assertFalse(key.eq_called)
+ self.assertIs(tz2, tz1)
+
class CZoneInfoTest(ZoneInfoTest):
module = c_zoneinfo
@@ -1507,6 +1530,26 @@ class ZoneInfoCacheTest(TzPathUserMixin, ZoneInfoTestBase):
self.assertIsNot(dub0, dub1)
self.assertIs(tok0, tok1)
+ def test_clear_cache_refleak(self):
+ class Stringy(str):
+ allow_comparisons = True
+ def __eq__(self, other):
+ if not self.allow_comparisons:
+ raise CustomError
+ return super().__eq__(other)
+ __hash__ = str.__hash__
+
+ key = Stringy("America/Los_Angeles")
+ self.klass(key)
+ key.allow_comparisons = False
+ try:
+ # Note: This is try/except rather than assertRaises because
+ # there is no guarantee that the key is even still in the cache,
+ # or that the key for the cache is the original `key` object.
+ self.klass.clear_cache(only_keys="America/Los_Angeles")
+ except CustomError:
+ pass
+
class CZoneInfoCacheTest(ZoneInfoCacheTest):
module = c_zoneinfo
diff --git a/Lib/test/test_zoneinfo/test_zoneinfo_property.py b/Lib/test/test_zoneinfo/test_zoneinfo_property.py
index feaa77f3e7f..294c7e9b27a 100644
--- a/Lib/test/test_zoneinfo/test_zoneinfo_property.py
+++ b/Lib/test/test_zoneinfo/test_zoneinfo_property.py
@@ -146,20 +146,24 @@ class ZoneInfoPickleTest(ZoneInfoTestBase):
@add_key_examples
def test_pickle_unpickle_cache(self, key):
zi = self.klass(key)
- pkl_str = pickle.dumps(zi)
- zi_rt = pickle.loads(pkl_str)
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ with self.subTest(proto=proto):
+ pkl_str = pickle.dumps(zi, proto)
+ zi_rt = pickle.loads(pkl_str)
- self.assertIs(zi, zi_rt)
+ self.assertIs(zi, zi_rt)
@hypothesis.given(key=valid_keys())
@add_key_examples
def test_pickle_unpickle_no_cache(self, key):
zi = self.klass.no_cache(key)
- pkl_str = pickle.dumps(zi)
- zi_rt = pickle.loads(pkl_str)
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ with self.subTest(proto=proto):
+ pkl_str = pickle.dumps(zi, proto)
+ zi_rt = pickle.loads(pkl_str)
- self.assertIsNot(zi, zi_rt)
- self.assertEqual(str(zi), str(zi_rt))
+ self.assertIsNot(zi, zi_rt)
+ self.assertEqual(str(zi), str(zi_rt))
@hypothesis.given(key=valid_keys())
@add_key_examples
diff --git a/Lib/test/test_zstd.py b/Lib/test/test_zstd.py
index d4c28aed38e..90b2adc9665 100644
--- a/Lib/test/test_zstd.py
+++ b/Lib/test/test_zstd.py
@@ -62,15 +62,18 @@ SAMPLES = None
TRAINED_DICT = None
-SUPPORT_MULTITHREADING = False
+# Cannot be deferred to setup as it is used to check whether or not to skip
+# tests
+try:
+ SUPPORT_MULTITHREADING = CompressionParameter.nb_workers.bounds() != (0, 0)
+except Exception:
+ SUPPORT_MULTITHREADING = False
C_INT_MIN = -(2**31)
C_INT_MAX = (2**31) - 1
def setUpModule():
- global SUPPORT_MULTITHREADING
- SUPPORT_MULTITHREADING = CompressionParameter.nb_workers.bounds() != (0, 0)
# uncompressed size 130KB, more than a zstd block.
# with a frame epilogue, 4 bytes checksum.
global DAT_130K_D
diff --git a/Lib/test/typinganndata/fwdref_module.py b/Lib/test/typinganndata/fwdref_module.py
new file mode 100644
index 00000000000..7347a7a4245
--- /dev/null
+++ b/Lib/test/typinganndata/fwdref_module.py
@@ -0,0 +1,6 @@
+from typing import ForwardRef
+
+MyList = list[int]
+MyDict = dict[str, 'MyList']
+
+fw = ForwardRef('MyDict', module=__name__)
diff --git a/Lib/typing.py b/Lib/typing.py
index ed1dd4fc641..f1455c273d3 100644
--- a/Lib/typing.py
+++ b/Lib/typing.py
@@ -407,6 +407,17 @@ def _tp_cache(func=None, /, *, typed=False):
return decorator
+def _rebuild_generic_alias(alias: GenericAlias, args: tuple[object, ...]) -> GenericAlias:
+ is_unpacked = alias.__unpacked__
+ if _should_unflatten_callable_args(alias, args):
+ t = alias.__origin__[(args[:-1], args[-1])]
+ else:
+ t = alias.__origin__[args]
+ if is_unpacked:
+ t = Unpack[t]
+ return t
+
+
def _deprecation_warning_for_no_type_params_passed(funcname: str) -> None:
import warnings
@@ -426,20 +437,14 @@ class _Sentinel:
return '<sentinel>'
-_sentinel = _Sentinel()
-
-
-def _eval_type(t, globalns, localns, type_params=_sentinel, *, recursive_guard=frozenset(),
- format=None, owner=None):
+def _eval_type(t, globalns, localns, type_params, *, recursive_guard=frozenset(),
+ format=None, owner=None, parent_fwdref=None):
"""Evaluate all forward references in the given type t.
For use of globalns and localns see the docstring for get_type_hints().
recursive_guard is used to prevent infinite recursion with a recursive
ForwardRef.
"""
- if type_params is _sentinel:
- _deprecation_warning_for_no_type_params_passed("typing._eval_type")
- type_params = ()
if isinstance(t, _lazy_annotationlib.ForwardRef):
# If the forward_ref has __forward_module__ set, evaluate() infers the globals
# from the module, and it will probably pick better than the globals we have here.
@@ -451,28 +456,23 @@ def _eval_type(t, globalns, localns, type_params=_sentinel, *, recursive_guard=f
if isinstance(t, (_GenericAlias, GenericAlias, Union)):
if isinstance(t, GenericAlias):
args = tuple(
- _make_forward_ref(arg) if isinstance(arg, str) else arg
+ _make_forward_ref(arg, parent_fwdref=parent_fwdref) if isinstance(arg, str) else arg
for arg in t.__args__
)
- is_unpacked = t.__unpacked__
- if _should_unflatten_callable_args(t, args):
- t = t.__origin__[(args[:-1], args[-1])]
- else:
- t = t.__origin__[args]
- if is_unpacked:
- t = Unpack[t]
+ else:
+ args = t.__args__
ev_args = tuple(
_eval_type(
a, globalns, localns, type_params, recursive_guard=recursive_guard,
format=format, owner=owner,
)
- for a in t.__args__
+ for a in args
)
if ev_args == t.__args__:
return t
if isinstance(t, GenericAlias):
- return GenericAlias(t.__origin__, ev_args)
+ return _rebuild_generic_alias(t, ev_args)
if isinstance(t, Union):
return functools.reduce(operator.or_, ev_args)
else:
@@ -936,7 +936,12 @@ def TypeIs(self, parameters):
return _GenericAlias(self, (item,))
-def _make_forward_ref(code, **kwargs):
+def _make_forward_ref(code, *, parent_fwdref=None, **kwargs):
+ if parent_fwdref is not None:
+ if parent_fwdref.__forward_module__ is not None:
+ kwargs['module'] = parent_fwdref.__forward_module__
+ if parent_fwdref.__owner__ is not None:
+ kwargs['owner'] = parent_fwdref.__owner__
forward_ref = _lazy_annotationlib.ForwardRef(code, **kwargs)
# For compatibility, eagerly compile the forwardref's code.
forward_ref.__forward_code__
@@ -1001,6 +1006,7 @@ def evaluate_forward_ref(
recursive_guard=_recursive_guard | {forward_ref.__forward_arg__},
format=format,
owner=owner,
+ parent_fwdref=forward_ref,
)
@@ -1860,7 +1866,9 @@ def _allow_reckless_class_checks(depth=2):
The abc and functools modules indiscriminately call isinstance() and
issubclass() on the whole MRO of a user class, which may contain protocols.
"""
- return _caller(depth) in {'abc', 'functools', None}
+ # gh-136047: When `_abc` module is not available, `_py_abc` is required to
+ # allow `_py_abc.ABCMeta` fallback.
+ return _caller(depth) in {'abc', '_py_abc', 'functools', None}
_PROTO_ALLOWLIST = {
@@ -2404,7 +2412,7 @@ def _strip_annotations(t):
stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
if stripped_args == t.__args__:
return t
- return GenericAlias(t.__origin__, stripped_args)
+ return _rebuild_generic_alias(t, stripped_args)
if isinstance(t, Union):
stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
if stripped_args == t.__args__:
diff --git a/Lib/unittest/_log.py b/Lib/unittest/_log.py
index 94868e5bb95..3d69385ea24 100644
--- a/Lib/unittest/_log.py
+++ b/Lib/unittest/_log.py
@@ -30,7 +30,7 @@ class _AssertLogsContext(_BaseTestCaseContext):
LOGGING_FORMAT = "%(levelname)s:%(name)s:%(message)s"
- def __init__(self, test_case, logger_name, level, no_logs):
+ def __init__(self, test_case, logger_name, level, no_logs, formatter=None):
_BaseTestCaseContext.__init__(self, test_case)
self.logger_name = logger_name
if level:
@@ -39,13 +39,14 @@ class _AssertLogsContext(_BaseTestCaseContext):
self.level = logging.INFO
self.msg = None
self.no_logs = no_logs
+ self.formatter = formatter
def __enter__(self):
if isinstance(self.logger_name, logging.Logger):
logger = self.logger = self.logger_name
else:
logger = self.logger = logging.getLogger(self.logger_name)
- formatter = logging.Formatter(self.LOGGING_FORMAT)
+ formatter = self.formatter or logging.Formatter(self.LOGGING_FORMAT)
handler = _CapturingHandler()
handler.setLevel(self.level)
handler.setFormatter(formatter)
diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py
index db10de68e4a..eba50839cd3 100644
--- a/Lib/unittest/case.py
+++ b/Lib/unittest/case.py
@@ -849,7 +849,7 @@ class TestCase(object):
context = _AssertNotWarnsContext(expected_warning, self)
return context.handle('_assertNotWarns', args, kwargs)
- def assertLogs(self, logger=None, level=None):
+ def assertLogs(self, logger=None, level=None, formatter=None):
"""Fail unless a log message of level *level* or higher is emitted
on *logger_name* or its children. If omitted, *level* defaults to
INFO and *logger* defaults to the root logger.
@@ -861,6 +861,8 @@ class TestCase(object):
`records` attribute will be a list of the corresponding LogRecord
objects.
+ Optionally supply `formatter` to control how messages are formatted.
+
Example::
with self.assertLogs('foo', level='INFO') as cm:
@@ -871,7 +873,7 @@ class TestCase(object):
"""
# Lazy import to avoid importing logging if it is not needed.
from ._log import _AssertLogsContext
- return _AssertLogsContext(self, logger, level, no_logs=False)
+ return _AssertLogsContext(self, logger, level, no_logs=False, formatter=formatter)
def assertNoLogs(self, logger=None, level=None):
""" Fail unless no log messages of level *level* or higher are emitted
diff --git a/Lib/unittest/mock.py b/Lib/unittest/mock.py
index e370aa48b7c..e1dbfdacf56 100644
--- a/Lib/unittest/mock.py
+++ b/Lib/unittest/mock.py
@@ -986,7 +986,7 @@ class NonCallableMock(Base):
def assert_called_once_with(self, /, *args, **kwargs):
- """assert that the mock was called exactly once and that that call was
+ """assert that the mock was called exactly once and that call was
with the specified arguments."""
if not self.call_count == 1:
msg = ("Expected '%s' to be called once. Called %s times.%s"
diff --git a/Lib/xml/etree/ElementTree.py b/Lib/xml/etree/ElementTree.py
index 44ab5d18624..dafe5b1b8a0 100644
--- a/Lib/xml/etree/ElementTree.py
+++ b/Lib/xml/etree/ElementTree.py
@@ -527,7 +527,9 @@ class ElementTree:
"""
def __init__(self, element=None, file=None):
- # assert element is None or iselement(element)
+ if element is not None and not iselement(element):
+ raise TypeError('expected an Element, not %s' %
+ type(element).__name__)
self._root = element # first node
if file:
self.parse(file)
@@ -543,7 +545,9 @@ class ElementTree:
with the given element. Use with care!
"""
- # assert iselement(element)
+ if not iselement(element):
+ raise TypeError('expected an Element, not %s'
+ % type(element).__name__)
self._root = element
def parse(self, source, parser=None):
@@ -709,6 +713,8 @@ class ElementTree:
of start/end tags
"""
+ if self._root is None:
+ raise TypeError('ElementTree not initialized')
if not method:
method = "xml"
elif method not in _serialize:
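A short sketch of the stricter behaviour introduced above (not part of the patch): constructing or re-rooting an ElementTree with something that is not an Element now fails immediately instead of blanking the output file later.

    import xml.etree.ElementTree as ET

    tree = ET.ElementTree(ET.Element("root"))   # still accepted
    try:
        ET.ElementTree("not-an-element")        # rejected up front
    except TypeError as exc:
        print(exc)                              # expected an Element, not str
    try:
        tree._setroot(None)                     # None is no longer accepted either
    except TypeError as exc:
        print(exc)                              # expected an Element, not NoneType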
diff --git a/Lib/zoneinfo/_common.py b/Lib/zoneinfo/_common.py
index 6e05abc3239..03cc42149f9 100644
--- a/Lib/zoneinfo/_common.py
+++ b/Lib/zoneinfo/_common.py
@@ -9,9 +9,13 @@ def load_tzdata(key):
resource_name = components[-1]
try:
- return resources.files(package_name).joinpath(resource_name).open("rb")
+ path = resources.files(package_name).joinpath(resource_name)
+ # gh-85702: Prevent PermissionError on Windows
+ if path.is_dir():
+ raise IsADirectoryError
+ return path.open("rb")
except (ImportError, FileNotFoundError, UnicodeEncodeError, IsADirectoryError):
- # There are three types of exception that can be raised that all amount
+ # There are four types of exception that can be raised that all amount
# to "we cannot find this key":
#
# ImportError: If package_name doesn't exist (e.g. if tzdata is not
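A hedged sketch of the user-visible effect of the change above. ``"America"`` resolves to a package directory (not a TZif resource) when the third-party ``tzdata`` package is installed; without ``tzdata`` the same call fails via ``ImportError``, which is also mapped to the not-found error.

    from zoneinfo import ZoneInfoNotFoundError
    from zoneinfo._common import load_tzdata

    try:
        load_tzdata("America")   # a directory, not a time zone data file
    except ZoneInfoNotFoundError:
        # Previously this could surface as PermissionError on Windows (gh-85702).
        print("treated as an unknown key")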
diff --git a/Misc/ACKS b/Misc/ACKS
index 74cf29cdbc5..d1490e1e46c 100644
--- a/Misc/ACKS
+++ b/Misc/ACKS
@@ -478,6 +478,7 @@ Dean Draayer
Fred L. Drake, Jr.
Mehdi Drissi
Derk Drukker
+Weilin Du
John DuBois
Paul Dubois
Jacques Ducasse
@@ -1481,6 +1482,7 @@ Jean-François Piéronne
Oleg Plakhotnyuk
Anatoliy Platonov
Marcel Plch
+Stefan Pochmann
Kirill Podoprigora
Remi Pointel
Jon Poler
diff --git a/Misc/NEWS.d/next/Build/2025-05-16-07-46-06.gh-issue-115119.ALBgS_.rst b/Misc/NEWS.d/next/Build/2025-05-16-07-46-06.gh-issue-115119.ALBgS_.rst
new file mode 100644
index 00000000000..8c2d15a3228
--- /dev/null
+++ b/Misc/NEWS.d/next/Build/2025-05-16-07-46-06.gh-issue-115119.ALBgS_.rst
@@ -0,0 +1,4 @@
+Removed the implicit fallback to the bundled copy of the ``libmpdec``
+library. Use of the bundled copy must now be explicitly enabled via
+:option:`--with-system-libmpdec` set to ``no`` or
+:option:`!--without-system-libmpdec`. Patch by Sergey B Kirpichev.
diff --git a/Misc/NEWS.d/next/C_API/2025-05-20-17-13-51.gh-issue-134009.CpCmry.rst b/Misc/NEWS.d/next/C_API/2025-05-20-17-13-51.gh-issue-134009.CpCmry.rst
new file mode 100644
index 00000000000..f060f09de19
--- /dev/null
+++ b/Misc/NEWS.d/next/C_API/2025-05-20-17-13-51.gh-issue-134009.CpCmry.rst
@@ -0,0 +1 @@
+Expose :c:func:`PyMutex_IsLocked` as part of the public C API.
diff --git a/Misc/NEWS.d/next/C_API/2025-06-19-12-47-18.gh-issue-133157.1WA85f.rst b/Misc/NEWS.d/next/C_API/2025-06-19-12-47-18.gh-issue-133157.1WA85f.rst
new file mode 100644
index 00000000000..1b37d884e57
--- /dev/null
+++ b/Misc/NEWS.d/next/C_API/2025-06-19-12-47-18.gh-issue-133157.1WA85f.rst
@@ -0,0 +1 @@
+Remove the private, undocumented macro :c:macro:`!_Py_NO_SANITIZE_UNDEFINED`.
diff --git a/Misc/NEWS.d/next/C_API/2025-06-25-01-03-10.gh-issue-135906.UBrCWq.rst b/Misc/NEWS.d/next/C_API/2025-06-25-01-03-10.gh-issue-135906.UBrCWq.rst
new file mode 100644
index 00000000000..7852759a702
--- /dev/null
+++ b/Misc/NEWS.d/next/C_API/2025-06-25-01-03-10.gh-issue-135906.UBrCWq.rst
@@ -0,0 +1 @@
+Fix compilation errors when compiling the internal headers with a C++ compiler.
diff --git a/Misc/NEWS.d/next/C_API/2025-07-01-16-22-39.gh-issue-135075.angu3J.rst b/Misc/NEWS.d/next/C_API/2025-07-01-16-22-39.gh-issue-135075.angu3J.rst
new file mode 100644
index 00000000000..88e0fa65f45
--- /dev/null
+++ b/Misc/NEWS.d/next/C_API/2025-07-01-16-22-39.gh-issue-135075.angu3J.rst
@@ -0,0 +1,2 @@
+Make :c:func:`PyObject_SetAttr` and :c:func:`PyObject_SetAttrString` fail if
+called with ``NULL`` value and an exception set. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-04-19-16-22-47.gh-issue-132732.jgqhlF.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-04-19-16-22-47.gh-issue-132732.jgqhlF.rst
new file mode 100644
index 00000000000..aadaf2169fd
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-04-19-16-22-47.gh-issue-132732.jgqhlF.rst
@@ -0,0 +1 @@
+Automatically constant evaluate bytecode operations marked as pure in the JIT optimizer.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-05-31-19-24-54.gh-issue-134280.NDVbzY.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-31-19-24-54.gh-issue-134280.NDVbzY.rst
new file mode 100644
index 00000000000..f8227216909
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-31-19-24-54.gh-issue-134280.NDVbzY.rst
@@ -0,0 +1,2 @@
+Disable constant folding for ``~`` with a boolean argument.
+This moves the deprecation warning from compile time to runtime.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-06-12-11-19-52.gh-issue-135422.F6yQi6.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-12-11-19-52.gh-issue-135422.F6yQi6.rst
new file mode 100644
index 00000000000..bb0f178f91a
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-12-11-19-52.gh-issue-135422.F6yQi6.rst
@@ -0,0 +1 @@
+Fix regression in :exc:`SyntaxError` messages after :gh:`134036`.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-06-16-03-56-15.gh-issue-135551.hRTQO-.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-16-03-56-15.gh-issue-135551.hRTQO-.rst
new file mode 100644
index 00000000000..22dda2a3e97
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-16-03-56-15.gh-issue-135551.hRTQO-.rst
@@ -0,0 +1 @@
+Sorting randomly ordered lists will often run a bit faster, thanks to a new scheme for picking minimum run lengths from Stefan Pochmann, which arranges for the merge tree to be as evenly balanced as is possible.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-06-17-22-34-58.gh-issue-135607.ucsLVu.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-17-22-34-58.gh-issue-135607.ucsLVu.rst
new file mode 100644
index 00000000000..859259a9ace
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-17-22-34-58.gh-issue-135607.ucsLVu.rst
@@ -0,0 +1,2 @@
+Fix potential :mod:`weakref` races in an object's destructor on the :term:`free threaded <free
+threading>` build.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-06-24-16-46-34.gh-issue-135904.78xfon.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-24-16-46-34.gh-issue-135904.78xfon.rst
new file mode 100644
index 00000000000..ecbd8fda9a5
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-24-16-46-34.gh-issue-135904.78xfon.rst
@@ -0,0 +1,2 @@
+Perform more aggressive control-flow optimizations on the machine code
+templates emitted by the experimental JIT compiler.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-06-26-15-25-51.gh-issue-78465.MbDN8X.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-26-15-25-51.gh-issue-78465.MbDN8X.rst
new file mode 100644
index 00000000000..99734d63c5d
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-06-26-15-25-51.gh-issue-78465.MbDN8X.rst
@@ -0,0 +1,2 @@
+Fix the error message for ``cls.__new__(cls, ...)`` where ``cls`` is not an
+instantiable builtin or extension type (with ``tp_new`` set to ``NULL``).
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-07-02-15-18-41.gh-issue-136203.Y934sC.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-07-02-15-18-41.gh-issue-136203.Y934sC.rst
new file mode 100644
index 00000000000..5a622bab8b8
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-07-02-15-18-41.gh-issue-136203.Y934sC.rst
@@ -0,0 +1,2 @@
+Improve the :exc:`TypeError` error message when rich-comparing two
+:class:`types.MappingProxyType` objects.
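Illustrative behaviour covered by this entry and the test above (a sketch; ordering comparisons were already unsupported, only the error message changes):

    from types import MappingProxyType

    a = MappingProxyType({"x": 1})
    b = MappingProxyType({"x": 2})
    print(a == b)    # False -- equality still works
    try:
        a < b
    except TypeError as exc:
        print(exc)   # '<' not supported between instances of 'mappingproxy' and 'mappingproxy'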
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-07-06-14-53-19.gh-issue-109700.KVNQQi.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-07-06-14-53-19.gh-issue-109700.KVNQQi.rst
new file mode 100644
index 00000000000..a37f4a51050
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-07-06-14-53-19.gh-issue-109700.KVNQQi.rst
@@ -0,0 +1 @@
+Fix memory error handling in :c:func:`PyDict_SetDefault`.
diff --git a/Misc/NEWS.d/next/Documentation/2025-07-01-21-04-47.gh-issue-136155.ufmH4Q.rst b/Misc/NEWS.d/next/Documentation/2025-07-01-21-04-47.gh-issue-136155.ufmH4Q.rst
new file mode 100644
index 00000000000..0341b5f7f0d
--- /dev/null
+++ b/Misc/NEWS.d/next/Documentation/2025-07-01-21-04-47.gh-issue-136155.ufmH4Q.rst
@@ -0,0 +1 @@
+EPUB builds are fixed by excluding non-XHTML-compatible tags.
diff --git a/Misc/NEWS.d/next/Library/2025-03-11-05-24-14.gh-issue-130664.g0yNMm.rst b/Misc/NEWS.d/next/Library/2025-03-11-05-24-14.gh-issue-130664.g0yNMm.rst
new file mode 100644
index 00000000000..dbe783a2a99
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-03-11-05-24-14.gh-issue-130664.g0yNMm.rst
@@ -0,0 +1,4 @@
+Handle a corner case in :class:`~fractions.Fraction` formatting: treat
+zero-padding (a zero (``'0'``) character preceding the width field) as
+equivalent to a fill character of ``'0'`` with an alignment type of ``'='``,
+just as for :class:`float`.
diff --git a/Misc/NEWS.d/next/Library/2025-04-07-09-53-54.gh-issue-87790.6nj3zQ.rst b/Misc/NEWS.d/next/Library/2025-04-07-09-53-54.gh-issue-87790.6nj3zQ.rst
new file mode 100644
index 00000000000..cf80c71271b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-04-07-09-53-54.gh-issue-87790.6nj3zQ.rst
@@ -0,0 +1,2 @@
+Support underscore and comma as thousands separators in the fractional part
+for :class:`~decimal.Decimal`'s formatting. Patch by Sergey B Kirpichev.
diff --git a/Misc/NEWS.d/next/Library/2025-04-07-10-20-16.gh-issue-87790.X2SjJe.rst b/Misc/NEWS.d/next/Library/2025-04-07-10-20-16.gh-issue-87790.X2SjJe.rst
new file mode 100644
index 00000000000..be2a30d69ca
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-04-07-10-20-16.gh-issue-87790.X2SjJe.rst
@@ -0,0 +1,2 @@
+Support underscore and comma as thousands separators in the fractional part
+for :class:`~fractions.Fraction`'s formatting. Patch by Sergey B Kirpichev.
diff --git a/Misc/NEWS.d/next/Library/2025-04-22-21-00-23.gh-issue-123471.asOLA2.rst b/Misc/NEWS.d/next/Library/2025-04-22-21-00-23.gh-issue-123471.asOLA2.rst
new file mode 100644
index 00000000000..a4b4b6d2c23
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-04-22-21-00-23.gh-issue-123471.asOLA2.rst
@@ -0,0 +1 @@
+Make concurrent iterations over :class:`itertools.combinations` and :class:`itertools.product` safe under free-threading.
diff --git a/Misc/NEWS.d/next/Library/2025-06-03-12-59-17.gh-issue-135069.xop30V.rst b/Misc/NEWS.d/next/Library/2025-06-03-12-59-17.gh-issue-135069.xop30V.rst
new file mode 100644
index 00000000000..1affb5e2aad
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-03-12-59-17.gh-issue-135069.xop30V.rst
@@ -0,0 +1,3 @@
+Fix the "Invalid error handling" exception in
+:class:`!encodings.idna.IncrementalDecoder` to correctly replace the
+'errors' parameter.
diff --git a/Misc/NEWS.d/next/Library/2025-06-10-10-22-18.gh-issue-130870.JipqbO.rst b/Misc/NEWS.d/next/Library/2025-06-10-10-22-18.gh-issue-130870.JipqbO.rst
new file mode 100644
index 00000000000..64173285e08
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-10-10-22-18.gh-issue-130870.JipqbO.rst
@@ -0,0 +1,2 @@
+Preserve :class:`types.GenericAlias` subclasses in
+:func:`typing.get_type_hints`.
diff --git a/Misc/NEWS.d/next/Library/2025-06-18-19-25-32.gh-issue-123471.lx1Xbt.rst b/Misc/NEWS.d/next/Library/2025-06-18-19-25-32.gh-issue-123471.lx1Xbt.rst
new file mode 100644
index 00000000000..6f395024a9e
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-18-19-25-32.gh-issue-123471.lx1Xbt.rst
@@ -0,0 +1 @@
+Make concurrent iterations over :class:`itertools.chain` safe under :term:`free threading`.
diff --git a/Misc/NEWS.d/next/Library/2025-06-22-02-16-17.gh-issue-135640.FXyFL6.rst b/Misc/NEWS.d/next/Library/2025-06-22-02-16-17.gh-issue-135640.FXyFL6.rst
new file mode 100644
index 00000000000..ad217b57b4b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-22-02-16-17.gh-issue-135640.FXyFL6.rst
@@ -0,0 +1,4 @@
+Address a bug where it was possible to call
+:func:`xml.etree.ElementTree.ElementTree.write` on an ElementTree object with
+an invalid root element. This behavior blanked the file passed to ``write``
+if it already existed.
diff --git a/Misc/NEWS.d/next/Library/2025-06-23-11-04-25.gh-issue-135836.-C-c4v.rst b/Misc/NEWS.d/next/Library/2025-06-23-11-04-25.gh-issue-135836.-C-c4v.rst
new file mode 100644
index 00000000000..f93c9faee58
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-23-11-04-25.gh-issue-135836.-C-c4v.rst
@@ -0,0 +1 @@
+Fix :exc:`IndexError` in :meth:`asyncio.loop.create_connection` that could occur when the Happy Eyeballs algorithm resulted in an empty exceptions list during connection attempts.
diff --git a/Misc/NEWS.d/next/Library/2025-06-24-10-23-37.gh-issue-135853.6xDNOG.rst b/Misc/NEWS.d/next/Library/2025-06-24-10-23-37.gh-issue-135853.6xDNOG.rst
new file mode 100644
index 00000000000..3fea3bc3e7c
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-24-10-23-37.gh-issue-135853.6xDNOG.rst
@@ -0,0 +1,2 @@
+:mod:`math`: expose the C99 :func:`~math.signbit` function to determine whether
+the sign bit of a floating-point value is set. Patch by Bénédikt Tran.
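A short interactive sketch of the new function, using the same values exercised by the test added in this patch:

    import math

    print(math.signbit(1.0))            # False
    print(math.signbit(-1.0))           # True
    print(math.signbit(-0.0))           # True -- distinguishes -0.0 from 0.0
    print(math.signbit(float("-inf")))  # True
    print(math.signbit(float("nan")))   # False (a NaN with the sign bit clear)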
diff --git a/Misc/NEWS.d/next/Library/2025-06-24-10-52-35.gh-issue-135836.s37351.rst b/Misc/NEWS.d/next/Library/2025-06-24-10-52-35.gh-issue-135836.s37351.rst
new file mode 100644
index 00000000000..1d1e7a2298c
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-24-10-52-35.gh-issue-135836.s37351.rst
@@ -0,0 +1,3 @@
+Fix :exc:`IndexError` in :meth:`asyncio.loop.create_connection` that could
+occur when a non-\ :exc:`OSError` exception is raised during connection and
+socket's ``close()`` raises :exc:`!OSError`.
diff --git a/Misc/NEWS.d/next/Library/2025-06-26-11-52-40.gh-issue-53203.TMigBr.rst b/Misc/NEWS.d/next/Library/2025-06-26-11-52-40.gh-issue-53203.TMigBr.rst
new file mode 100644
index 00000000000..ba2fae49fdc
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-26-11-52-40.gh-issue-53203.TMigBr.rst
@@ -0,0 +1,2 @@
+Fix :func:`time.strptime` for ``%c`` and ``%x`` formats on locales byn_ER,
+wal_ET and lzh_TW, and for ``%X`` format on locales ar_SA, bg_BG and lzh_TW.
diff --git a/Misc/NEWS.d/next/Library/2025-06-26-17-19-36.gh-issue-105456.eR9oHB.rst b/Misc/NEWS.d/next/Library/2025-06-26-17-19-36.gh-issue-105456.eR9oHB.rst
new file mode 100644
index 00000000000..772403a240a
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-26-17-19-36.gh-issue-105456.eR9oHB.rst
@@ -0,0 +1,2 @@
+Removed the :mod:`!sre_compile`, :mod:`!sre_constants` and :mod:`!sre_parse`
+modules.
diff --git a/Misc/NEWS.d/next/Library/2025-06-26-17-28-49.gh-issue-135995.pPrDCt.rst b/Misc/NEWS.d/next/Library/2025-06-26-17-28-49.gh-issue-135995.pPrDCt.rst
new file mode 100644
index 00000000000..998b3cd85b1
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-26-17-28-49.gh-issue-135995.pPrDCt.rst
@@ -0,0 +1 @@
+In the palmos encoding, make byte ``0x9b`` decode to ``›`` (U+203A - SINGLE RIGHT-POINTING ANGLE QUOTATION MARK).
diff --git a/Misc/NEWS.d/next/Library/2025-06-27-09-26-04.gh-issue-87135.33z0UW.rst b/Misc/NEWS.d/next/Library/2025-06-27-09-26-04.gh-issue-87135.33z0UW.rst
new file mode 100644
index 00000000000..4b6bc74cad8
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-27-09-26-04.gh-issue-87135.33z0UW.rst
@@ -0,0 +1,3 @@
+Acquiring a :class:`threading.Lock` or :class:`threading.RLock` at interpreter
+shutdown will raise :exc:`PythonFinalizationError` if Python can determine
+that it would otherwise deadlock.
diff --git a/Misc/NEWS.d/next/Library/2025-06-27-13-34-28.gh-issue-136028.RY727g.rst b/Misc/NEWS.d/next/Library/2025-06-27-13-34-28.gh-issue-136028.RY727g.rst
new file mode 100644
index 00000000000..9859df7cf6a
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-27-13-34-28.gh-issue-136028.RY727g.rst
@@ -0,0 +1,3 @@
+Fix parsing month names containing "İ" (U+0130, LATIN CAPITAL LETTER I WITH
+DOT ABOVE) in :func:`time.strptime`. This affects locales az_AZ, ber_DZ,
+ber_MA and crh_UA.
diff --git a/Misc/NEWS.d/next/Library/2025-06-30-11-12-24.gh-issue-85702.0Lrbwu.rst b/Misc/NEWS.d/next/Library/2025-06-30-11-12-24.gh-issue-85702.0Lrbwu.rst
new file mode 100644
index 00000000000..fc13eb1d9e0
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-30-11-12-24.gh-issue-85702.0Lrbwu.rst
@@ -0,0 +1,3 @@
+If ``zoneinfo._common.load_tzdata`` is given a package without a resource,
+a :exc:`zoneinfo.ZoneInfoNotFoundError` is raised rather than a
+:exc:`PermissionError`. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/Library/2025-07-02-10-48-21.gh-issue-136193.xfvras.rst b/Misc/NEWS.d/next/Library/2025-07-02-10-48-21.gh-issue-136193.xfvras.rst
new file mode 100644
index 00000000000..801115202d0
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-07-02-10-48-21.gh-issue-136193.xfvras.rst
@@ -0,0 +1,2 @@
+Improve the :exc:`TypeError` error message when rich-comparing two
+:class:`types.SimpleNamespace` objects.
diff --git a/Misc/NEWS.d/next/Library/2025-07-02-18-41-45.gh-issue-133982.7qqAn6.rst b/Misc/NEWS.d/next/Library/2025-07-02-18-41-45.gh-issue-133982.7qqAn6.rst
new file mode 100644
index 00000000000..a2d0810cebe
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-07-02-18-41-45.gh-issue-133982.7qqAn6.rst
@@ -0,0 +1 @@
+Update the Python implementation of :class:`io.BytesIO` to be thread-safe.
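A minimal sketch of the concurrent use this targets, driving the pure-Python implementation in ``_pyio`` directly:

    import _pyio
    import threading

    buf = _pyio.BytesIO()

    def writer():
        for _ in range(1000):
            buf.write(b"x")

    threads = [threading.Thread(target=writer) for _ in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    assert len(buf.getvalue()) == 4000   # no writes lost or torn mid-call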
diff --git a/Misc/NEWS.d/next/Library/2025-07-05-06-56-16.gh-issue-136316.3zj_Do.rst b/Misc/NEWS.d/next/Library/2025-07-05-06-56-16.gh-issue-136316.3zj_Do.rst
new file mode 100644
index 00000000000..dd5cecdf3a1
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-07-05-06-56-16.gh-issue-136316.3zj_Do.rst
@@ -0,0 +1,2 @@
+Improve support for evaluating nested forward references in
+:func:`typing.evaluate_forward_ref`.
diff --git a/Misc/NEWS.d/next/Library/2025-07-05-06-59-46.gh-issue-136047.qWvycf.rst b/Misc/NEWS.d/next/Library/2025-07-05-06-59-46.gh-issue-136047.qWvycf.rst
new file mode 100644
index 00000000000..1a381860914
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-07-05-06-59-46.gh-issue-136047.qWvycf.rst
@@ -0,0 +1,2 @@
+Fix issues with :mod:`typing` when the C implementation of :mod:`abc` is not
+available.
diff --git a/Misc/NEWS.d/next/Library/2025-07-05-09-45-04.gh-issue-136286.N67Amr.rst b/Misc/NEWS.d/next/Library/2025-07-05-09-45-04.gh-issue-136286.N67Amr.rst
new file mode 100644
index 00000000000..0a0d66ac0b8
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-07-05-09-45-04.gh-issue-136286.N67Amr.rst
@@ -0,0 +1,2 @@
+Fix pickling failures for protocols 0 and 1 for many objects related to
+subinterpreters.
diff --git a/Misc/NEWS.d/next/Library/2025-07-06-10-18-48.gh-issue-136021.f-FJYT.rst b/Misc/NEWS.d/next/Library/2025-07-06-10-18-48.gh-issue-136021.f-FJYT.rst
new file mode 100644
index 00000000000..39a848c11eb
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-07-06-10-18-48.gh-issue-136021.f-FJYT.rst
@@ -0,0 +1,3 @@
+Make the ``type_params`` parameter required in :func:`!typing._eval_type`,
+following a deprecation period for calls that omitted it. Also remove the
+:exc:`DeprecationWarning` for the old behavior.
diff --git a/Misc/NEWS.d/next/Security/2025-06-18-13-28-08.gh-issue-102555.nADrzJ.rst b/Misc/NEWS.d/next/Security/2025-06-18-13-28-08.gh-issue-102555.nADrzJ.rst
new file mode 100644
index 00000000000..71d15ee0852
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2025-06-18-13-28-08.gh-issue-102555.nADrzJ.rst
@@ -0,0 +1,3 @@
+Fix comment parsing in :class:`html.parser.HTMLParser` according to the
+HTML5 standard. ``--!>`` now ends the comment, while ``-- >`` no longer
+does. Abnormally ended empty comments ``<!-->`` and ``<!--->`` are now supported.
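A small sketch to observe the comment behaviour described above (expected output taken from the entry):

    from html.parser import HTMLParser

    class Collector(HTMLParser):
        def handle_comment(self, data):
            print("comment:", repr(data))
        def handle_data(self, data):
            print("data:   ", repr(data))

    # With this fix, "--!>" closes the comment, so "after" is plain data:
    #   comment: ' a '
    #   data:    'after'
    p = Collector()
    p.feed("<!-- a --!>after")
    p.close()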
diff --git a/Misc/NEWS.d/next/Security/2025-06-25-14-13-39.gh-issue-135661.idjQ0B.rst b/Misc/NEWS.d/next/Security/2025-06-25-14-13-39.gh-issue-135661.idjQ0B.rst
new file mode 100644
index 00000000000..b6f9e104e44
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2025-06-25-14-13-39.gh-issue-135661.idjQ0B.rst
@@ -0,0 +1,25 @@
+Fix parsing of start and end tags in :class:`html.parser.HTMLParser`
+according to the HTML5 standard (see the sketch after the list below).
+
+* Whitespace is no longer accepted between ``</`` and the tag name.
+  E.g. ``</ script>`` does not end the script section.
+
+* Vertical tabulation (``\v``) and non-ASCII whitespace are no longer
+  recognized as whitespace. The only whitespace characters are ``\t\n\r\f`` and space.
+
+* Null character (U+0000) no longer ends the tag name.
+
+* Attributes and slashes after the tag name in end tags are now ignored,
+  instead of terminating after the first ``>`` in a quoted attribute value.
+  E.g. ``</script/foo=">"/>``.
+
+* Multiple slashes and whitespaces between the last attribute and closing ``>``
+ are now ignored in both start and end tags. E.g. ``<a foo=bar/ //>``.
+
+* Multiple ``=`` between attribute name and value are no longer collapsed.
+ E.g. ``<a foo==bar>`` produces attribute "foo" with value "=bar".
+
+* Whitespaces between the ``=`` separator and attribute name or value are no
+ longer ignored. E.g. ``<a foo =bar>`` produces two attributes "foo" and
+ "=bar", both with value None; ``<a foo= bar>`` produces two attributes:
+ "foo" with value "" and "bar" with value None.
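A short sketch of the attribute-parsing cases from the list above (expected results are taken from the entry itself):

    from html.parser import HTMLParser

    class StartTags(HTMLParser):
        def handle_starttag(self, tag, attrs):
            print(tag, attrs)

    StartTags().feed('<a foo==bar>')   # expected: a [('foo', '=bar')]
    StartTags().feed('<a foo =bar>')   # expected: a [('foo', None), ('=bar', None)]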
diff --git a/Misc/NEWS.d/next/Security/2025-06-27-21-23-19.gh-issue-136053.QZxcee.rst b/Misc/NEWS.d/next/Security/2025-06-27-21-23-19.gh-issue-136053.QZxcee.rst
new file mode 100644
index 00000000000..93caed3aa3b
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2025-06-27-21-23-19.gh-issue-136053.QZxcee.rst
@@ -0,0 +1 @@
+:mod:`marshal`: fix a possible crash when deserializing :class:`slice` objects.
diff --git a/Misc/NEWS.d/next/Tests/2025-05-23-09-19-52.gh-issue-134567.hwEIMb.rst b/Misc/NEWS.d/next/Tests/2025-05-23-09-19-52.gh-issue-134567.hwEIMb.rst
new file mode 100644
index 00000000000..42e4a01c0cc
--- /dev/null
+++ b/Misc/NEWS.d/next/Tests/2025-05-23-09-19-52.gh-issue-134567.hwEIMb.rst
@@ -0,0 +1,2 @@
+:func:`unittest.TestCase.assertLogs` now optionally accepts a log formatter
+that, if provided, is used to format the captured messages in ``output``.
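A minimal sketch of the new option; the keyword name ``formatter`` is assumed from the entry above:

    import logging
    import unittest

    class FormatterExample(unittest.TestCase):
        def test_formatted_output(self):
            fmt = logging.Formatter("%(name)s | %(levelname)s | %(message)s")
            # 'formatter' keyword assumed from the entry above
            with self.assertLogs("demo", level="INFO", formatter=fmt) as cm:
                logging.getLogger("demo").warning("boom")
            self.assertEqual(cm.output, ["demo | WARNING | boom"])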
diff --git a/Misc/NEWS.d/next/Tests/2025-06-26-15-15-35.gh-issue-135966.EBpF8Y.rst b/Misc/NEWS.d/next/Tests/2025-06-26-15-15-35.gh-issue-135966.EBpF8Y.rst
new file mode 100644
index 00000000000..8dc007431f3
--- /dev/null
+++ b/Misc/NEWS.d/next/Tests/2025-06-26-15-15-35.gh-issue-135966.EBpF8Y.rst
@@ -0,0 +1 @@
+The iOS testbed now handles the ``app_packages`` folder as a site directory.
diff --git a/Misc/NEWS.d/next/Tools-Demos/2025-06-26-15-58-13.gh-issue-135968.C4v_-W.rst b/Misc/NEWS.d/next/Tools-Demos/2025-06-26-15-58-13.gh-issue-135968.C4v_-W.rst
new file mode 100644
index 00000000000..1c0b3825c71
--- /dev/null
+++ b/Misc/NEWS.d/next/Tools-Demos/2025-06-26-15-58-13.gh-issue-135968.C4v_-W.rst
@@ -0,0 +1 @@
+Stubs for ``strip`` are now provided as part of an iOS install.
diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c
index ad670293ec5..3ba48d5d9d3 100644
--- a/Modules/_collectionsmodule.c
+++ b/Modules/_collectionsmodule.c
@@ -5,6 +5,7 @@
#include "pycore_moduleobject.h" // _PyModule_GetState()
#include "pycore_pyatomic_ft_wrappers.h"
#include "pycore_typeobject.h" // _PyType_GetModuleState()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include <stddef.h>
@@ -1532,9 +1533,7 @@ deque_dealloc(PyObject *self)
Py_ssize_t i;
PyObject_GC_UnTrack(deque);
- if (deque->weakreflist != NULL) {
- PyObject_ClearWeakRefs(self);
- }
+ FT_CLEAR_WEAKREFS(self, deque->weakreflist);
if (deque->leftblock != NULL) {
(void)deque_clear(self);
assert(deque->leftblock != NULL);
diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c
index eb90be81c8d..7a6426593d0 100644
--- a/Modules/_datetimemodule.c
+++ b/Modules/_datetimemodule.c
@@ -1934,7 +1934,7 @@ wrap_strftime(PyObject *object, PyObject *format, PyObject *timetuple,
}
replacement = freplacement;
}
-#ifdef Py_NORMALIZE_CENTURY
+#ifdef _Py_NORMALIZE_CENTURY
else if (ch == 'Y' || ch == 'G'
|| ch == 'F' || ch == 'C'
) {
diff --git a/Modules/_elementtree.c b/Modules/_elementtree.c
index fe24629f9f6..b9e12ab2026 100644
--- a/Modules/_elementtree.c
+++ b/Modules/_elementtree.c
@@ -17,6 +17,7 @@
#include "Python.h"
#include "pycore_pyhash.h" // _Py_HashSecret
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include <stddef.h> // offsetof()
#include "expat.h"
@@ -690,8 +691,7 @@ element_dealloc(PyObject *op)
/* bpo-31095: UnTrack is needed before calling any callbacks */
PyObject_GC_UnTrack(self);
- if (self->weakreflist != NULL)
- PyObject_ClearWeakRefs(op);
+ FT_CLEAR_WEAKREFS(op, self->weakreflist);
/* element_gc_clear clears all references and deallocates extra
*/
diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c
index 354dbad84b5..d3dabd58b89 100644
--- a/Modules/_functoolsmodule.c
+++ b/Modules/_functoolsmodule.c
@@ -7,6 +7,7 @@
#include "pycore_pyatomic_ft_wrappers.h"
#include "pycore_pystate.h" // _PyThreadState_GET()
#include "pycore_tuple.h" // _PyTuple_ITEMS()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include "clinic/_functoolsmodule.c.h"
@@ -351,9 +352,7 @@ partial_dealloc(PyObject *self)
PyTypeObject *tp = Py_TYPE(self);
/* bpo-31095: UnTrack is needed before calling any callbacks */
PyObject_GC_UnTrack(self);
- if (partialobject_CAST(self)->weakreflist != NULL) {
- PyObject_ClearWeakRefs(self);
- }
+ FT_CLEAR_WEAKREFS(self, partialobject_CAST(self)->weakreflist);
(void)partial_clear(self);
tp->tp_free(self);
Py_DECREF(tp);
@@ -1621,9 +1620,7 @@ lru_cache_dealloc(PyObject *op)
PyTypeObject *tp = Py_TYPE(obj);
/* bpo-31095: UnTrack is needed before calling any callbacks */
PyObject_GC_UnTrack(obj);
- if (obj->weakreflist != NULL) {
- PyObject_ClearWeakRefs(op);
- }
+ FT_CLEAR_WEAKREFS(op, obj->weakreflist);
(void)lru_cache_tp_clear(op);
tp->tp_free(obj);
diff --git a/Modules/_heapqmodule.c b/Modules/_heapqmodule.c
index 560fe431fca..05d01acd771 100644
--- a/Modules/_heapqmodule.c
+++ b/Modules/_heapqmodule.c
@@ -463,11 +463,11 @@ siftup_max(PyListObject *heap, Py_ssize_t pos)
return -1;
}
- /* Bubble up the smaller child until hitting a leaf. */
+ /* Bubble up the larger child until hitting a leaf. */
arr = _PyList_ITEMS(heap);
limit = endpos >> 1; /* smallest pos that has no child */
while (pos < limit) {
- /* Set childpos to index of smaller child. */
+ /* Set childpos to index of larger child. */
childpos = 2*pos + 1; /* leftmost child position */
if (childpos + 1 < endpos) {
PyObject* a = arr[childpos + 1];
@@ -487,7 +487,7 @@ siftup_max(PyListObject *heap, Py_ssize_t pos)
return -1;
}
}
- /* Move the smaller child up. */
+ /* Move the larger child up. */
tmp1 = arr[childpos];
tmp2 = arr[pos];
FT_ATOMIC_STORE_PTR_RELAXED(arr[childpos], tmp2);
diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c
index 4724e97982f..25c8bf8b3d5 100644
--- a/Modules/_io/bufferedio.c
+++ b/Modules/_io/bufferedio.c
@@ -13,6 +13,7 @@
#include "pycore_object.h" // _PyObject_GC_UNTRACK()
#include "pycore_pyerrors.h" // _Py_FatalErrorFormat()
#include "pycore_pylifecycle.h" // _Py_IsInterpreterFinalizing()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include "_iomodule.h"
@@ -421,8 +422,7 @@ buffered_dealloc(PyObject *op)
return;
_PyObject_GC_UNTRACK(self);
self->ok = 0;
- if (self->weakreflist != NULL)
- PyObject_ClearWeakRefs(op);
+ FT_CLEAR_WEAKREFS(op, self->weakreflist);
if (self->buffer) {
PyMem_Free(self->buffer);
self->buffer = NULL;
@@ -2312,8 +2312,7 @@ bufferedrwpair_dealloc(PyObject *op)
rwpair *self = rwpair_CAST(op);
PyTypeObject *tp = Py_TYPE(self);
_PyObject_GC_UNTRACK(self);
- if (self->weakreflist != NULL)
- PyObject_ClearWeakRefs(op);
+ FT_CLEAR_WEAKREFS(op, self->weakreflist);
(void)bufferedrwpair_clear(op);
tp->tp_free(self);
Py_DECREF(tp);
diff --git a/Modules/_io/bytesio.c b/Modules/_io/bytesio.c
index 61cfec435fe..1c71bce4fbb 100644
--- a/Modules/_io/bytesio.c
+++ b/Modules/_io/bytesio.c
@@ -3,6 +3,7 @@
#include "pycore_object.h"
#include "pycore_pyatomic_ft_wrappers.h"
#include "pycore_sysmodule.h" // _PySys_GetSizeOf()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include <stddef.h> // offsetof()
#include "_iomodule.h"
@@ -979,8 +980,7 @@ bytesio_dealloc(PyObject *op)
}
Py_CLEAR(self->buf);
Py_CLEAR(self->dict);
- if (self->weakreflist != NULL)
- PyObject_ClearWeakRefs(op);
+ FT_CLEAR_WEAKREFS(op, self->weakreflist);
tp->tp_free(self);
Py_DECREF(tp);
}
diff --git a/Modules/_io/fileio.c b/Modules/_io/fileio.c
index 8fcb27049d6..26537fc6395 100644
--- a/Modules/_io/fileio.c
+++ b/Modules/_io/fileio.c
@@ -4,6 +4,7 @@
#include "pycore_fileutils.h" // _Py_BEGIN_SUPPRESS_IPH
#include "pycore_object.h" // _PyObject_GC_UNTRACK()
#include "pycore_pyerrors.h" // _PyErr_ChainExceptions1()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include <stdbool.h> // bool
#ifdef HAVE_UNISTD_H
@@ -570,9 +571,7 @@ fileio_dealloc(PyObject *op)
PyMem_Free(self->stat_atopen);
self->stat_atopen = NULL;
}
- if (self->weakreflist != NULL) {
- PyObject_ClearWeakRefs(op);
- }
+ FT_CLEAR_WEAKREFS(op, self->weakreflist);
(void)fileio_clear(op);
PyTypeObject *tp = Py_TYPE(op);
diff --git a/Modules/_io/iobase.c b/Modules/_io/iobase.c
index cd4c7e7cead..044f6b7803c 100644
--- a/Modules/_io/iobase.c
+++ b/Modules/_io/iobase.c
@@ -14,6 +14,7 @@
#include "pycore_long.h" // _PyLong_GetOne()
#include "pycore_object.h" // _PyType_HasFeature()
#include "pycore_pyerrors.h" // _PyErr_ChainExceptions1()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include <stddef.h> // offsetof()
#include "_iomodule.h"
@@ -383,8 +384,7 @@ iobase_dealloc(PyObject *op)
}
PyTypeObject *tp = Py_TYPE(self);
_PyObject_GC_UNTRACK(self);
- if (self->weakreflist != NULL)
- PyObject_ClearWeakRefs(op);
+ FT_CLEAR_WEAKREFS(op, self->weakreflist);
Py_CLEAR(self->dict);
tp->tp_free(self);
Py_DECREF(tp);
diff --git a/Modules/_io/stringio.c b/Modules/_io/stringio.c
index 56913fafefb..20b7cfc0088 100644
--- a/Modules/_io/stringio.c
+++ b/Modules/_io/stringio.c
@@ -1,6 +1,7 @@
#include "Python.h"
#include <stddef.h> // offsetof()
#include "pycore_object.h"
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include "_iomodule.h"
/* Implementation note: the buffer is always at least one character longer
@@ -638,9 +639,7 @@ stringio_dealloc(PyObject *op)
}
PyUnicodeWriter_Discard(self->writer);
(void)stringio_clear(op);
- if (self->weakreflist != NULL) {
- PyObject_ClearWeakRefs(op);
- }
+ FT_CLEAR_WEAKREFS(op, self->weakreflist);
tp->tp_free(self);
Py_DECREF(tp);
}
diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c
index 3808ecdceb9..5354cf63442 100644
--- a/Modules/_io/textio.c
+++ b/Modules/_io/textio.c
@@ -16,6 +16,7 @@
#include "pycore_pyerrors.h" // _PyErr_ChainExceptions1()
#include "pycore_pystate.h" // _PyInterpreterState_GET()
#include "pycore_unicodeobject.h" // _PyUnicode_AsASCIIString()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include "_iomodule.h"
@@ -1469,8 +1470,7 @@ textiowrapper_dealloc(PyObject *op)
return;
self->ok = 0;
_PyObject_GC_UNTRACK(self);
- if (self->weakreflist != NULL)
- PyObject_ClearWeakRefs(op);
+ FT_CLEAR_WEAKREFS(op, self->weakreflist);
(void)textiowrapper_clear(op);
tp->tp_free(self);
Py_DECREF(tp);
diff --git a/Modules/_io/winconsoleio.c b/Modules/_io/winconsoleio.c
index 3e783b9da45..950b7fe241c 100644
--- a/Modules/_io/winconsoleio.c
+++ b/Modules/_io/winconsoleio.c
@@ -10,6 +10,7 @@
#include "pycore_fileutils.h" // _Py_BEGIN_SUPPRESS_IPH
#include "pycore_object.h" // _PyObject_GC_UNTRACK()
#include "pycore_pyerrors.h" // _PyErr_ChainExceptions1()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#ifdef HAVE_WINDOWS_CONSOLE_IO
@@ -518,8 +519,7 @@ winconsoleio_dealloc(PyObject *op)
if (_PyIOBase_finalize(op) < 0)
return;
_PyObject_GC_UNTRACK(self);
- if (self->weakreflist != NULL)
- PyObject_ClearWeakRefs(op);
+ FT_CLEAR_WEAKREFS(op, self->weakreflist);
Py_CLEAR(self->dict);
tp->tp_free(self);
Py_DECREF(tp);
diff --git a/Modules/_localemodule.c b/Modules/_localemodule.c
index c1f56008b7c..41e6d48b1db 100644
--- a/Modules/_localemodule.c
+++ b/Modules/_localemodule.c
@@ -692,7 +692,7 @@ _locale_nl_langinfo_impl(PyObject *module, int item)
result = result != NULL ? result : "";
char *oldloc = NULL;
if (langinfo_constants[i].category != LC_CTYPE
- && (
+ && *result && (
#ifdef __GLIBC__
// gh-133740: Always change the locale for ALT_DIGITS and ERA
# ifdef ALT_DIGITS
diff --git a/Modules/_queuemodule.c b/Modules/_queuemodule.c
index 3ee14b61b82..01235c77bd7 100644
--- a/Modules/_queuemodule.c
+++ b/Modules/_queuemodule.c
@@ -7,6 +7,7 @@
#include "pycore_moduleobject.h" // _PyModule_GetState()
#include "pycore_parking_lot.h"
#include "pycore_time.h" // _PyTime_FromSecondsObject()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include <stdbool.h>
#include <stddef.h> // offsetof()
@@ -221,9 +222,7 @@ simplequeue_dealloc(PyObject *op)
PyObject_GC_UnTrack(self);
(void)simplequeue_clear(op);
- if (self->weakreflist != NULL) {
- PyObject_ClearWeakRefs(op);
- }
+ FT_CLEAR_WEAKREFS(op, self->weakreflist);
tp->tp_free(self);
Py_DECREF(tp);
}
diff --git a/Modules/_remote_debugging_module.c b/Modules/_remote_debugging_module.c
index c2421cac6bd..ce7189637c2 100644
--- a/Modules/_remote_debugging_module.c
+++ b/Modules/_remote_debugging_module.c
@@ -64,12 +64,14 @@
#endif
#ifdef Py_GIL_DISABLED
-#define INTERP_STATE_MIN_SIZE MAX(MAX(offsetof(PyInterpreterState, _code_object_generation) + sizeof(uint64_t), \
- offsetof(PyInterpreterState, tlbc_indices.tlbc_generation) + sizeof(uint32_t)), \
- offsetof(PyInterpreterState, threads.head) + sizeof(void*))
+#define INTERP_STATE_MIN_SIZE MAX(MAX(MAX(offsetof(PyInterpreterState, _code_object_generation) + sizeof(uint64_t), \
+ offsetof(PyInterpreterState, tlbc_indices.tlbc_generation) + sizeof(uint32_t)), \
+ offsetof(PyInterpreterState, threads.head) + sizeof(void*)), \
+ offsetof(PyInterpreterState, _gil.last_holder) + sizeof(PyThreadState*))
#else
-#define INTERP_STATE_MIN_SIZE MAX(offsetof(PyInterpreterState, _code_object_generation) + sizeof(uint64_t), \
- offsetof(PyInterpreterState, threads.head) + sizeof(void*))
+#define INTERP_STATE_MIN_SIZE MAX(MAX(offsetof(PyInterpreterState, _code_object_generation) + sizeof(uint64_t), \
+ offsetof(PyInterpreterState, threads.head) + sizeof(void*)), \
+ offsetof(PyInterpreterState, _gil.last_holder) + sizeof(PyThreadState*))
#endif
#define INTERP_STATE_BUFFER_SIZE MAX(INTERP_STATE_MIN_SIZE, 256)
@@ -206,6 +208,7 @@ typedef struct {
uint64_t code_object_generation;
_Py_hashtable_t *code_object_cache;
int debug;
+ int only_active_thread;
RemoteDebuggingState *cached_state; // Cached module state
#ifdef Py_GIL_DISABLED
// TLBC cache invalidation tracking
@@ -2496,6 +2499,7 @@ _remote_debugging.RemoteUnwinder.__init__
pid: int
*
all_threads: bool = False
+ only_active_thread: bool = False
debug: bool = False
Initialize a new RemoteUnwinder object for debugging a remote Python process.
@@ -2504,6 +2508,8 @@ Args:
pid: Process ID of the target Python process to debug
all_threads: If True, initialize state for all threads in the process.
If False, only initialize for the main thread.
+ only_active_thread: If True, only sample the thread holding the GIL.
+ Cannot be used together with all_threads=True.
debug: If True, chain exceptions to explain the sequence of events that
lead to the exception.
@@ -2514,15 +2520,33 @@ Raises:
PermissionError: If access to the target process is denied
OSError: If unable to attach to the target process or access its memory
RuntimeError: If unable to read debug information from the target process
+ ValueError: If both all_threads and only_active_thread are True
[clinic start generated code]*/
static int
_remote_debugging_RemoteUnwinder___init___impl(RemoteUnwinderObject *self,
int pid, int all_threads,
+ int only_active_thread,
int debug)
-/*[clinic end generated code: output=3982f2a7eba49334 input=48a762566b828e91]*/
+/*[clinic end generated code: output=13ba77598ecdcbe1 input=8f8f12504e17da04]*/
{
+ // Validate that all_threads and only_active_thread are not both True
+ if (all_threads && only_active_thread) {
+ PyErr_SetString(PyExc_ValueError,
+ "all_threads and only_active_thread cannot both be True");
+ return -1;
+ }
+
+#ifdef Py_GIL_DISABLED
+ if (only_active_thread) {
+ PyErr_SetString(PyExc_ValueError,
+                        "only_active_thread is not supported on free-threaded (Py_GIL_DISABLED) builds");
+ return -1;
+ }
+#endif
+
self->debug = debug;
+ self->only_active_thread = only_active_thread;
self->cached_state = NULL;
if (_Py_RemoteDebug_InitProcHandle(&self->handle, pid) < 0) {
set_exception_cause(self, PyExc_RuntimeError, "Failed to initialize process handle");
@@ -2602,13 +2626,18 @@ _remote_debugging_RemoteUnwinder___init___impl(RemoteUnwinderObject *self,
@critical_section
_remote_debugging.RemoteUnwinder.get_stack_trace
-Returns a list of stack traces for all threads in the target process.
+Returns a list of stack traces for threads in the target process.
Each element in the returned list is a tuple of (thread_id, frame_list), where:
- thread_id is the OS thread identifier
- frame_list is a list of tuples (function_name, filename, line_number) representing
the Python stack frames for that thread, ordered from most recent to oldest
+The threads returned depend on the initialization parameters:
+- If only_active_thread was True: returns only the thread holding the GIL
+- If all_threads was True: returns all threads
+- Otherwise: returns only the main thread
+
Example:
[
(1234, [
@@ -2632,7 +2661,7 @@ Raises:
static PyObject *
_remote_debugging_RemoteUnwinder_get_stack_trace_impl(RemoteUnwinderObject *self)
-/*[clinic end generated code: output=666192b90c69d567 input=331dbe370578badf]*/
+/*[clinic end generated code: output=666192b90c69d567 input=f756f341206f9116]*/
{
PyObject* result = NULL;
// Read interpreter state into opaque buffer
@@ -2655,6 +2684,28 @@ _remote_debugging_RemoteUnwinder_get_stack_trace_impl(RemoteUnwinderObject *self
_Py_hashtable_clear(self->code_object_cache);
}
+ // If only_active_thread is true, we need to determine which thread holds the GIL
+ PyThreadState* gil_holder = NULL;
+ if (self->only_active_thread) {
+ // The GIL state is already in interp_state_buffer, just read from there
+ // Check if GIL is locked
+ int gil_locked = GET_MEMBER(int, interp_state_buffer,
+ self->debug_offsets.interpreter_state.gil_runtime_state_locked);
+
+ if (gil_locked) {
+ // Get the last holder (current holder when GIL is locked)
+ gil_holder = GET_MEMBER(PyThreadState*, interp_state_buffer,
+ self->debug_offsets.interpreter_state.gil_runtime_state_holder);
+ } else {
+ // GIL is not locked, return empty list
+ result = PyList_New(0);
+ if (!result) {
+ set_exception_cause(self, PyExc_MemoryError, "Failed to create empty result list");
+ }
+ goto exit;
+ }
+ }
+
#ifdef Py_GIL_DISABLED
// Check TLBC generation and invalidate cache if needed
uint32_t current_tlbc_generation = GET_MEMBER(uint32_t, interp_state_buffer,
@@ -2666,7 +2717,10 @@ _remote_debugging_RemoteUnwinder_get_stack_trace_impl(RemoteUnwinderObject *self
#endif
uintptr_t current_tstate;
- if (self->tstate_addr == 0) {
+ if (self->only_active_thread && gil_holder != NULL) {
+ // We have the GIL holder, process only that thread
+ current_tstate = (uintptr_t)gil_holder;
+ } else if (self->tstate_addr == 0) {
// Get threads head from buffer
current_tstate = GET_MEMBER(uintptr_t, interp_state_buffer,
self->debug_offsets.interpreter_state.threads_head);
@@ -2700,10 +2754,14 @@ _remote_debugging_RemoteUnwinder_get_stack_trace_impl(RemoteUnwinderObject *self
if (self->tstate_addr) {
break;
}
+
+ // If we're only processing the GIL holder, we're done after one iteration
+ if (self->only_active_thread && gil_holder != NULL) {
+ break;
+ }
}
exit:
- _Py_RemoteDebug_ClearCache(&self->handle);
return result;
}
@@ -2827,11 +2885,9 @@ _remote_debugging_RemoteUnwinder_get_all_awaited_by_impl(RemoteUnwinderObject *s
goto result_err;
}
- _Py_RemoteDebug_ClearCache(&self->handle);
return result;
result_err:
- _Py_RemoteDebug_ClearCache(&self->handle);
Py_XDECREF(result);
return NULL;
}
@@ -2898,11 +2954,9 @@ _remote_debugging_RemoteUnwinder_get_async_stack_trace_impl(RemoteUnwinderObject
goto cleanup;
}
- _Py_RemoteDebug_ClearCache(&self->handle);
return result;
cleanup:
- _Py_RemoteDebug_ClearCache(&self->handle);
Py_XDECREF(result);
return NULL;
}
@@ -2928,7 +2982,6 @@ RemoteUnwinder_dealloc(PyObject *op)
}
#endif
if (self->handle.pid != 0) {
- _Py_RemoteDebug_ClearCache(&self->handle);
_Py_RemoteDebug_CleanupProcHandle(&self->handle);
}
PyObject_Del(self);
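An illustrative usage sketch of the new ``only_active_thread`` option; ``_remote_debugging`` is an internal, unstable module and the pid below is hypothetical:

    from _remote_debugging import RemoteUnwinder

    unwinder = RemoteUnwinder(1234, only_active_thread=True)  # 1234: example pid
    for thread_id, frames in unwinder.get_stack_trace():
        # With only_active_thread=True, at most one entry is returned:
        # the thread currently holding the GIL (none if the GIL is unlocked).
        for func, filename, lineno in frames:
            print(thread_id, func, filename, lineno)

    # Combining the two modes is rejected:
    # RemoteUnwinder(1234, all_threads=True, only_active_thread=True)  -> ValueError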
diff --git a/Modules/_sqlite/blob.c b/Modules/_sqlite/blob.c
index 35d090e3ca2..aafefbf316e 100644
--- a/Modules/_sqlite/blob.c
+++ b/Modules/_sqlite/blob.c
@@ -4,6 +4,7 @@
#include "blob.h"
#include "util.h"
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#define clinic_state() (pysqlite_get_state_by_type(Py_TYPE(self)))
#include "clinic/blob.c.h"
@@ -56,9 +57,7 @@ blob_dealloc(PyObject *op)
close_blob(self);
- if (self->in_weakreflist != NULL) {
- PyObject_ClearWeakRefs(op);
- }
+ FT_CLEAR_WEAKREFS(op, self->in_weakreflist);
(void)tp->tp_clear(op);
tp->tp_free(self);
Py_DECREF(tp);
diff --git a/Modules/_sqlite/cursor.c b/Modules/_sqlite/cursor.c
index 7943bfcca36..0c3f43d0e50 100644
--- a/Modules/_sqlite/cursor.c
+++ b/Modules/_sqlite/cursor.c
@@ -31,6 +31,7 @@
#include "util.h"
#include "pycore_pyerrors.h" // _PyErr_FormatFromCause()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
typedef enum {
TYPE_LONG,
@@ -185,9 +186,7 @@ cursor_dealloc(PyObject *op)
pysqlite_Cursor *self = _pysqlite_Cursor_CAST(op);
PyTypeObject *tp = Py_TYPE(self);
PyObject_GC_UnTrack(self);
- if (self->in_weakreflist != NULL) {
- PyObject_ClearWeakRefs(op);
- }
+ FT_CLEAR_WEAKREFS(op, self->in_weakreflist);
(void)tp->tp_clear(op);
tp->tp_free(self);
Py_DECREF(tp);
diff --git a/Modules/_sre/sre.c b/Modules/_sre/sre.c
index 602d0ab8588..e8943920043 100644
--- a/Modules/_sre/sre.c
+++ b/Modules/_sre/sre.c
@@ -44,6 +44,7 @@ static const char copyright[] =
#include "pycore_long.h" // _PyLong_GetZero()
#include "pycore_moduleobject.h" // _PyModule_GetState()
#include "pycore_unicodeobject.h" // _PyUnicode_Copy
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include "sre.h" // SRE_CODE
@@ -736,10 +737,7 @@ pattern_dealloc(PyObject *self)
{
PyTypeObject *tp = Py_TYPE(self);
PyObject_GC_UnTrack(self);
- PatternObject *obj = _PatternObject_CAST(self);
- if (obj->weakreflist != NULL) {
- PyObject_ClearWeakRefs(self);
- }
+ FT_CLEAR_WEAKREFS(self, _PatternObject_CAST(self)->weakreflist);
(void)pattern_clear(self);
tp->tp_free(self);
Py_DECREF(tp);
diff --git a/Modules/_stat.c b/Modules/_stat.c
index f11ca7d23b4..1dabf2f6d5b 100644
--- a/Modules/_stat.c
+++ b/Modules/_stat.c
@@ -57,7 +57,7 @@ typedef unsigned short mode_t;
* Only the names are defined by POSIX but not their value. All common file
* types seems to have the same numeric value on all platforms, though.
*
- * pyport.h guarantees S_IFMT, S_IFDIR, S_IFCHR, S_IFREG and S_IFLNK
+ * fileutils.h guarantees S_IFMT, S_IFDIR, S_IFCHR, S_IFREG and S_IFLNK
*/
#ifndef S_IFBLK
@@ -86,7 +86,7 @@ typedef unsigned short mode_t;
/* S_ISXXX()
- * pyport.h defines S_ISDIR(), S_ISREG() and S_ISCHR()
+ * fileutils.h defines S_ISDIR(), S_ISREG() and S_ISCHR()
*/
#ifndef S_ISBLK
diff --git a/Modules/_struct.c b/Modules/_struct.c
index c36079f1eb8..3fad35a8c94 100644
--- a/Modules/_struct.c
+++ b/Modules/_struct.c
@@ -11,6 +11,7 @@
#include "pycore_bytesobject.h" // _PyBytesWriter
#include "pycore_long.h" // _PyLong_AsByteArray()
#include "pycore_moduleobject.h" // _PyModule_GetState()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include <stddef.h> // offsetof()
@@ -1794,9 +1795,7 @@ s_dealloc(PyObject *op)
PyStructObject *s = PyStructObject_CAST(op);
PyTypeObject *tp = Py_TYPE(s);
PyObject_GC_UnTrack(s);
- if (s->weakreflist != NULL) {
- PyObject_ClearWeakRefs(op);
- }
+ FT_CLEAR_WEAKREFS(op, s->weakreflist);
if (s->s_codes != NULL) {
PyMem_Free(s->s_codes);
}
diff --git a/Modules/_testcapi/abstract.c b/Modules/_testcapi/abstract.c
index d4045afd515..c1f769456ac 100644
--- a/Modules/_testcapi/abstract.c
+++ b/Modules/_testcapi/abstract.c
@@ -178,6 +178,42 @@ sequence_fast_get_item(PyObject *self, PyObject *args)
}
+static PyObject *
+object_setattr_null_exc(PyObject *self, PyObject *args)
+{
+ PyObject *obj, *name, *exc;
+ if (!PyArg_ParseTuple(args, "OOO", &obj, &name, &exc)) {
+ return NULL;
+ }
+
+ PyErr_SetObject((PyObject*)Py_TYPE(exc), exc);
+ if (PyObject_SetAttr(obj, name, NULL) < 0) {
+ return NULL;
+ }
+ assert(PyErr_Occurred());
+ return NULL;
+}
+
+
+static PyObject *
+object_setattrstring_null_exc(PyObject *self, PyObject *args)
+{
+ PyObject *obj, *exc;
+ const char *name;
+ Py_ssize_t size;
+ if (!PyArg_ParseTuple(args, "Oz#O", &obj, &name, &size, &exc)) {
+ return NULL;
+ }
+
+ PyErr_SetObject((PyObject*)Py_TYPE(exc), exc);
+ if (PyObject_SetAttrString(obj, name, NULL) < 0) {
+ return NULL;
+ }
+ assert(PyErr_Occurred());
+ return NULL;
+}
+
+
static PyMethodDef test_methods[] = {
{"object_getoptionalattr", object_getoptionalattr, METH_VARARGS},
{"object_getoptionalattrstring", object_getoptionalattrstring, METH_VARARGS},
@@ -191,6 +227,8 @@ static PyMethodDef test_methods[] = {
{"sequence_fast_get_size", sequence_fast_get_size, METH_O},
{"sequence_fast_get_item", sequence_fast_get_item, METH_VARARGS},
+ {"object_setattr_null_exc", object_setattr_null_exc, METH_VARARGS},
+ {"object_setattrstring_null_exc", object_setattrstring_null_exc, METH_VARARGS},
{NULL},
};
diff --git a/Modules/_testcapi/vectorcall.c b/Modules/_testcapi/vectorcall.c
index 03aaacb328e..f89dcb6c4cf 100644
--- a/Modules/_testcapi/vectorcall.c
+++ b/Modules/_testcapi/vectorcall.c
@@ -179,14 +179,14 @@ _testcapi_VectorCallClass_set_vectorcall_impl(PyObject *self,
if (!PyObject_TypeCheck(self, type)) {
return PyErr_Format(
PyExc_TypeError,
- "expected %s instance",
- PyType_GetName(type));
+ "expected %N instance",
+ type);
}
if (!type->tp_vectorcall_offset) {
return PyErr_Format(
PyExc_TypeError,
- "type %s has no vectorcall offset",
- PyType_GetName(type));
+ "type %N has no vectorcall offset",
+ type);
}
*(vectorcallfunc*)((char*)self + type->tp_vectorcall_offset) = (
VectorCallClass_vectorcall);
diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c
index fdf22a0c994..8027f0015c7 100644
--- a/Modules/_testinternalcapi.c
+++ b/Modules/_testinternalcapi.c
@@ -121,7 +121,7 @@ get_c_recursion_remaining(PyObject *self, PyObject *Py_UNUSED(args))
PyThreadState *tstate = _PyThreadState_GET();
uintptr_t here_addr = _Py_get_machine_stack_pointer();
_PyThreadStateImpl *_tstate = (_PyThreadStateImpl *)tstate;
- int remaining = (int)((here_addr - _tstate->c_stack_soft_limit)/PYOS_STACK_MARGIN_BYTES * 50);
+ int remaining = (int)((here_addr - _tstate->c_stack_soft_limit) / _PyOS_STACK_MARGIN_BYTES * 50);
return PyLong_FromLong(remaining);
}
diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c
index 150a266b521..8886a9d6bd0 100644
--- a/Modules/_threadmodule.c
+++ b/Modules/_threadmodule.c
@@ -834,9 +834,14 @@ lock_PyThread_acquire_lock(PyObject *op, PyObject *args, PyObject *kwds)
return NULL;
}
- PyLockStatus r = _PyMutex_LockTimed(&self->lock, timeout,
- _PY_LOCK_HANDLE_SIGNALS | _PY_LOCK_DETACH);
+ PyLockStatus r = _PyMutex_LockTimed(
+ &self->lock, timeout,
+ _PY_LOCK_PYTHONLOCK | _PY_LOCK_HANDLE_SIGNALS | _PY_LOCK_DETACH);
if (r == PY_LOCK_INTR) {
+ assert(PyErr_Occurred());
+ return NULL;
+ }
+ if (r == PY_LOCK_FAILURE && PyErr_Occurred()) {
return NULL;
}
@@ -1054,9 +1059,14 @@ rlock_acquire(PyObject *op, PyObject *args, PyObject *kwds)
return NULL;
}
- PyLockStatus r = _PyRecursiveMutex_LockTimed(&self->lock, timeout,
- _PY_LOCK_HANDLE_SIGNALS | _PY_LOCK_DETACH);
+ PyLockStatus r = _PyRecursiveMutex_LockTimed(
+ &self->lock, timeout,
+ _PY_LOCK_PYTHONLOCK | _PY_LOCK_HANDLE_SIGNALS | _PY_LOCK_DETACH);
if (r == PY_LOCK_INTR) {
+ assert(PyErr_Occurred());
+ return NULL;
+ }
+ if (r == PY_LOCK_FAILURE && PyErr_Occurred()) {
return NULL;
}
@@ -1365,9 +1375,7 @@ static void
localdummy_dealloc(PyObject *op)
{
localdummyobject *self = localdummyobject_CAST(op);
- if (self->weakreflist != NULL) {
- PyObject_ClearWeakRefs(op);
- }
+ FT_CLEAR_WEAKREFS(op, self->weakreflist);
PyTypeObject *tp = Py_TYPE(self);
tp->tp_free(self);
Py_DECREF(tp);
diff --git a/Modules/_zoneinfo.c b/Modules/_zoneinfo.c
index abd53436b21..5c5383d260a 100644
--- a/Modules/_zoneinfo.c
+++ b/Modules/_zoneinfo.c
@@ -7,6 +7,7 @@
#include "pycore_long.h" // _PyLong_GetOne()
#include "pycore_pyerrors.h" // _PyErr_ChainExceptions1()
#include "pycore_typeobject.h" // _PyType_GetModuleState()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include "datetime.h" // PyDateTime_TZInfo
@@ -375,9 +376,7 @@ zoneinfo_dealloc(PyObject *obj_self)
PyTypeObject *tp = Py_TYPE(self);
PyObject_GC_UnTrack(self);
- if (self->weakreflist != NULL) {
- PyObject_ClearWeakRefs(obj_self);
- }
+ FT_CLEAR_WEAKREFS(obj_self, self->weakreflist);
if (self->trans_list_utc != NULL) {
PyMem_Free(self->trans_list_utc);
diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c
index 401a3a7072b..5d07de2fba9 100644
--- a/Modules/arraymodule.c
+++ b/Modules/arraymodule.c
@@ -13,6 +13,7 @@
#include "pycore_ceval.h" // _PyEval_GetBuiltin()
#include "pycore_modsupport.h" // _PyArg_NoKeywords()
#include "pycore_moduleobject.h" // _PyModule_GetState()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include <stddef.h> // offsetof()
#include <stdbool.h>
@@ -728,9 +729,7 @@ array_dealloc(PyObject *op)
PyObject_GC_UnTrack(op);
arrayobject *self = arrayobject_CAST(op);
- if (self->weakreflist != NULL) {
- PyObject_ClearWeakRefs(op);
- }
+ FT_CLEAR_WEAKREFS(op, self->weakreflist);
if (self->ob_item != NULL) {
PyMem_Free(self->ob_item);
}
diff --git a/Modules/blake2module.c b/Modules/blake2module.c
index 9e279e11b51..163f238a426 100644
--- a/Modules/blake2module.c
+++ b/Modules/blake2module.c
@@ -43,25 +43,25 @@
// SIMD256 can't be compiled on macOS ARM64, and performance of SIMD128 isn't
// great; but when compiling a universal2 binary, autoconf will set
-// HACL_CAN_COMPILE_SIMD128 and HACL_CAN_COMPILE_SIMD256 because they *can* be
-// compiled on x86_64. If we're on macOS ARM64, disable these preprocessor
-// symbols.
+// _Py_HACL_CAN_COMPILE_VEC{128,256} because they *can* be compiled on x86_64.
+// If we're on macOS ARM64, however, we disable these preprocessor symbols.
#if defined(__APPLE__) && defined(__arm64__)
-# undef HACL_CAN_COMPILE_SIMD128
-# undef HACL_CAN_COMPILE_SIMD256
+# undef _Py_HACL_CAN_COMPILE_VEC128
+# undef _Py_HACL_CAN_COMPILE_VEC256
#endif
-// Small mismatch between the variable names Python defines as part of configure
-// at the ones HACL* expects to be set in order to enable those headers.
-#define HACL_CAN_COMPILE_VEC128 HACL_CAN_COMPILE_SIMD128
-#define HACL_CAN_COMPILE_VEC256 HACL_CAN_COMPILE_SIMD256
+// HACL* expects HACL_CAN_COMPILE_VEC* macros to be set in order to enable
+// the corresponding SIMD instructions, so we need to "forward" the values
+// we just deduced above.
+#define HACL_CAN_COMPILE_VEC128 _Py_HACL_CAN_COMPILE_VEC128
+#define HACL_CAN_COMPILE_VEC256 _Py_HACL_CAN_COMPILE_VEC256
#include "_hacl/Hacl_Hash_Blake2s.h"
#include "_hacl/Hacl_Hash_Blake2b.h"
-#if HACL_CAN_COMPILE_SIMD128
+#if _Py_HACL_CAN_COMPILE_VEC128
#include "_hacl/Hacl_Hash_Blake2s_Simd128.h"
#endif
-#if HACL_CAN_COMPILE_SIMD256
+#if _Py_HACL_CAN_COMPILE_VEC256
#include "_hacl/Hacl_Hash_Blake2b_Simd256.h"
#endif
@@ -88,7 +88,7 @@ blake2_get_state(PyObject *module)
return (Blake2State *)state;
}
-#if defined(HACL_CAN_COMPILE_SIMD128) || defined(HACL_CAN_COMPILE_SIMD256)
+#if defined(_Py_HACL_CAN_COMPILE_VEC128) || defined(_Py_HACL_CAN_COMPILE_VEC256)
static inline Blake2State *
blake2_get_state_from_type(PyTypeObject *module)
{
@@ -181,7 +181,7 @@ blake2module_init_cpu_features(Blake2State *state)
#undef ECX_SSE3
#undef EBX_AVX2
-#if HACL_CAN_COMPILE_SIMD128
+#if _Py_HACL_CAN_COMPILE_VEC128
// TODO(picnixz): use py_cpuid_features (gh-125022) to improve detection
state->can_run_simd128 = sse && sse2 && sse3 && sse41 && sse42 && cmov;
#else
@@ -191,7 +191,7 @@ blake2module_init_cpu_features(Blake2State *state)
state->can_run_simd128 = false;
#endif
-#if HACL_CAN_COMPILE_SIMD256
+#if _Py_HACL_CAN_COMPILE_VEC256
// TODO(picnixz): use py_cpuid_features (gh-125022) to improve detection
state->can_run_simd256 = state->can_run_simd128 && avx && avx2;
#else
@@ -332,18 +332,18 @@ is_blake2s(blake2_impl impl)
static inline blake2_impl
type_to_impl(PyTypeObject *type)
{
-#if defined(HACL_CAN_COMPILE_SIMD128) || defined(HACL_CAN_COMPILE_SIMD256)
+#if defined(_Py_HACL_CAN_COMPILE_VEC128) || defined(_Py_HACL_CAN_COMPILE_VEC256)
Blake2State *st = blake2_get_state_from_type(type);
#endif
if (!strcmp(type->tp_name, blake2b_type_spec.name)) {
-#if HACL_CAN_COMPILE_SIMD256
+#if _Py_HACL_CAN_COMPILE_VEC256
return st->can_run_simd256 ? Blake2b_256 : Blake2b;
#else
return Blake2b;
#endif
}
else if (!strcmp(type->tp_name, blake2s_type_spec.name)) {
-#if HACL_CAN_COMPILE_SIMD128
+#if _Py_HACL_CAN_COMPILE_VEC128
return st->can_run_simd128 ? Blake2s_128 : Blake2s;
#else
return Blake2s;
@@ -357,10 +357,10 @@ typedef struct {
union {
Hacl_Hash_Blake2s_state_t *blake2s_state;
Hacl_Hash_Blake2b_state_t *blake2b_state;
-#if HACL_CAN_COMPILE_SIMD128
+#if _Py_HACL_CAN_COMPILE_VEC128
Hacl_Hash_Blake2s_Simd128_state_t *blake2s_128_state;
#endif
-#if HACL_CAN_COMPILE_SIMD256
+#if _Py_HACL_CAN_COMPILE_VEC256
Hacl_Hash_Blake2b_Simd256_state_t *blake2b_256_state;
#endif
};
@@ -429,13 +429,13 @@ blake2_update_unlocked(Blake2Object *self, uint8_t *buf, Py_ssize_t len)
switch (self->impl) {
// blake2b_256_state and blake2s_128_state must be if'd since
// otherwise this results in an unresolved symbol at link-time.
-#if HACL_CAN_COMPILE_SIMD256
+#if _Py_HACL_CAN_COMPILE_VEC256
case Blake2b_256:
HACL_UPDATE(Hacl_Hash_Blake2b_Simd256_update,
self->blake2b_256_state, buf, len);
return;
#endif
-#if HACL_CAN_COMPILE_SIMD128
+#if _Py_HACL_CAN_COMPILE_VEC128
case Blake2s_128:
HACL_UPDATE(Hacl_Hash_Blake2s_Simd128_update,
self->blake2s_128_state, buf, len);
@@ -555,12 +555,12 @@ py_blake2_new(PyTypeObject *type, PyObject *data, int digest_size,
// Ensure that the states are NULL-initialized in case of an error.
// See: py_blake2_clear() for more details.
switch (self->impl) {
-#if HACL_CAN_COMPILE_SIMD256
+#if _Py_HACL_CAN_COMPILE_VEC256
case Blake2b_256:
self->blake2b_256_state = NULL;
break;
#endif
-#if HACL_CAN_COMPILE_SIMD128
+#if _Py_HACL_CAN_COMPILE_VEC128
case Blake2s_128:
self->blake2s_128_state = NULL;
break;
@@ -623,12 +623,12 @@ py_blake2_new(PyTypeObject *type, PyObject *data, int digest_size,
} while (0)
switch (self->impl) {
-#if HACL_CAN_COMPILE_SIMD256
+#if _Py_HACL_CAN_COMPILE_VEC256
case Blake2b_256:
BLAKE2_MALLOC(Blake2b_Simd256, self->blake2b_256_state);
break;
#endif
-#if HACL_CAN_COMPILE_SIMD128
+#if _Py_HACL_CAN_COMPILE_VEC128
case Blake2s_128:
BLAKE2_MALLOC(Blake2s_Simd128, self->blake2s_128_state);
break;
@@ -756,12 +756,12 @@ blake2_blake2b_copy_unlocked(Blake2Object *self, Blake2Object *cpy)
} while (0)
switch (self->impl) {
-#if HACL_CAN_COMPILE_SIMD256
+#if _Py_HACL_CAN_COMPILE_VEC256
case Blake2b_256:
BLAKE2_COPY(Blake2b_Simd256, blake2b_256_state);
break;
#endif
-#if HACL_CAN_COMPILE_SIMD128
+#if _Py_HACL_CAN_COMPILE_VEC128
case Blake2s_128:
BLAKE2_COPY(Blake2s_Simd128, blake2s_128_state);
break;
@@ -840,12 +840,12 @@ static uint8_t
blake2_blake2b_compute_digest(Blake2Object *self, uint8_t *digest)
{
switch (self->impl) {
-#if HACL_CAN_COMPILE_SIMD256
+#if _Py_HACL_CAN_COMPILE_VEC256
case Blake2b_256:
return Hacl_Hash_Blake2b_Simd256_digest(
self->blake2b_256_state, digest);
#endif
-#if HACL_CAN_COMPILE_SIMD128
+#if _Py_HACL_CAN_COMPILE_VEC128
case Blake2s_128:
return Hacl_Hash_Blake2s_Simd128_digest(
self->blake2s_128_state, digest);
@@ -923,11 +923,11 @@ static Hacl_Hash_Blake2b_index
hacl_get_blake2_info(Blake2Object *self)
{
switch (self->impl) {
-#if HACL_CAN_COMPILE_SIMD256
+#if _Py_HACL_CAN_COMPILE_VEC256
case Blake2b_256:
return Hacl_Hash_Blake2b_Simd256_info(self->blake2b_256_state);
#endif
-#if HACL_CAN_COMPILE_SIMD128
+#if _Py_HACL_CAN_COMPILE_VEC128
case Blake2s_128:
return Hacl_Hash_Blake2s_Simd128_info(self->blake2s_128_state);
#endif
@@ -975,12 +975,12 @@ py_blake2_clear(PyObject *op)
} while (0)
switch (self->impl) {
-#if HACL_CAN_COMPILE_SIMD256
+#if _Py_HACL_CAN_COMPILE_VEC256
case Blake2b_256:
BLAKE2_FREE(Blake2b_Simd256, self->blake2b_256_state);
break;
#endif
-#if HACL_CAN_COMPILE_SIMD128
+#if _Py_HACL_CAN_COMPILE_VEC128
case Blake2s_128:
BLAKE2_FREE(Blake2s_Simd128, self->blake2s_128_state);
break;
diff --git a/Modules/clinic/_remote_debugging_module.c.h b/Modules/clinic/_remote_debugging_module.c.h
index 5c313a2d664..e80b24b54c0 100644
--- a/Modules/clinic/_remote_debugging_module.c.h
+++ b/Modules/clinic/_remote_debugging_module.c.h
@@ -10,7 +10,8 @@ preserve
#include "pycore_modsupport.h" // _PyArg_UnpackKeywords()
PyDoc_STRVAR(_remote_debugging_RemoteUnwinder___init____doc__,
-"RemoteUnwinder(pid, *, all_threads=False, debug=False)\n"
+"RemoteUnwinder(pid, *, all_threads=False, only_active_thread=False,\n"
+" debug=False)\n"
"--\n"
"\n"
"Initialize a new RemoteUnwinder object for debugging a remote Python process.\n"
@@ -19,6 +20,8 @@ PyDoc_STRVAR(_remote_debugging_RemoteUnwinder___init____doc__,
" pid: Process ID of the target Python process to debug\n"
" all_threads: If True, initialize state for all threads in the process.\n"
" If False, only initialize for the main thread.\n"
+" only_active_thread: If True, only sample the thread holding the GIL.\n"
+" Cannot be used together with all_threads=True.\n"
" debug: If True, chain exceptions to explain the sequence of events that\n"
" lead to the exception.\n"
"\n"
@@ -28,11 +31,13 @@ PyDoc_STRVAR(_remote_debugging_RemoteUnwinder___init____doc__,
"Raises:\n"
" PermissionError: If access to the target process is denied\n"
" OSError: If unable to attach to the target process or access its memory\n"
-" RuntimeError: If unable to read debug information from the target process");
+" RuntimeError: If unable to read debug information from the target process\n"
+" ValueError: If both all_threads and only_active_thread are True");
static int
_remote_debugging_RemoteUnwinder___init___impl(RemoteUnwinderObject *self,
int pid, int all_threads,
+ int only_active_thread,
int debug);
static int
@@ -41,7 +46,7 @@ _remote_debugging_RemoteUnwinder___init__(PyObject *self, PyObject *args, PyObje
int return_value = -1;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 3
+ #define NUM_KEYWORDS 4
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -50,7 +55,7 @@ _remote_debugging_RemoteUnwinder___init__(PyObject *self, PyObject *args, PyObje
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(pid), &_Py_ID(all_threads), &_Py_ID(debug), },
+ .ob_item = { &_Py_ID(pid), &_Py_ID(all_threads), &_Py_ID(only_active_thread), &_Py_ID(debug), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -59,19 +64,20 @@ _remote_debugging_RemoteUnwinder___init__(PyObject *self, PyObject *args, PyObje
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"pid", "all_threads", "debug", NULL};
+ static const char * const _keywords[] = {"pid", "all_threads", "only_active_thread", "debug", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "RemoteUnwinder",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[3];
+ PyObject *argsbuf[4];
PyObject * const *fastargs;
Py_ssize_t nargs = PyTuple_GET_SIZE(args);
Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 1;
int pid;
int all_threads = 0;
+ int only_active_thread = 0;
int debug = 0;
fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser,
@@ -95,12 +101,21 @@ _remote_debugging_RemoteUnwinder___init__(PyObject *self, PyObject *args, PyObje
goto skip_optional_kwonly;
}
}
- debug = PyObject_IsTrue(fastargs[2]);
+ if (fastargs[2]) {
+ only_active_thread = PyObject_IsTrue(fastargs[2]);
+ if (only_active_thread < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
+ }
+ debug = PyObject_IsTrue(fastargs[3]);
if (debug < 0) {
goto exit;
}
skip_optional_kwonly:
- return_value = _remote_debugging_RemoteUnwinder___init___impl((RemoteUnwinderObject *)self, pid, all_threads, debug);
+ return_value = _remote_debugging_RemoteUnwinder___init___impl((RemoteUnwinderObject *)self, pid, all_threads, only_active_thread, debug);
exit:
return return_value;
@@ -110,13 +125,18 @@ PyDoc_STRVAR(_remote_debugging_RemoteUnwinder_get_stack_trace__doc__,
"get_stack_trace($self, /)\n"
"--\n"
"\n"
-"Returns a list of stack traces for all threads in the target process.\n"
+"Returns a list of stack traces for threads in the target process.\n"
"\n"
"Each element in the returned list is a tuple of (thread_id, frame_list), where:\n"
"- thread_id is the OS thread identifier\n"
"- frame_list is a list of tuples (function_name, filename, line_number) representing\n"
" the Python stack frames for that thread, ordered from most recent to oldest\n"
"\n"
+"The threads returned depend on the initialization parameters:\n"
+"- If only_active_thread was True: returns only the thread holding the GIL\n"
+"- If all_threads was True: returns all threads\n"
+"- Otherwise: returns only the main thread\n"
+"\n"
"Example:\n"
" [\n"
" (1234, [\n"
@@ -253,4 +273,4 @@ _remote_debugging_RemoteUnwinder_get_async_stack_trace(PyObject *self, PyObject
return return_value;
}
-/*[clinic end generated code: output=774ec34aa653402d input=a9049054013a1b77]*/
+/*[clinic end generated code: output=a37ab223d5081b16 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/mathmodule.c.h b/Modules/clinic/mathmodule.c.h
index fbb012fb6dd..a443c48faaa 100644
--- a/Modules/clinic/mathmodule.c.h
+++ b/Modules/clinic/mathmodule.c.h
@@ -84,6 +84,40 @@ PyDoc_STRVAR(math_floor__doc__,
#define MATH_FLOOR_METHODDEF \
{"floor", (PyCFunction)math_floor, METH_O, math_floor__doc__},
+PyDoc_STRVAR(math_signbit__doc__,
+"signbit($module, x, /)\n"
+"--\n"
+"\n"
+"Return True if the sign of x is negative and False otherwise.");
+
+#define MATH_SIGNBIT_METHODDEF \
+ {"signbit", (PyCFunction)math_signbit, METH_O, math_signbit__doc__},
+
+static PyObject *
+math_signbit_impl(PyObject *module, double x);
+
+static PyObject *
+math_signbit(PyObject *module, PyObject *arg)
+{
+ PyObject *return_value = NULL;
+ double x;
+
+ if (PyFloat_CheckExact(arg)) {
+ x = PyFloat_AS_DOUBLE(arg);
+ }
+ else
+ {
+ x = PyFloat_AsDouble(arg);
+ if (x == -1.0 && PyErr_Occurred()) {
+ goto exit;
+ }
+ }
+ return_value = math_signbit_impl(module, x);
+
+exit:
+ return return_value;
+}
+
PyDoc_STRVAR(math_fsum__doc__,
"fsum($module, seq, /)\n"
"--\n"
@@ -1178,4 +1212,4 @@ math_ulp(PyObject *module, PyObject *arg)
exit:
return return_value;
}
-/*[clinic end generated code: output=44bba3a0a052a364 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=4e3fa94d026f027b input=a9049054013a1b77]*/
diff --git a/Modules/getpath.py b/Modules/getpath.py
index be2210345af..b89d7427e3f 100644
--- a/Modules/getpath.py
+++ b/Modules/getpath.py
@@ -364,10 +364,9 @@ if not py_setpath:
venv_prefix = None
pyvenvcfg = []
- # Search for the 'home' key in pyvenv.cfg. Currently, we don't consider the
- # presence of a pyvenv.cfg file without a 'home' key to signify the
- # existence of a virtual environment — we quietly ignore them.
- # XXX: If we don't find a 'home' key, we don't look for another pyvenv.cfg!
+    # Search for the 'home' key in pyvenv.cfg. Even when no 'home' key is
+    # found, the venv is still considered active; home is then based on the
+    # venv's executable (if it's a symlink, home is where the symlink points).
for line in pyvenvcfg:
key, had_equ, value = line.partition('=')
if had_equ and key.strip().lower() == 'home':
@@ -412,10 +411,8 @@ if not py_setpath:
if isfile(candidate):
base_executable = candidate
break
+ # home key found; stop iterating over lines
break
- else:
- # We didn't find a 'home' key in pyvenv.cfg (no break), reset venv_prefix.
- venv_prefix = None
# ******************************************************************************
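A pure-Python sketch of the 'home' lookup above, with an illustrative pyvenv.cfg body (the path is hypothetical):

    pyvenvcfg = [
        "home = /usr/local/bin",
        "include-system-site-packages = false",
        "version = 3.15.0",
    ]
    home = None
    for line in pyvenvcfg:
        key, had_equ, value = line.partition('=')
        if had_equ and key.strip().lower() == 'home':
            home = value.strip()
            break   # home key found; stop iterating over lines
    print(home)   # /usr/local/bin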
diff --git a/Modules/hmacmodule.c b/Modules/hmacmodule.c
index e7a5ccbb19b..95e400231bb 100644
--- a/Modules/hmacmodule.c
+++ b/Modules/hmacmodule.c
@@ -31,14 +31,15 @@
#endif
#if defined(__APPLE__) && defined(__arm64__)
-# undef HACL_CAN_COMPILE_SIMD128
-# undef HACL_CAN_COMPILE_SIMD256
+# undef _Py_HACL_CAN_COMPILE_VEC128
+# undef _Py_HACL_CAN_COMPILE_VEC256
#endif
-// Small mismatch between the variable names Python defines as part of configure
-// at the ones HACL* expects to be set in order to enable those headers.
-#define HACL_CAN_COMPILE_VEC128 HACL_CAN_COMPILE_SIMD128
-#define HACL_CAN_COMPILE_VEC256 HACL_CAN_COMPILE_SIMD256
+// HACL* expects HACL_CAN_COMPILE_VEC* macros to be set in order to enable
+// the corresponding SIMD instructions, so we need to "forward" the values
+// we just deduced above.
+#define HACL_CAN_COMPILE_VEC128 _Py_HACL_CAN_COMPILE_VEC128
+#define HACL_CAN_COMPILE_VEC256 _Py_HACL_CAN_COMPILE_VEC256
#include "_hacl/Hacl_HMAC.h"
#include "_hacl/Hacl_Streaming_HMAC.h" // Hacl_Agile_Hash_* identifiers
@@ -361,7 +362,7 @@ narrow_hmac_hash_kind(hmacmodule_state *state, HMAC_Hash_Kind kind)
{
switch (kind) {
case Py_hmac_kind_hmac_blake2s_32: {
-#if HACL_CAN_COMPILE_SIMD128
+#if _Py_HACL_CAN_COMPILE_VEC128
if (state->can_run_simd128) {
return Py_hmac_kind_hmac_vectorized_blake2s_32;
}
@@ -369,7 +370,7 @@ narrow_hmac_hash_kind(hmacmodule_state *state, HMAC_Hash_Kind kind)
return kind;
}
case Py_hmac_kind_hmac_blake2b_32: {
-#if HACL_CAN_COMPILE_SIMD256
+#if _Py_HACL_CAN_COMPILE_VEC256
if (state->can_run_simd256) {
return Py_hmac_kind_hmac_vectorized_blake2b_32;
}
@@ -1601,7 +1602,7 @@ hmacmodule_init_cpu_features(hmacmodule_state *state)
#undef ECX_SSE3
#undef EBX_AVX2
-#if HACL_CAN_COMPILE_SIMD128
+#if _Py_HACL_CAN_COMPILE_VEC128
// TODO(picnixz): use py_cpuid_features (gh-125022) to improve detection
state->can_run_simd128 = sse && sse2 && sse3 && sse41 && sse42 && cmov;
#else
@@ -1611,7 +1612,7 @@ hmacmodule_init_cpu_features(hmacmodule_state *state)
state->can_run_simd128 = false;
#endif
-#if HACL_CAN_COMPILE_SIMD256
+#if _Py_HACL_CAN_COMPILE_VEC256
// TODO(picnixz): use py_cpuid_features (gh-125022) to improve detection
state->can_run_simd256 = state->can_run_simd128 && avx && avx2;
#else
diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c
index 2003546ce84..cc1a5580015 100644
--- a/Modules/itertoolsmodule.c
+++ b/Modules/itertoolsmodule.c
@@ -1880,8 +1880,8 @@ chain_traverse(PyObject *op, visitproc visit, void *arg)
return 0;
}
-static PyObject *
-chain_next(PyObject *op)
+static inline PyObject *
+chain_next_lock_held(PyObject *op)
{
chainobject *lz = chainobject_CAST(op);
PyObject *item;
@@ -1919,6 +1919,16 @@ chain_next(PyObject *op)
return NULL;
}
+static PyObject *
+chain_next(PyObject *op)
+{
+ PyObject *result;
+ Py_BEGIN_CRITICAL_SECTION(op);
+ result = chain_next_lock_held(op);
+ Py_END_CRITICAL_SECTION()
+ return result;
+}
+
PyDoc_STRVAR(chain_doc,
"chain(*iterables)\n\
--\n\
@@ -2086,7 +2096,7 @@ product_traverse(PyObject *op, visitproc visit, void *arg)
}
static PyObject *
-product_next(PyObject *op)
+product_next_lock_held(PyObject *op)
{
productobject *lz = productobject_CAST(op);
PyObject *pool;
@@ -2172,6 +2182,16 @@ empty:
return NULL;
}
+static PyObject *
+product_next(PyObject *op)
+{
+ PyObject *result;
+ Py_BEGIN_CRITICAL_SECTION(op);
+ result = product_next_lock_held(op);
+ Py_END_CRITICAL_SECTION()
+ return result;
+}
+
static PyMethodDef product_methods[] = {
{"__sizeof__", product_sizeof, METH_NOARGS, sizeof_doc},
{NULL, NULL} /* sentinel */
@@ -2319,7 +2339,7 @@ combinations_traverse(PyObject *op, visitproc visit, void *arg)
}
static PyObject *
-combinations_next(PyObject *op)
+combinations_next_lock_held(PyObject *op)
{
combinationsobject *co = combinationsobject_CAST(op);
PyObject *elem;
@@ -2404,6 +2424,16 @@ empty:
return NULL;
}
+static PyObject *
+combinations_next(PyObject *op)
+{
+ PyObject *result;
+ Py_BEGIN_CRITICAL_SECTION(op);
+ result = combinations_next_lock_held(op);
+ Py_END_CRITICAL_SECTION()
+ return result;
+}
+
static PyMethodDef combinations_methods[] = {
{"__sizeof__", combinations_sizeof, METH_NOARGS, sizeof_doc},
{NULL, NULL} /* sentinel */
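A sketch of the concurrent use these critical sections are meant to protect (most relevant on free-threaded builds): several threads draining one shared iterator:

    import itertools
    import threading

    it = itertools.chain(range(1000), range(1000))
    results = []

    def consume():
        for item in it:
            results.append(item)

    threads = [threading.Thread(target=consume) for _ in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    assert len(results) == 2000   # every item delivered exactly once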
diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c
index 1837de4735c..033de0b2907 100644
--- a/Modules/mathmodule.c
+++ b/Modules/mathmodule.c
@@ -1233,6 +1233,23 @@ FUNC2(remainder, m_remainder,
"Return x - n*y where n*y is the closest integer multiple of y.\n"
"In the case where x is exactly halfway between two multiples of\n"
"y, the nearest even value of n is used. The result is always exact.")
+
+/*[clinic input]
+math.signbit
+
+ x: double
+ /
+
+Return True if the sign of x is negative and False otherwise.
+[clinic start generated code]*/
+
+static PyObject *
+math_signbit_impl(PyObject *module, double x)
+/*[clinic end generated code: output=20c5f20156a9b871 input=3d3493fbcb5bdb3e]*/
+{
+ return PyBool_FromLong(signbit(x));
+}
+
FUNC1D(sin, sin, 0,
"sin($module, x, /)\n--\n\n"
"Return the sine of x (measured in radians).",
@@ -4199,6 +4216,7 @@ static PyMethodDef math_methods[] = {
MATH_POW_METHODDEF
MATH_RADIANS_METHODDEF
{"remainder", _PyCFunction_CAST(math_remainder), METH_FASTCALL, math_remainder_doc},
+ MATH_SIGNBIT_METHODDEF
{"sin", math_sin, METH_O, math_sin_doc},
{"sinh", math_sinh, METH_O, math_sinh_doc},
{"sqrt", math_sqrt, METH_O, math_sqrt_doc},
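A rough equivalence sketch: before ``math.signbit()``, the sign bit was typically inspected via ``math.copysign()``, which also sees the sign of ``-0.0``:

    import math

    def signbit_compat(x):
        # Pre-3.15 idiom: copysign propagates the sign bit, including -0.0.
        return math.copysign(1.0, x) < 0.0

    assert signbit_compat(-0.0) and math.signbit(-0.0)
    assert not signbit_compat(2.5) and not math.signbit(2.5)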
diff --git a/Modules/mmapmodule.c b/Modules/mmapmodule.c
index 7c4eb05488e..142ff1a2131 100644
--- a/Modules/mmapmodule.c
+++ b/Modules/mmapmodule.c
@@ -25,6 +25,7 @@
#include <Python.h>
#include "pycore_bytesobject.h" // _PyBytes_Find()
#include "pycore_fileutils.h" // _Py_stat_struct
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include <stddef.h> // offsetof()
#ifndef MS_WINDOWS
@@ -163,8 +164,7 @@ mmap_object_dealloc(PyObject *op)
Py_END_ALLOW_THREADS
#endif /* UNIX */
- if (m_obj->weakreflist != NULL)
- PyObject_ClearWeakRefs(op);
+ FT_CLEAR_WEAKREFS(op, m_obj->weakreflist);
tp->tp_free(m_obj);
Py_DECREF(tp);
diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c
index fa153d86543..c449dd848d1 100644
--- a/Modules/pyexpat.c
+++ b/Modules/pyexpat.c
@@ -98,7 +98,11 @@ typedef struct {
#define CHARACTER_DATA_BUFFER_SIZE 8192
-typedef const void *xmlhandler;
+// A generic function type for storage.
+// To avoid undefined behaviors, a handler must be cast to the correct
+// function type before it's called; see SETTER_WRAPPER below.
+typedef void (*xmlhandler)(void);
+
typedef void (*xmlhandlersetter)(XML_Parser self, xmlhandler handler);
struct HandlerInfo {
@@ -110,9 +114,7 @@ struct HandlerInfo {
static struct HandlerInfo handler_info[64];
-// gh-111178: Use _Py_NO_SANITIZE_UNDEFINED, rather than using the exact
-// handler API for each handler.
-static inline void _Py_NO_SANITIZE_UNDEFINED
+static inline void
CALL_XML_HANDLER_SETTER(const struct HandlerInfo *handler_info,
XML_Parser xml_parser, xmlhandler xml_handler)
{
@@ -1365,7 +1367,7 @@ xmlparse_handler_setter(PyObject *op, PyObject *v, void *closure)
elaborate system of handlers and state could remove the
C handler more effectively. */
if (handlernum == CharacterData && self->in_callback) {
- c_handler = noop_character_data_handler;
+ c_handler = (xmlhandler)noop_character_data_handler;
}
v = NULL;
}
@@ -2222,13 +2224,84 @@ clear_handlers(xmlparseobject *self, int initial)
}
}
+/* To avoid undefined behaviors, a function must be *called* via a function
+ * pointer of the correct type.
+ * So, for each `XML_Set*` function, we define a wrapper that calls `XML_Set*`
+ * with its argument cast to the appropriate type.
+ */
+
+typedef void (*parser_only)(void *);
+typedef int (*not_standalone)(void *);
+typedef void (*parser_and_data)(void *, const XML_Char *);
+typedef void (*parser_and_data_and_int)(void *, const XML_Char *, int);
+typedef void (*parser_and_data_and_data)(
+ void *, const XML_Char *, const XML_Char *);
+typedef void (*start_element)(void *, const XML_Char *, const XML_Char **);
+typedef void (*element_decl)(void *, const XML_Char *, XML_Content *);
+typedef void (*xml_decl)(
+ void *, const XML_Char *, const XML_Char *, int);
+typedef void (*start_doctype_decl)(
+ void *, const XML_Char *, const XML_Char *, const XML_Char *, int);
+typedef void (*notation_decl)(
+ void *,
+ const XML_Char *, const XML_Char *, const XML_Char *, const XML_Char *);
+typedef void (*attlist_decl)(
+ void *,
+ const XML_Char *, const XML_Char *, const XML_Char *, const XML_Char *,
+ int);
+typedef void (*unparsed_entity_decl)(
+ void *,
+ const XML_Char *, const XML_Char *,
+ const XML_Char *, const XML_Char *, const XML_Char *);
+typedef void (*entity_decl)(
+ void *,
+ const XML_Char *, int,
+ const XML_Char *, int,
+ const XML_Char *, const XML_Char *, const XML_Char *, const XML_Char *);
+typedef int (*external_entity_ref)(
+ XML_Parser,
+ const XML_Char *, const XML_Char *, const XML_Char *, const XML_Char *);
+
+#define SETTER_WRAPPER(NAME, TYPE) \
+ static inline void \
+ pyexpat_Set ## NAME (XML_Parser parser, xmlhandler handler) \
+ { \
+ (void)XML_Set ## NAME (parser, (TYPE)handler); \
+ }
+
+SETTER_WRAPPER(StartElementHandler, start_element)
+SETTER_WRAPPER(EndElementHandler, parser_and_data)
+SETTER_WRAPPER(ProcessingInstructionHandler, parser_and_data_and_data)
+SETTER_WRAPPER(CharacterDataHandler, parser_and_data_and_int)
+SETTER_WRAPPER(UnparsedEntityDeclHandler, unparsed_entity_decl)
+SETTER_WRAPPER(NotationDeclHandler, notation_decl)
+SETTER_WRAPPER(StartNamespaceDeclHandler, parser_and_data_and_data)
+SETTER_WRAPPER(EndNamespaceDeclHandler, parser_and_data)
+SETTER_WRAPPER(CommentHandler, parser_and_data)
+SETTER_WRAPPER(StartCdataSectionHandler, parser_only)
+SETTER_WRAPPER(EndCdataSectionHandler, parser_only)
+SETTER_WRAPPER(DefaultHandler, parser_and_data_and_int)
+SETTER_WRAPPER(DefaultHandlerExpand, parser_and_data_and_int)
+SETTER_WRAPPER(NotStandaloneHandler, not_standalone)
+SETTER_WRAPPER(ExternalEntityRefHandler, external_entity_ref)
+SETTER_WRAPPER(StartDoctypeDeclHandler, start_doctype_decl)
+SETTER_WRAPPER(EndDoctypeDeclHandler, parser_only)
+SETTER_WRAPPER(EntityDeclHandler, entity_decl)
+SETTER_WRAPPER(XmlDeclHandler, xml_decl)
+SETTER_WRAPPER(ElementDeclHandler, element_decl)
+SETTER_WRAPPER(AttlistDeclHandler, attlist_decl)
+#if XML_COMBINED_VERSION >= 19504
+SETTER_WRAPPER(SkippedEntityHandler, parser_and_data_and_int)
+#endif
+#undef SETTER_WRAPPER
+
static struct HandlerInfo handler_info[] = {
// The cast to `xmlhandlersetter` is needed as the signature of XML
// handler functions is not compatible with `xmlhandlersetter` since
// their second parameter is narrower than a `const void *`.
#define HANDLER_INFO(name) \
- {#name, (xmlhandlersetter)XML_Set##name, my_##name},
+ {#name, (xmlhandlersetter)pyexpat_Set##name, (xmlhandler)my_##name},
HANDLER_INFO(StartElementHandler)
HANDLER_INFO(EndElementHandler)
diff --git a/Objects/classobject.c b/Objects/classobject.c
index 58e1d179773..e71f301f2ef 100644
--- a/Objects/classobject.c
+++ b/Objects/classobject.c
@@ -7,6 +7,7 @@
#include "pycore_object.h"
#include "pycore_pyerrors.h"
#include "pycore_pystate.h" // _PyThreadState_GET()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include "clinic/classobject.c.h"
@@ -245,8 +246,7 @@ method_dealloc(PyObject *self)
{
PyMethodObject *im = _PyMethodObject_CAST(self);
_PyObject_GC_UNTRACK(im);
- if (im->im_weakreflist != NULL)
- PyObject_ClearWeakRefs((PyObject *)im);
+ FT_CLEAR_WEAKREFS(self, im->im_weakreflist);
Py_DECREF(im->im_func);
Py_XDECREF(im->im_self);
assert(Py_IS_TYPE(self, &PyMethod_Type));
diff --git a/Objects/codeobject.c b/Objects/codeobject.c
index 91772bc9d19..ba178abc0c0 100644
--- a/Objects/codeobject.c
+++ b/Objects/codeobject.c
@@ -17,6 +17,7 @@
#include "pycore_tuple.h" // _PyTuple_ITEMS()
#include "pycore_unicodeobject.h" // _PyUnicode_InternImmortal()
#include "pycore_uniqueid.h" // _PyObject_AssignUniqueId()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include "clinic/codeobject.c.h"
#include <stdbool.h>
@@ -2436,9 +2437,7 @@ code_dealloc(PyObject *self)
Py_XDECREF(co->_co_cached->_co_varnames);
PyMem_Free(co->_co_cached);
}
- if (co->co_weakreflist != NULL) {
- PyObject_ClearWeakRefs(self);
- }
+ FT_CLEAR_WEAKREFS(self, co->co_weakreflist);
free_monitoring_data(co->_co_monitoring);
#ifdef Py_GIL_DISABLED
// The first element always points to the mutable bytecode at the end of
diff --git a/Objects/descrobject.c b/Objects/descrobject.c
index 10c465b95ac..d3d17e92b6d 100644
--- a/Objects/descrobject.c
+++ b/Objects/descrobject.c
@@ -1233,7 +1233,10 @@ static PyObject *
mappingproxy_richcompare(PyObject *self, PyObject *w, int op)
{
mappingproxyobject *v = (mappingproxyobject *)self;
- return PyObject_RichCompare(v->mapping, w, op);
+ if (op == Py_EQ || op == Py_NE) {
+ return PyObject_RichCompare(v->mapping, w, op);
+ }
+ Py_RETURN_NOTIMPLEMENTED;
}
static int
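The mappingproxy_richcompare change above narrows delegation to equality only.
A sketch of the observable Python-level effect (what happens for ordering
depends on what the other operand implements):

    from types import MappingProxyType

    proxy = MappingProxyType({"a": 1})
    proxy == {"a": 1}   # True: == and != still delegate to the wrapped mapping
    proxy != {"b": 2}   # True
    # Ordering (e.g. proxy < {"a": 1}) is no longer forwarded; the proxy returns
    # NotImplemented itself, which usually surfaces as a TypeError unless the
    # other operand handles the comparison.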
diff --git a/Objects/dictobject.c b/Objects/dictobject.c
index 6b7b150f0e2..be62ae5eefd 100644
--- a/Objects/dictobject.c
+++ b/Objects/dictobject.c
@@ -4411,6 +4411,7 @@ dict_setdefault_ref_lock_held(PyObject *d, PyObject *key, PyObject *default_valu
if (result) {
*result = NULL;
}
+ return -1;
}
STORE_USED(mp, mp->ma_used + 1);
diff --git a/Objects/funcobject.c b/Objects/funcobject.c
index f8dd10a346d..9532c21fc70 100644
--- a/Objects/funcobject.c
+++ b/Objects/funcobject.c
@@ -10,6 +10,7 @@
#include "pycore_pyerrors.h" // _PyErr_Occurred()
#include "pycore_setobject.h" // _PySet_NextEntry()
#include "pycore_stats.h"
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
static const char *
@@ -1148,9 +1149,7 @@ func_dealloc(PyObject *self)
return;
}
_PyObject_GC_UNTRACK(op);
- if (op->func_weakreflist != NULL) {
- PyObject_ClearWeakRefs((PyObject *) op);
- }
+ FT_CLEAR_WEAKREFS(self, op->func_weakreflist);
(void)func_clear((PyObject*)op);
// These aren't cleared by func_clear().
_Py_DECREF_CODE((PyCodeObject *)op->func_code);
diff --git a/Objects/genericaliasobject.c b/Objects/genericaliasobject.c
index 07b57f0c552..3bb961aa2b6 100644
--- a/Objects/genericaliasobject.c
+++ b/Objects/genericaliasobject.c
@@ -7,6 +7,7 @@
#include "pycore_typevarobject.h" // _Py_typing_type_repr
#include "pycore_unicodeobject.h" // _PyUnicode_EqualToASCIIString()
#include "pycore_unionobject.h" // _Py_union_type_or, _PyGenericAlias_Check
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include <stdbool.h>
@@ -33,9 +34,7 @@ ga_dealloc(PyObject *self)
gaobject *alias = (gaobject *)self;
_PyObject_GC_UNTRACK(self);
- if (alias->weakreflist != NULL) {
- PyObject_ClearWeakRefs((PyObject *)alias);
- }
+ FT_CLEAR_WEAKREFS(self, alias->weakreflist);
Py_XDECREF(alias->origin);
Py_XDECREF(alias->args);
Py_XDECREF(alias->parameters);
diff --git a/Objects/genobject.c b/Objects/genobject.c
index d0cb75d2d17..3e7d6257006 100644
--- a/Objects/genobject.c
+++ b/Objects/genobject.c
@@ -17,6 +17,7 @@
#include "pycore_pyerrors.h" // _PyErr_ClearExcState()
#include "pycore_pystate.h" // _PyThreadState_GET()
#include "pycore_warnings.h" // _PyErr_WarnUnawaitedCoroutine()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include "opcode_ids.h" // RESUME, etc
@@ -161,8 +162,7 @@ gen_dealloc(PyObject *self)
_PyObject_GC_UNTRACK(gen);
- if (gen->gi_weakreflist != NULL)
- PyObject_ClearWeakRefs(self);
+ FT_CLEAR_WEAKREFS(self, gen->gi_weakreflist);
_PyObject_GC_TRACK(self);
diff --git a/Objects/listobject.c b/Objects/listobject.c
index 23d3472b6d4..1b36f4c25ab 100644
--- a/Objects/listobject.c
+++ b/Objects/listobject.c
@@ -1685,10 +1685,7 @@ sortslice_advance(sortslice *slice, Py_ssize_t n)
/* Avoid malloc for small temp arrays. */
#define MERGESTATE_TEMP_SIZE 256
-/* The largest value of minrun. This must be a power of 2, and >= 1, so that
- * the compute_minrun() algorithm guarantees to return a result no larger than
- * this,
- */
+/* The largest value of minrun. This must be a power of 2, and >= 1 */
#define MAX_MINRUN 64
#if ((MAX_MINRUN) < 1) || ((MAX_MINRUN) & ((MAX_MINRUN) - 1))
#error "MAX_MINRUN must be a power of 2, and >= 1"
@@ -1749,6 +1746,11 @@ struct s_MergeState {
* of tuples. It may be set to safe_object_compare, but the idea is that hopefully
* we can assume more, and use one of the special-case compares. */
int (*tuple_elem_compare)(PyObject *, PyObject *, MergeState *);
+
+ /* Variables used for minrun computation. The "ideal" minrun length is
+ * the infinite precision listlen / 2**e. See listsort.txt.
+ */
+ Py_ssize_t mr_current, mr_e, mr_mask;
};
/* binarysort is the best method for sorting small arrays: it does few
@@ -2210,6 +2212,14 @@ merge_init(MergeState *ms, Py_ssize_t list_size, int has_keyfunc,
ms->min_gallop = MIN_GALLOP;
ms->listlen = list_size;
ms->basekeys = lo->keys;
+
+ /* State for generating minrun values. See listsort.txt. */
+ ms->mr_e = 0;
+ while (list_size >> ms->mr_e >= MAX_MINRUN) {
+ ++ms->mr_e;
+ }
+ ms->mr_mask = (1 << ms->mr_e) - 1;
+ ms->mr_current = 0;
}
/* Free all the temp memory owned by the MergeState. This must be called
@@ -2687,27 +2697,15 @@ merge_force_collapse(MergeState *ms)
return 0;
}
-/* Compute a good value for the minimum run length; natural runs shorter
- * than this are boosted artificially via binary insertion.
- *
- * If n < MAX_MINRUN return n (it's too small to bother with fancy stuff).
- * Else if n is an exact power of 2, return MAX_MINRUN / 2.
- * Else return an int k, MAX_MINRUN / 2 <= k <= MAX_MINRUN, such that n/k is
- * close to, but strictly less than, an exact power of 2.
- *
- * See listsort.txt for more info.
- */
-static Py_ssize_t
-merge_compute_minrun(Py_ssize_t n)
+/* Return the next minrun value to use. See listsort.txt. */
+Py_LOCAL_INLINE(Py_ssize_t)
+minrun_next(MergeState *ms)
{
- Py_ssize_t r = 0; /* becomes 1 if any 1 bits are shifted off */
-
- assert(n >= 0);
- while (n >= MAX_MINRUN) {
- r |= n & 1;
- n >>= 1;
- }
- return n + r;
+ ms->mr_current += ms->listlen;
+ assert(ms->mr_current >= 0); /* no overflow */
+ Py_ssize_t result = ms->mr_current >> ms->mr_e;
+ ms->mr_current &= ms->mr_mask;
+ return result;
}
/* Here we define custom comparison functions to optimize for the cases one commonly
@@ -3075,7 +3073,6 @@ list_sort_impl(PyListObject *self, PyObject *keyfunc, int reverse)
/* March over the array once, left to right, finding natural runs,
* and extending short natural runs to minrun elements.
*/
- minrun = merge_compute_minrun(nremaining);
do {
Py_ssize_t n;
@@ -3084,6 +3081,7 @@ list_sort_impl(PyListObject *self, PyObject *keyfunc, int reverse)
if (n < 0)
goto fail;
/* If short, extend to min(minrun, nremaining). */
+ minrun = minrun_next(&ms);
if (n < minrun) {
const Py_ssize_t force = nremaining <= minrun ?
nremaining : minrun;
diff --git a/Objects/listsort.txt b/Objects/listsort.txt
index f387d9c116e..5b2fc7d50a2 100644
--- a/Objects/listsort.txt
+++ b/Objects/listsort.txt
@@ -270,8 +270,8 @@ result. This has two primary good effects:
Computing minrun
----------------
-If N < MAX_MINRUN, minrun is N. IOW, binary insertion sort is used for the
-whole array then; it's hard to beat that given the overheads of trying
+If N < MAX_MINRUN, minrun is N. IOW, binary insertion sort is used for the
+whole array then; it's hard to beat that given the overheads of trying
something fancier (see note BINSORT).
When N is a power of 2, testing on random data showed that minrun values of
@@ -288,7 +288,6 @@ that 32 isn't a good choice for the general case! Consider N=2112:
>>> divmod(2112, 32)
(66, 0)
->>>
If the data is randomly ordered, we're very likely to end up with 66 runs
each of length 32. The first 64 of these trigger a sequence of perfectly
@@ -301,22 +300,94 @@ to get 64 elements into place).
If we take minrun=33 in this case, then we're very likely to end up with 64
runs each of length 33, and then all merges are perfectly balanced. Better!
-What we want to avoid is picking minrun such that in
+The original code used a cheap heuristic to pick a minrun that avoided the
+very worst cases of imbalance for the final merge, but "pretty bad" cases
+still existed.
- q, r = divmod(N, minrun)
+In 2025, Stefan Pochmann found a much better approach, based on letting minrun
+vary a bit from one run to the next. Under his scheme, at _all_ levels of the
+merge tree:
-q is a power of 2 and r>0 (then the last merge only gets r elements into
-place, and r < minrun is small compared to N), or q a little larger than a
-power of 2 regardless of r (then we've got a case similar to "2112", again
-leaving too little work for the last merge to do).
+- The number of runs is a power of 2.
+- At most two different run lengths appear.
+- When two do appear, the smaller is one less than the larger.
+- The lengths of run pairs merged never differ by more than one.
-Instead we pick a minrun in range(MAX_MINRUN / 2, MAX_MINRUN + 1) such that
-N/minrun is exactly a power of 2, or if that isn't possible, is close to, but
-strictly less than, a power of 2. This is easier to do than it may sound:
-take the first log2(MAX_MINRUN) bits of N, and add 1 if any of the remaining
-bits are set. In fact, that rule covers every case in this section, including
-small N and exact powers of 2; merge_compute_minrun() is a deceptively simple
-function.
+So, in all respects, as perfectly balanced as possible.
+
+For the 2112 case, that also keeps minrun at 33, but we were lucky there
+that 2112 is 33 times a power of 2. The new approach doesn't rely on luck.
+
+For example, with 315 random elements, the old scheme uses fixed minrun=40 and
+produces runs of length 40, except for the last. The new scheme produces a
+mix of lengths 39 and 40:
+
+old: 40 40 40 40 40 40 40 35
+new: 39 39 40 39 39 40 39 40
+
+Both schemes produce eight runs, a power of 2. That's good for a balanced
+merge tree. But the new scheme allows merges where left and right length
+never differ by more than 1:
+
+39 39 40 39 39 40 39 40
+ 78 79 79 79
+ 157 158
+ 315
+
+(This shows merges downward, e.g., two runs of length 39 are merged and
+become a run of length 78.)
+
+With larger lists, the old scheme can get even more unbalanced. For example,
+with 32769 elements (that's 2**15 + 1), it uses minrun=33 and produces 993
+runs (of length 33). That's not even a power of 2. The new scheme instead
+produces 1024 runs, all with length 32 except for the last one with length 33.
+
+How does it work? Ideally, all runs would be exactly equally long. For the
+above example, each run would have 315/8 = 39.375 elements. Which of course
+doesn't work. But we can get close:
+
+For the first run, we'd like 39.375 elements. Since that's impossible, we
+instead use 39 (the floor) and remember the current leftover fraction 0.375.
+For the second run, we add 0.375 + 39.375 = 39.75. Again impossible, so we
+instead use 39 and remember 0.75. For the third run, we add 0.75 + 39.375 =
+40.125. This time we get 40 and remember 0.125. And so on. Here's a Python
+generator doing that:
+
+def gen_minruns_with_floats(n):
+ mr = n
+ while mr >= MAX_MINRUN:
+ mr /= 2
+
+ mr_current = 0
+ while True:
+ mr_current += mr
+ yield int(mr_current)
+ mr_current %= 1
+
+But while all arithmetic here can be done exactly using binary floating point,
+floats have less precision than a Py_ssize_t, and mixing floats with ints is
+needlessly expensive anyway.
+
+So here's an integer version, where the internal numbers are scaled up by
+2**e, or rather not divided by 2**e. Instead, only each yielded minrun gets
+divided (by right-shifting). For example instead of adding 39.375 and
+reducing modulo 1, it just adds 315 and reduces modulo 8. And always divides
+by 8 to get each actual minrun value:
+
+def gen_minruns_simpler(n):
+ e = 0
+ while (n >> e) >= MAX_MINRUN:
+ e += 1
+ mask = (1 << e) - 1
+
+ mr_current = 0
+ while True:
+ mr_current += n
+ yield mr_current >> e
+ mr_current &= mask
+
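+As a quick check, with MAX_MINRUN = 64 the first eight values this generator
+yields for the 315-element example reproduce the "new" run lengths shown
+above:
+
+>>> from itertools import islice
+>>> list(islice(gen_minruns_simpler(315), 8))
+[39, 39, 40, 39, 39, 40, 39, 40]
+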
+See note MINRUN CODE for a full implementation and a driver that exhaustively
+verifies the claims above for all list lengths through 2 million.
The Merge Pattern
@@ -820,3 +891,75 @@ partially mitigated by pre-scanning the data to determine whether the data is
homogeneous with respect to type. If so, it is sometimes possible to
substitute faster type-specific comparisons for the slower, generic
PyObject_RichCompareBool.
+
+MINRUN CODE
+from itertools import accumulate
+try:
+ from itertools import batched
+except ImportError:
+ from itertools import islice
+ def batched(xs, k):
+ it = iter(xs)
+ while chunk := tuple(islice(it, k)):
+ yield chunk
+
+MAX_MINRUN = 64
+
+def gen_minruns(n):
+ # In listobject.c, initialization is done in merge_init(), and
+ # the body of the loop in minrun_next().
+ mr_e = 0
+ while (n >> mr_e) >= MAX_MINRUN:
+ mr_e += 1
+ mr_mask = (1 << mr_e) - 1
+
+ mr_current = 0
+ while True:
+ mr_current += n
+ yield mr_current >> mr_e
+ mr_current &= mr_mask
+
+def chew(n, show=False):
+ if n < 1:
+ return
+
+ sizes = []
+ tot = 0
+ for size in gen_minruns(n):
+ sizes.append(size)
+ tot += size
+ if tot >= n:
+ break
+ assert tot == n
+    if show:
+        print(n, len(sizes))
+
+ small, large = MAX_MINRUN // 2, MAX_MINRUN
+ while len(sizes) > 1:
+ assert not len(sizes) & 1
+ assert len(sizes).bit_count() == 1 # i.e., power of 2
+ assert sum(sizes) == n
+ assert min(sizes) >= min(n, small)
+ assert max(sizes) <= large
+
+ d = set(sizes)
+ assert len(d) <= 2
+ if len(d) == 2:
+ lo, hi = sorted(d)
+ assert lo + 1 == hi
+
+ mr = n / len(sizes)
+ for i, s in enumerate(accumulate(sizes, initial=0)):
+ assert int(mr * i) == s
+
+ newsizes = []
+ for a, b in batched(sizes, 2):
+ assert abs(a - b) <= 1
+ newsizes.append(a + b)
+ sizes = newsizes
+ small = large
+ large *= 2
+
+ assert sizes[0] == n
+
+for n in range(2_000_001):
+ chew(n)
\ No newline at end of file
diff --git a/Objects/methodobject.c b/Objects/methodobject.c
index c3dcd09ad1c..e6e469ca270 100644
--- a/Objects/methodobject.c
+++ b/Objects/methodobject.c
@@ -8,6 +8,7 @@
#include "pycore_object.h"
#include "pycore_pyerrors.h"
#include "pycore_pystate.h" // _PyThreadState_GET()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
/* undefine macro trampoline to PyCFunction_NewEx */
@@ -167,9 +168,7 @@ meth_dealloc(PyObject *self)
{
PyCFunctionObject *m = _PyCFunctionObject_CAST(self);
PyObject_GC_UnTrack(m);
- if (m->m_weakreflist != NULL) {
- PyObject_ClearWeakRefs((PyObject*) m);
- }
+ FT_CLEAR_WEAKREFS(self, m->m_weakreflist);
// We need to access ml_flags here rather than later.
// `m->m_ml` might have the same lifetime
// as `m_self` when it's dynamically allocated.
diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c
index ba86b41e945..862395e7881 100644
--- a/Objects/moduleobject.c
+++ b/Objects/moduleobject.c
@@ -13,6 +13,7 @@
#include "pycore_pyerrors.h" // _PyErr_FormatFromCause()
#include "pycore_pystate.h" // _PyInterpreterState_GET()
#include "pycore_unicodeobject.h" // _PyUnicode_EqualToASCIIString()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include "osdefs.h" // MAXPATHLEN
@@ -826,8 +827,7 @@ module_dealloc(PyObject *self)
if (verbose && m->md_name) {
PySys_FormatStderr("# destroy %U\n", m->md_name);
}
- if (m->md_weaklist != NULL)
- PyObject_ClearWeakRefs((PyObject *) m);
+ FT_CLEAR_WEAKREFS(self, m->md_weaklist);
/* bpo-39824: Don't call m_free() if m_size > 0 and md_state=NULL */
if (m->md_def && m->md_def->m_free
diff --git a/Objects/namespaceobject.c b/Objects/namespaceobject.c
index 0fc2bcea4cb..201cb8a7df8 100644
--- a/Objects/namespaceobject.c
+++ b/Objects/namespaceobject.c
@@ -194,10 +194,14 @@ namespace_clear(PyObject *op)
static PyObject *
namespace_richcompare(PyObject *self, PyObject *other, int op)
{
- if (PyObject_TypeCheck(self, &_PyNamespace_Type) &&
- PyObject_TypeCheck(other, &_PyNamespace_Type))
+ if (
+ (op == Py_EQ || op == Py_NE) &&
+ PyObject_TypeCheck(self, &_PyNamespace_Type) &&
+ PyObject_TypeCheck(other, &_PyNamespace_Type)
+ ) {
return PyObject_RichCompare(((_PyNamespaceObject *)self)->ns_dict,
((_PyNamespaceObject *)other)->ns_dict, op);
+ }
Py_RETURN_NOTIMPLEMENTED;
}
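The namespace_richcompare change above is analogous: SimpleNamespace answers
only == and !=, and other comparison operators now return NotImplemented
directly instead of being forwarded to the underlying __dict__. A short
Python-level sketch:

    from types import SimpleNamespace

    SimpleNamespace(a=1) == SimpleNamespace(a=1)   # True
    SimpleNamespace(a=1) != SimpleNamespace(a=2)   # True
    # SimpleNamespace(a=1) < SimpleNamespace(a=2) yields NotImplemented on both
    # sides, so the comparison ends in a TypeError.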
diff --git a/Objects/object.c b/Objects/object.c
index 4d60128b092..3ed7d55593d 100644
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -1131,11 +1131,14 @@ PyObject_RichCompareBool(PyObject *v, PyObject *w, int op)
res = PyObject_RichCompare(v, w, op);
if (res == NULL)
return -1;
- if (PyBool_Check(res))
+ if (PyBool_Check(res)) {
ok = (res == Py_True);
- else
+ assert(_Py_IsImmortal(res));
+ }
+ else {
ok = PyObject_IsTrue(res);
- Py_DECREF(res);
+ Py_DECREF(res);
+ }
return ok;
}
@@ -1210,16 +1213,27 @@ PyObject_HasAttrString(PyObject *obj, const char *name)
int
PyObject_SetAttrString(PyObject *v, const char *name, PyObject *w)
{
- PyObject *s;
- int res;
+ PyThreadState *tstate = _PyThreadState_GET();
+ if (w == NULL && _PyErr_Occurred(tstate)) {
+ PyObject *exc = _PyErr_GetRaisedException(tstate);
+ _PyErr_SetString(tstate, PyExc_SystemError,
+ "PyObject_SetAttrString() must not be called with NULL value "
+ "and an exception set");
+ _PyErr_ChainExceptions1Tstate(tstate, exc);
+ return -1;
+ }
- if (Py_TYPE(v)->tp_setattr != NULL)
+ if (Py_TYPE(v)->tp_setattr != NULL) {
return (*Py_TYPE(v)->tp_setattr)(v, (char*)name, w);
- s = PyUnicode_InternFromString(name);
- if (s == NULL)
+ }
+
+ PyObject *s = PyUnicode_InternFromString(name);
+ if (s == NULL) {
return -1;
- res = PyObject_SetAttr(v, s, w);
- Py_XDECREF(s);
+ }
+
+ int res = PyObject_SetAttr(v, s, w);
+ Py_DECREF(s);
return res;
}
@@ -1437,6 +1451,16 @@ PyObject_HasAttr(PyObject *obj, PyObject *name)
int
PyObject_SetAttr(PyObject *v, PyObject *name, PyObject *value)
{
+ PyThreadState *tstate = _PyThreadState_GET();
+ if (value == NULL && _PyErr_Occurred(tstate)) {
+ PyObject *exc = _PyErr_GetRaisedException(tstate);
+ _PyErr_SetString(tstate, PyExc_SystemError,
+ "PyObject_SetAttr() must not be called with NULL value "
+ "and an exception set");
+ _PyErr_ChainExceptions1Tstate(tstate, exc);
+ return -1;
+ }
+
PyTypeObject *tp = Py_TYPE(v);
int err;
@@ -1448,8 +1472,7 @@ PyObject_SetAttr(PyObject *v, PyObject *name, PyObject *value)
}
Py_INCREF(name);
- PyInterpreterState *interp = _PyInterpreterState_GET();
- _PyUnicode_InternMortal(interp, &name);
+ _PyUnicode_InternMortal(tstate->interp, &name);
if (tp->tp_setattro != NULL) {
err = (*tp->tp_setattro)(v, name, value);
Py_DECREF(name);
diff --git a/Objects/odictobject.c b/Objects/odictobject.c
index 891f6197401..02fcbbaa0d4 100644
--- a/Objects/odictobject.c
+++ b/Objects/odictobject.c
@@ -473,6 +473,7 @@ later:
#include "pycore_pyerrors.h" // _PyErr_ChainExceptions1()
#include "pycore_tuple.h" // _PyTuple_Recycle()
#include <stddef.h> // offsetof()
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include "clinic/odictobject.c.h"
@@ -1391,8 +1392,7 @@ odict_dealloc(PyObject *op)
PyObject_GC_UnTrack(self);
Py_XDECREF(self->od_inst_dict);
- if (self->od_weakreflist != NULL)
- PyObject_ClearWeakRefs((PyObject *)self);
+ FT_CLEAR_WEAKREFS(op, self->od_weakreflist);
_odict_clear_nodes(self);
PyDict_Type.tp_dealloc((PyObject *)self);
diff --git a/Objects/picklebufobject.c b/Objects/picklebufobject.c
index 3ce800de04c..50f17687bc4 100644
--- a/Objects/picklebufobject.c
+++ b/Objects/picklebufobject.c
@@ -1,6 +1,7 @@
/* PickleBuffer object implementation */
#include "Python.h"
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include <stddef.h>
typedef struct {
@@ -111,8 +112,7 @@ picklebuf_dealloc(PyObject *op)
{
PyPickleBufferObject *self = (PyPickleBufferObject*)op;
PyObject_GC_UnTrack(self);
- if (self->weakreflist != NULL)
- PyObject_ClearWeakRefs((PyObject *) self);
+ FT_CLEAR_WEAKREFS(op, self->weakreflist);
PyBuffer_Release(&self->view);
Py_TYPE(self)->tp_free((PyObject *) self);
}
diff --git a/Objects/setobject.c b/Objects/setobject.c
index 8aa6b0d1809..6e4fc5957ca 100644
--- a/Objects/setobject.c
+++ b/Objects/setobject.c
@@ -40,6 +40,7 @@
#include "pycore_pyatomic_ft_wrappers.h" // FT_ATOMIC_LOAD_SSIZE_RELAXED()
#include "pycore_pyerrors.h" // _PyErr_SetKeyError()
#include "pycore_setobject.h" // _PySet_NextEntry() definition
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
#include "stringlib/eq.h" // unicode_eq()
#include <stddef.h> // offsetof()
@@ -536,8 +537,7 @@ set_dealloc(PyObject *self)
/* bpo-31095: UnTrack is needed before calling any callbacks */
PyObject_GC_UnTrack(so);
- if (so->weakreflist != NULL)
- PyObject_ClearWeakRefs((PyObject *) so);
+ FT_CLEAR_WEAKREFS(self, so->weakreflist);
for (entry = so->table; used > 0; entry++) {
if (entry->key && entry->key != dummy) {
diff --git a/Objects/typeobject.c b/Objects/typeobject.c
index b9d54961069..e84278d13c3 100644
--- a/Objects/typeobject.c
+++ b/Objects/typeobject.c
@@ -54,7 +54,6 @@ class object "PyObject *" "&PyBaseObject_Type"
PyUnicode_CheckExact(name) && \
(PyUnicode_GET_LENGTH(name) <= MCACHE_MAX_ATTR_SIZE)
-#define NEXT_GLOBAL_VERSION_TAG _PyRuntime.types.next_version_tag
#define NEXT_VERSION_TAG(interp) \
(interp)->types.next_version_tag
@@ -266,8 +265,8 @@ static_ext_type_lookup(PyInterpreterState *interp, size_t index,
assert(index < _Py_MAX_MANAGED_STATIC_EXT_TYPES);
size_t full_index = index + _Py_MAX_MANAGED_STATIC_BUILTIN_TYPES;
- int64_t interp_count =
- _PyRuntime.types.managed_static.types[full_index].interp_count;
+ int64_t interp_count = _Py_atomic_load_int64(
+ &_PyRuntime.types.managed_static.types[full_index].interp_count);
assert((interp_count == 0) ==
(_PyRuntime.types.managed_static.types[full_index].type == NULL));
*p_interp_count = interp_count;
@@ -344,7 +343,7 @@ managed_static_type_state_init(PyInterpreterState *interp, PyTypeObject *self,
: index + _Py_MAX_MANAGED_STATIC_BUILTIN_TYPES;
assert((initial == 1) ==
- (_PyRuntime.types.managed_static.types[full_index].interp_count == 0));
+ (_Py_atomic_load_int64(&_PyRuntime.types.managed_static.types[full_index].interp_count) == 0));
(void)_Py_atomic_add_int64(
&_PyRuntime.types.managed_static.types[full_index].interp_count, 1);
@@ -393,7 +392,7 @@ managed_static_type_state_clear(PyInterpreterState *interp, PyTypeObject *self,
: &(interp->types.for_extensions.initialized[index]);
assert(state != NULL);
- assert(_PyRuntime.types.managed_static.types[full_index].interp_count > 0);
+ assert(_Py_atomic_load_int64(&_PyRuntime.types.managed_static.types[full_index].interp_count) > 0);
assert(_PyRuntime.types.managed_static.types[full_index].type == state->type);
assert(state->type != NULL);
@@ -403,7 +402,7 @@ managed_static_type_state_clear(PyInterpreterState *interp, PyTypeObject *self,
(void)_Py_atomic_add_int64(
&_PyRuntime.types.managed_static.types[full_index].interp_count, -1);
if (final) {
- assert(!_PyRuntime.types.managed_static.types[full_index].interp_count);
+ assert(!_Py_atomic_load_int64(&_PyRuntime.types.managed_static.types[full_index].interp_count));
_PyRuntime.types.managed_static.types[full_index].type = NULL;
managed_static_type_index_clear(self);
@@ -1359,6 +1358,19 @@ _PyType_LookupByVersion(unsigned int version)
#error "_Py_ATTR_CACHE_UNUSED must be bigger than max"
#endif
+static inline unsigned int
+next_global_version_tag(void)
+{
+ unsigned int old;
+ do {
+ old = _Py_atomic_load_uint_relaxed(&_PyRuntime.types.next_version_tag);
+ if (old >= _Py_MAX_GLOBAL_TYPE_VERSION_TAG) {
+ return 0;
+ }
+ } while (!_Py_atomic_compare_exchange_uint(&_PyRuntime.types.next_version_tag, &old, old + 1));
+ return old + 1;
+}
+
static int
assign_version_tag(PyInterpreterState *interp, PyTypeObject *type)
{
@@ -1389,11 +1401,12 @@ assign_version_tag(PyInterpreterState *interp, PyTypeObject *type)
}
if (type->tp_flags & Py_TPFLAGS_IMMUTABLETYPE) {
/* static types */
- if (NEXT_GLOBAL_VERSION_TAG > _Py_MAX_GLOBAL_TYPE_VERSION_TAG) {
+ unsigned int next_version_tag = next_global_version_tag();
+ if (next_version_tag == 0) {
/* We have run out of version numbers */
return 0;
}
- set_version_unlocked(type, NEXT_GLOBAL_VERSION_TAG++);
+ set_version_unlocked(type, next_version_tag);
assert (type->tp_version_tag <= _Py_MAX_GLOBAL_TYPE_VERSION_TAG);
}
else {
@@ -9007,7 +9020,11 @@ type_ready_set_new(PyTypeObject *type, int initial)
&& base == &PyBaseObject_Type
&& !(type->tp_flags & Py_TPFLAGS_HEAPTYPE))
{
- type_add_flags(type, Py_TPFLAGS_DISALLOW_INSTANTIATION);
+ if (initial) {
+ type_add_flags(type, Py_TPFLAGS_DISALLOW_INSTANTIATION);
+ } else {
+ assert(type->tp_flags & Py_TPFLAGS_DISALLOW_INSTANTIATION);
+ }
}
if (!(type->tp_flags & Py_TPFLAGS_DISALLOW_INSTANTIATION)) {
@@ -9021,13 +9038,17 @@ type_ready_set_new(PyTypeObject *type, int initial)
}
}
else {
- // tp_new is NULL: inherit tp_new from base
- type->tp_new = base->tp_new;
+ if (initial) {
+ // tp_new is NULL: inherit tp_new from base
+ type->tp_new = base->tp_new;
+ }
}
}
else {
// Py_TPFLAGS_DISALLOW_INSTANTIATION sets tp_new to NULL
- type->tp_new = NULL;
+ if (initial) {
+ type->tp_new = NULL;
+ }
}
return 0;
}
@@ -9160,7 +9181,12 @@ type_ready(PyTypeObject *type, int initial)
}
/* All done -- set the ready flag */
- type_add_flags(type, Py_TPFLAGS_READY);
+ if (initial) {
+ type_add_flags(type, Py_TPFLAGS_READY);
+ } else {
+ assert(type->tp_flags & Py_TPFLAGS_READY);
+ }
+
stop_readying(type);
assert(_PyType_CheckConsistency(type));
@@ -9209,15 +9235,16 @@ init_static_type(PyInterpreterState *interp, PyTypeObject *self,
assert(!(self->tp_flags & Py_TPFLAGS_MANAGED_DICT));
assert(!(self->tp_flags & Py_TPFLAGS_MANAGED_WEAKREF));
- if ((self->tp_flags & Py_TPFLAGS_READY) == 0) {
- assert(initial);
+ if (initial) {
+ assert((self->tp_flags & Py_TPFLAGS_READY) == 0);
type_add_flags(self, _Py_TPFLAGS_STATIC_BUILTIN);
type_add_flags(self, Py_TPFLAGS_IMMUTABLETYPE);
- assert(NEXT_GLOBAL_VERSION_TAG <= _Py_MAX_GLOBAL_TYPE_VERSION_TAG);
if (self->tp_version_tag == 0) {
- _PyType_SetVersion(self, NEXT_GLOBAL_VERSION_TAG++);
+ unsigned int next_version_tag = next_global_version_tag();
+ assert(next_version_tag != 0);
+ _PyType_SetVersion(self, next_version_tag);
}
}
else {
@@ -10020,6 +10047,11 @@ tp_new_wrapper(PyObject *self, PyObject *args, PyObject *kwds)
/* If staticbase is NULL now, it is a really weird type.
In the spirit of backwards compatibility (?), just shut up. */
if (staticbase && staticbase->tp_new != type->tp_new) {
+ if (staticbase->tp_new == NULL) {
+ PyErr_Format(PyExc_TypeError,
+ "cannot create '%s' instances", subtype->tp_name);
+ return NULL;
+ }
PyErr_Format(PyExc_TypeError,
"%s.__new__(%s) is not safe, use %s.__new__()",
type->tp_name,
diff --git a/Objects/unionobject.c b/Objects/unionobject.c
index 00ca5b9bf80..2206ed80ef0 100644
--- a/Objects/unionobject.c
+++ b/Objects/unionobject.c
@@ -4,6 +4,7 @@
#include "pycore_typevarobject.h" // _PyTypeAlias_Type, _Py_typing_type_repr
#include "pycore_unicodeobject.h" // _PyUnicode_EqualToASCIIString
#include "pycore_unionobject.h"
+#include "pycore_weakref.h" // FT_CLEAR_WEAKREFS()
typedef struct {
@@ -21,9 +22,7 @@ unionobject_dealloc(PyObject *self)
unionobject *alias = (unionobject *)self;
_PyObject_GC_UNTRACK(self);
- if (alias->weakreflist != NULL) {
- PyObject_ClearWeakRefs((PyObject *)alias);
- }
+ FT_CLEAR_WEAKREFS(self, alias->weakreflist);
Py_XDECREF(alias->args);
Py_XDECREF(alias->hashable_args);
diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj
index 32a8f2dbad3..b911c938563 100644
--- a/PCbuild/pythoncore.vcxproj
+++ b/PCbuild/pythoncore.vcxproj
@@ -419,8 +419,12 @@
<ClCompile Include="..\Modules\_abc.c" />
<ClCompile Include="..\Modules\_bisectmodule.c" />
<ClCompile Include="..\Modules\blake2module.c">
- <PreprocessorDefinitions Condition="'$(Platform)' == 'x64'">HACL_CAN_COMPILE_SIMD128;%(PreprocessorDefinitions)</PreprocessorDefinitions>
- <PreprocessorDefinitions Condition="'$(Platform)' == 'x64'">HACL_CAN_COMPILE_SIMD256;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+ <PreprocessorDefinitions Condition="'$(Platform)' == 'x64'">_Py_HACL_CAN_COMPILE_VEC128;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+ <PreprocessorDefinitions Condition="'$(Platform)' == 'x64'">_Py_HACL_CAN_COMPILE_VEC256;%(PreprocessorDefinitions)</PreprocessorDefinitions>
</ClCompile>
<ClCompile Include="..\Modules\_codecsmodule.c" />
<ClCompile Include="..\Modules\_collectionsmodule.c" />
diff --git a/Parser/parser.c b/Parser/parser.c
index ee0aeb4e187..58fea894a79 100644
--- a/Parser/parser.c
+++ b/Parser/parser.c
@@ -21,59 +21,59 @@ static KeywordToken *reserved_keywords[] = {
(KeywordToken[]) {{NULL, -1}},
(KeywordToken[]) {{NULL, -1}},
(KeywordToken[]) {
- {"if", 686},
- {"as", 684},
- {"in", 699},
- {"or", 588},
- {"is", 596},
+ {"if", 687},
+ {"as", 685},
+ {"in", 700},
+ {"or", 589},
+ {"is", 597},
{NULL, -1},
},
(KeywordToken[]) {
- {"del", 629},
- {"def", 703},
- {"for", 698},
- {"try", 660},
- {"and", 589},
- {"not", 707},
+ {"del", 630},
+ {"def", 704},
+ {"for", 699},
+ {"try", 661},
+ {"and", 590},
+ {"not", 708},
{NULL, -1},
},
(KeywordToken[]) {
- {"from", 637},
- {"pass", 526},
- {"with", 651},
- {"elif", 691},
- {"else", 690},
- {"None", 623},
- {"True", 622},
+ {"from", 638},
+ {"pass", 527},
+ {"with", 652},
+ {"elif", 692},
+ {"else", 691},
+ {"None", 624},
+ {"True", 623},
{NULL, -1},
},
(KeywordToken[]) {
- {"raise", 627},
- {"yield", 587},
- {"break", 527},
- {"async", 702},
- {"class", 705},
- {"while", 693},
- {"False", 624},
- {"await", 597},
+ {"raise", 628},
+ {"yield", 588},
+ {"break", 528},
+ {"async", 703},
+ {"class", 706},
+ {"while", 694},
+ {"False", 625},
+ {"await", 598},
{NULL, -1},
},
(KeywordToken[]) {
{"return", 522},
- {"import", 638},
- {"assert", 532},
- {"global", 529},
- {"except", 681},
- {"lambda", 621},
+ {"import", 639},
+ {"assert", 533},
+ {"global", 530},
+ {"except", 682},
+ {"lambda", 622},
{NULL, -1},
},
(KeywordToken[]) {
- {"finally", 677},
+ {"finally", 678},
{NULL, -1},
},
(KeywordToken[]) {
- {"continue", 528},
- {"nonlocal", 530},
+ {"continue", 529},
+ {"nonlocal", 531},
{NULL, -1},
},
};
@@ -364,114 +364,114 @@ static char *soft_keywords[] = {
#define _tmp_10_type 1277
#define _tmp_11_type 1278
#define _loop1_12_type 1279
-#define _tmp_13_type 1280
-#define _loop0_14_type 1281
-#define _gather_15_type 1282
+#define _loop0_13_type 1280
+#define _gather_14_type 1281
+#define _tmp_15_type 1282
#define _tmp_16_type 1283
-#define _tmp_17_type 1284
-#define _loop0_18_type 1285
-#define _loop1_19_type 1286
-#define _loop0_20_type 1287
-#define _gather_21_type 1288
-#define _tmp_22_type 1289
-#define _loop0_23_type 1290
-#define _gather_24_type 1291
-#define _loop1_25_type 1292
+#define _loop0_17_type 1284
+#define _loop1_18_type 1285
+#define _loop0_19_type 1286
+#define _gather_20_type 1287
+#define _tmp_21_type 1288
+#define _loop0_22_type 1289
+#define _gather_23_type 1290
+#define _loop1_24_type 1291
+#define _tmp_25_type 1292
#define _tmp_26_type 1293
-#define _tmp_27_type 1294
+#define _loop0_27_type 1294
#define _loop0_28_type 1295
-#define _loop0_29_type 1296
+#define _loop1_29_type 1296
#define _loop1_30_type 1297
-#define _loop1_31_type 1298
-#define _loop0_32_type 1299
-#define _loop1_33_type 1300
-#define _loop0_34_type 1301
-#define _gather_35_type 1302
-#define _tmp_36_type 1303
+#define _loop0_31_type 1298
+#define _loop1_32_type 1299
+#define _loop0_33_type 1300
+#define _gather_34_type 1301
+#define _tmp_35_type 1302
+#define _loop1_36_type 1303
#define _loop1_37_type 1304
#define _loop1_38_type 1305
-#define _loop1_39_type 1306
-#define _loop0_40_type 1307
-#define _gather_41_type 1308
+#define _loop0_39_type 1306
+#define _gather_40_type 1307
+#define _tmp_41_type 1308
#define _tmp_42_type 1309
#define _tmp_43_type 1310
-#define _tmp_44_type 1311
-#define _loop0_45_type 1312
-#define _gather_46_type 1313
-#define _loop0_47_type 1314
-#define _gather_48_type 1315
-#define _tmp_49_type 1316
-#define _loop0_50_type 1317
-#define _gather_51_type 1318
-#define _loop0_52_type 1319
-#define _gather_53_type 1320
-#define _loop0_54_type 1321
-#define _gather_55_type 1322
+#define _loop0_44_type 1311
+#define _gather_45_type 1312
+#define _loop0_46_type 1313
+#define _gather_47_type 1314
+#define _tmp_48_type 1315
+#define _loop0_49_type 1316
+#define _gather_50_type 1317
+#define _loop0_51_type 1318
+#define _gather_52_type 1319
+#define _loop0_53_type 1320
+#define _gather_54_type 1321
+#define _loop1_55_type 1322
#define _loop1_56_type 1323
-#define _loop1_57_type 1324
-#define _loop0_58_type 1325
-#define _gather_59_type 1326
+#define _loop0_57_type 1324
+#define _gather_58_type 1325
+#define _loop1_59_type 1326
#define _loop1_60_type 1327
#define _loop1_61_type 1328
-#define _loop1_62_type 1329
-#define _tmp_63_type 1330
-#define _loop0_64_type 1331
-#define _gather_65_type 1332
+#define _tmp_62_type 1329
+#define _loop0_63_type 1330
+#define _gather_64_type 1331
+#define _tmp_65_type 1332
#define _tmp_66_type 1333
#define _tmp_67_type 1334
#define _tmp_68_type 1335
#define _tmp_69_type 1336
-#define _tmp_70_type 1337
+#define _loop0_70_type 1337
#define _loop0_71_type 1338
-#define _loop0_72_type 1339
+#define _loop1_72_type 1339
#define _loop1_73_type 1340
-#define _loop1_74_type 1341
-#define _loop0_75_type 1342
-#define _loop1_76_type 1343
+#define _loop0_74_type 1341
+#define _loop1_75_type 1342
+#define _loop0_76_type 1343
#define _loop0_77_type 1344
#define _loop0_78_type 1345
#define _loop0_79_type 1346
-#define _loop0_80_type 1347
-#define _loop1_81_type 1348
-#define _tmp_82_type 1349
-#define _loop0_83_type 1350
-#define _gather_84_type 1351
-#define _loop1_85_type 1352
-#define _loop0_86_type 1353
-#define _tmp_87_type 1354
-#define _loop0_88_type 1355
-#define _gather_89_type 1356
-#define _tmp_90_type 1357
-#define _loop0_91_type 1358
-#define _gather_92_type 1359
-#define _loop0_93_type 1360
-#define _gather_94_type 1361
+#define _loop1_80_type 1347
+#define _tmp_81_type 1348
+#define _loop0_82_type 1349
+#define _gather_83_type 1350
+#define _loop1_84_type 1351
+#define _loop0_85_type 1352
+#define _tmp_86_type 1353
+#define _loop0_87_type 1354
+#define _gather_88_type 1355
+#define _tmp_89_type 1356
+#define _loop0_90_type 1357
+#define _gather_91_type 1358
+#define _loop0_92_type 1359
+#define _gather_93_type 1360
+#define _loop0_94_type 1361
#define _loop0_95_type 1362
-#define _loop0_96_type 1363
-#define _gather_97_type 1364
-#define _loop1_98_type 1365
-#define _tmp_99_type 1366
-#define _loop0_100_type 1367
-#define _gather_101_type 1368
-#define _loop0_102_type 1369
-#define _gather_103_type 1370
+#define _gather_96_type 1363
+#define _loop1_97_type 1364
+#define _tmp_98_type 1365
+#define _loop0_99_type 1366
+#define _gather_100_type 1367
+#define _loop0_101_type 1368
+#define _gather_102_type 1369
+#define _tmp_103_type 1370
#define _tmp_104_type 1371
-#define _tmp_105_type 1372
-#define _loop0_106_type 1373
-#define _gather_107_type 1374
+#define _loop0_105_type 1372
+#define _gather_106_type 1373
+#define _tmp_107_type 1374
#define _tmp_108_type 1375
#define _tmp_109_type 1376
#define _tmp_110_type 1377
#define _tmp_111_type 1378
-#define _tmp_112_type 1379
-#define _loop1_113_type 1380
+#define _loop1_112_type 1379
+#define _tmp_113_type 1380
#define _tmp_114_type 1381
#define _tmp_115_type 1382
#define _tmp_116_type 1383
#define _tmp_117_type 1384
-#define _tmp_118_type 1385
+#define _loop0_118_type 1385
#define _loop0_119_type 1386
-#define _loop0_120_type 1387
+#define _tmp_120_type 1387
#define _tmp_121_type 1388
#define _tmp_122_type 1389
#define _tmp_123_type 1390
@@ -480,22 +480,22 @@ static char *soft_keywords[] = {
#define _tmp_126_type 1393
#define _tmp_127_type 1394
#define _tmp_128_type 1395
-#define _tmp_129_type 1396
-#define _loop0_130_type 1397
-#define _gather_131_type 1398
+#define _loop0_129_type 1396
+#define _gather_130_type 1397
+#define _tmp_131_type 1398
#define _tmp_132_type 1399
#define _tmp_133_type 1400
#define _tmp_134_type 1401
-#define _tmp_135_type 1402
-#define _loop0_136_type 1403
-#define _gather_137_type 1404
-#define _tmp_138_type 1405
-#define _loop0_139_type 1406
-#define _gather_140_type 1407
-#define _loop0_141_type 1408
-#define _gather_142_type 1409
-#define _tmp_143_type 1410
-#define _loop0_144_type 1411
+#define _loop0_135_type 1402
+#define _gather_136_type 1403
+#define _tmp_137_type 1404
+#define _loop0_138_type 1405
+#define _gather_139_type 1406
+#define _loop0_140_type 1407
+#define _gather_141_type 1408
+#define _tmp_142_type 1409
+#define _loop0_143_type 1410
+#define _tmp_144_type 1411
#define _tmp_145_type 1412
#define _tmp_146_type 1413
#define _tmp_147_type 1414
@@ -521,13 +521,12 @@ static char *soft_keywords[] = {
#define _tmp_167_type 1434
#define _tmp_168_type 1435
#define _tmp_169_type 1436
-#define _tmp_170_type 1437
-#define _loop0_171_type 1438
+#define _loop0_170_type 1437
+#define _tmp_171_type 1438
#define _tmp_172_type 1439
#define _tmp_173_type 1440
#define _tmp_174_type 1441
#define _tmp_175_type 1442
-#define _tmp_176_type 1443
static mod_ty file_rule(Parser *p);
static mod_ty interactive_rule(Parser *p);
@@ -809,114 +808,114 @@ static void *_tmp_9_rule(Parser *p);
static void *_tmp_10_rule(Parser *p);
static void *_tmp_11_rule(Parser *p);
static asdl_seq *_loop1_12_rule(Parser *p);
-static void *_tmp_13_rule(Parser *p);
-static asdl_seq *_loop0_14_rule(Parser *p);
-static asdl_seq *_gather_15_rule(Parser *p);
+static asdl_seq *_loop0_13_rule(Parser *p);
+static asdl_seq *_gather_14_rule(Parser *p);
+static void *_tmp_15_rule(Parser *p);
static void *_tmp_16_rule(Parser *p);
-static void *_tmp_17_rule(Parser *p);
-static asdl_seq *_loop0_18_rule(Parser *p);
-static asdl_seq *_loop1_19_rule(Parser *p);
-static asdl_seq *_loop0_20_rule(Parser *p);
-static asdl_seq *_gather_21_rule(Parser *p);
-static void *_tmp_22_rule(Parser *p);
-static asdl_seq *_loop0_23_rule(Parser *p);
-static asdl_seq *_gather_24_rule(Parser *p);
-static asdl_seq *_loop1_25_rule(Parser *p);
+static asdl_seq *_loop0_17_rule(Parser *p);
+static asdl_seq *_loop1_18_rule(Parser *p);
+static asdl_seq *_loop0_19_rule(Parser *p);
+static asdl_seq *_gather_20_rule(Parser *p);
+static void *_tmp_21_rule(Parser *p);
+static asdl_seq *_loop0_22_rule(Parser *p);
+static asdl_seq *_gather_23_rule(Parser *p);
+static asdl_seq *_loop1_24_rule(Parser *p);
+static void *_tmp_25_rule(Parser *p);
static void *_tmp_26_rule(Parser *p);
-static void *_tmp_27_rule(Parser *p);
+static asdl_seq *_loop0_27_rule(Parser *p);
static asdl_seq *_loop0_28_rule(Parser *p);
-static asdl_seq *_loop0_29_rule(Parser *p);
+static asdl_seq *_loop1_29_rule(Parser *p);
static asdl_seq *_loop1_30_rule(Parser *p);
-static asdl_seq *_loop1_31_rule(Parser *p);
-static asdl_seq *_loop0_32_rule(Parser *p);
-static asdl_seq *_loop1_33_rule(Parser *p);
-static asdl_seq *_loop0_34_rule(Parser *p);
-static asdl_seq *_gather_35_rule(Parser *p);
-static void *_tmp_36_rule(Parser *p);
+static asdl_seq *_loop0_31_rule(Parser *p);
+static asdl_seq *_loop1_32_rule(Parser *p);
+static asdl_seq *_loop0_33_rule(Parser *p);
+static asdl_seq *_gather_34_rule(Parser *p);
+static void *_tmp_35_rule(Parser *p);
+static asdl_seq *_loop1_36_rule(Parser *p);
static asdl_seq *_loop1_37_rule(Parser *p);
static asdl_seq *_loop1_38_rule(Parser *p);
-static asdl_seq *_loop1_39_rule(Parser *p);
-static asdl_seq *_loop0_40_rule(Parser *p);
-static asdl_seq *_gather_41_rule(Parser *p);
+static asdl_seq *_loop0_39_rule(Parser *p);
+static asdl_seq *_gather_40_rule(Parser *p);
+static void *_tmp_41_rule(Parser *p);
static void *_tmp_42_rule(Parser *p);
static void *_tmp_43_rule(Parser *p);
-static void *_tmp_44_rule(Parser *p);
-static asdl_seq *_loop0_45_rule(Parser *p);
-static asdl_seq *_gather_46_rule(Parser *p);
-static asdl_seq *_loop0_47_rule(Parser *p);
-static asdl_seq *_gather_48_rule(Parser *p);
-static void *_tmp_49_rule(Parser *p);
-static asdl_seq *_loop0_50_rule(Parser *p);
-static asdl_seq *_gather_51_rule(Parser *p);
-static asdl_seq *_loop0_52_rule(Parser *p);
-static asdl_seq *_gather_53_rule(Parser *p);
-static asdl_seq *_loop0_54_rule(Parser *p);
-static asdl_seq *_gather_55_rule(Parser *p);
+static asdl_seq *_loop0_44_rule(Parser *p);
+static asdl_seq *_gather_45_rule(Parser *p);
+static asdl_seq *_loop0_46_rule(Parser *p);
+static asdl_seq *_gather_47_rule(Parser *p);
+static void *_tmp_48_rule(Parser *p);
+static asdl_seq *_loop0_49_rule(Parser *p);
+static asdl_seq *_gather_50_rule(Parser *p);
+static asdl_seq *_loop0_51_rule(Parser *p);
+static asdl_seq *_gather_52_rule(Parser *p);
+static asdl_seq *_loop0_53_rule(Parser *p);
+static asdl_seq *_gather_54_rule(Parser *p);
+static asdl_seq *_loop1_55_rule(Parser *p);
static asdl_seq *_loop1_56_rule(Parser *p);
-static asdl_seq *_loop1_57_rule(Parser *p);
-static asdl_seq *_loop0_58_rule(Parser *p);
-static asdl_seq *_gather_59_rule(Parser *p);
+static asdl_seq *_loop0_57_rule(Parser *p);
+static asdl_seq *_gather_58_rule(Parser *p);
+static asdl_seq *_loop1_59_rule(Parser *p);
static asdl_seq *_loop1_60_rule(Parser *p);
static asdl_seq *_loop1_61_rule(Parser *p);
-static asdl_seq *_loop1_62_rule(Parser *p);
-static void *_tmp_63_rule(Parser *p);
-static asdl_seq *_loop0_64_rule(Parser *p);
-static asdl_seq *_gather_65_rule(Parser *p);
+static void *_tmp_62_rule(Parser *p);
+static asdl_seq *_loop0_63_rule(Parser *p);
+static asdl_seq *_gather_64_rule(Parser *p);
+static void *_tmp_65_rule(Parser *p);
static void *_tmp_66_rule(Parser *p);
static void *_tmp_67_rule(Parser *p);
static void *_tmp_68_rule(Parser *p);
static void *_tmp_69_rule(Parser *p);
-static void *_tmp_70_rule(Parser *p);
+static asdl_seq *_loop0_70_rule(Parser *p);
static asdl_seq *_loop0_71_rule(Parser *p);
-static asdl_seq *_loop0_72_rule(Parser *p);
+static asdl_seq *_loop1_72_rule(Parser *p);
static asdl_seq *_loop1_73_rule(Parser *p);
-static asdl_seq *_loop1_74_rule(Parser *p);
-static asdl_seq *_loop0_75_rule(Parser *p);
-static asdl_seq *_loop1_76_rule(Parser *p);
+static asdl_seq *_loop0_74_rule(Parser *p);
+static asdl_seq *_loop1_75_rule(Parser *p);
+static asdl_seq *_loop0_76_rule(Parser *p);
static asdl_seq *_loop0_77_rule(Parser *p);
static asdl_seq *_loop0_78_rule(Parser *p);
static asdl_seq *_loop0_79_rule(Parser *p);
-static asdl_seq *_loop0_80_rule(Parser *p);
-static asdl_seq *_loop1_81_rule(Parser *p);
-static void *_tmp_82_rule(Parser *p);
-static asdl_seq *_loop0_83_rule(Parser *p);
-static asdl_seq *_gather_84_rule(Parser *p);
-static asdl_seq *_loop1_85_rule(Parser *p);
-static asdl_seq *_loop0_86_rule(Parser *p);
-static void *_tmp_87_rule(Parser *p);
-static asdl_seq *_loop0_88_rule(Parser *p);
-static asdl_seq *_gather_89_rule(Parser *p);
-static void *_tmp_90_rule(Parser *p);
-static asdl_seq *_loop0_91_rule(Parser *p);
-static asdl_seq *_gather_92_rule(Parser *p);
-static asdl_seq *_loop0_93_rule(Parser *p);
-static asdl_seq *_gather_94_rule(Parser *p);
+static asdl_seq *_loop1_80_rule(Parser *p);
+static void *_tmp_81_rule(Parser *p);
+static asdl_seq *_loop0_82_rule(Parser *p);
+static asdl_seq *_gather_83_rule(Parser *p);
+static asdl_seq *_loop1_84_rule(Parser *p);
+static asdl_seq *_loop0_85_rule(Parser *p);
+static void *_tmp_86_rule(Parser *p);
+static asdl_seq *_loop0_87_rule(Parser *p);
+static asdl_seq *_gather_88_rule(Parser *p);
+static void *_tmp_89_rule(Parser *p);
+static asdl_seq *_loop0_90_rule(Parser *p);
+static asdl_seq *_gather_91_rule(Parser *p);
+static asdl_seq *_loop0_92_rule(Parser *p);
+static asdl_seq *_gather_93_rule(Parser *p);
+static asdl_seq *_loop0_94_rule(Parser *p);
static asdl_seq *_loop0_95_rule(Parser *p);
-static asdl_seq *_loop0_96_rule(Parser *p);
-static asdl_seq *_gather_97_rule(Parser *p);
-static asdl_seq *_loop1_98_rule(Parser *p);
-static void *_tmp_99_rule(Parser *p);
-static asdl_seq *_loop0_100_rule(Parser *p);
-static asdl_seq *_gather_101_rule(Parser *p);
-static asdl_seq *_loop0_102_rule(Parser *p);
-static asdl_seq *_gather_103_rule(Parser *p);
+static asdl_seq *_gather_96_rule(Parser *p);
+static asdl_seq *_loop1_97_rule(Parser *p);
+static void *_tmp_98_rule(Parser *p);
+static asdl_seq *_loop0_99_rule(Parser *p);
+static asdl_seq *_gather_100_rule(Parser *p);
+static asdl_seq *_loop0_101_rule(Parser *p);
+static asdl_seq *_gather_102_rule(Parser *p);
+static void *_tmp_103_rule(Parser *p);
static void *_tmp_104_rule(Parser *p);
-static void *_tmp_105_rule(Parser *p);
-static asdl_seq *_loop0_106_rule(Parser *p);
-static asdl_seq *_gather_107_rule(Parser *p);
+static asdl_seq *_loop0_105_rule(Parser *p);
+static asdl_seq *_gather_106_rule(Parser *p);
+static void *_tmp_107_rule(Parser *p);
static void *_tmp_108_rule(Parser *p);
static void *_tmp_109_rule(Parser *p);
static void *_tmp_110_rule(Parser *p);
static void *_tmp_111_rule(Parser *p);
-static void *_tmp_112_rule(Parser *p);
-static asdl_seq *_loop1_113_rule(Parser *p);
+static asdl_seq *_loop1_112_rule(Parser *p);
+static void *_tmp_113_rule(Parser *p);
static void *_tmp_114_rule(Parser *p);
static void *_tmp_115_rule(Parser *p);
static void *_tmp_116_rule(Parser *p);
static void *_tmp_117_rule(Parser *p);
-static void *_tmp_118_rule(Parser *p);
+static asdl_seq *_loop0_118_rule(Parser *p);
static asdl_seq *_loop0_119_rule(Parser *p);
-static asdl_seq *_loop0_120_rule(Parser *p);
+static void *_tmp_120_rule(Parser *p);
static void *_tmp_121_rule(Parser *p);
static void *_tmp_122_rule(Parser *p);
static void *_tmp_123_rule(Parser *p);
@@ -925,22 +924,22 @@ static void *_tmp_125_rule(Parser *p);
static void *_tmp_126_rule(Parser *p);
static void *_tmp_127_rule(Parser *p);
static void *_tmp_128_rule(Parser *p);
-static void *_tmp_129_rule(Parser *p);
-static asdl_seq *_loop0_130_rule(Parser *p);
-static asdl_seq *_gather_131_rule(Parser *p);
+static asdl_seq *_loop0_129_rule(Parser *p);
+static asdl_seq *_gather_130_rule(Parser *p);
+static void *_tmp_131_rule(Parser *p);
static void *_tmp_132_rule(Parser *p);
static void *_tmp_133_rule(Parser *p);
static void *_tmp_134_rule(Parser *p);
-static void *_tmp_135_rule(Parser *p);
-static asdl_seq *_loop0_136_rule(Parser *p);
-static asdl_seq *_gather_137_rule(Parser *p);
-static void *_tmp_138_rule(Parser *p);
-static asdl_seq *_loop0_139_rule(Parser *p);
-static asdl_seq *_gather_140_rule(Parser *p);
-static asdl_seq *_loop0_141_rule(Parser *p);
-static asdl_seq *_gather_142_rule(Parser *p);
-static void *_tmp_143_rule(Parser *p);
-static asdl_seq *_loop0_144_rule(Parser *p);
+static asdl_seq *_loop0_135_rule(Parser *p);
+static asdl_seq *_gather_136_rule(Parser *p);
+static void *_tmp_137_rule(Parser *p);
+static asdl_seq *_loop0_138_rule(Parser *p);
+static asdl_seq *_gather_139_rule(Parser *p);
+static asdl_seq *_loop0_140_rule(Parser *p);
+static asdl_seq *_gather_141_rule(Parser *p);
+static void *_tmp_142_rule(Parser *p);
+static asdl_seq *_loop0_143_rule(Parser *p);
+static void *_tmp_144_rule(Parser *p);
static void *_tmp_145_rule(Parser *p);
static void *_tmp_146_rule(Parser *p);
static void *_tmp_147_rule(Parser *p);
@@ -966,13 +965,12 @@ static void *_tmp_166_rule(Parser *p);
static void *_tmp_167_rule(Parser *p);
static void *_tmp_168_rule(Parser *p);
static void *_tmp_169_rule(Parser *p);
-static void *_tmp_170_rule(Parser *p);
-static asdl_seq *_loop0_171_rule(Parser *p);
+static asdl_seq *_loop0_170_rule(Parser *p);
+static void *_tmp_171_rule(Parser *p);
static void *_tmp_172_rule(Parser *p);
static void *_tmp_173_rule(Parser *p);
static void *_tmp_174_rule(Parser *p);
static void *_tmp_175_rule(Parser *p);
-static void *_tmp_176_rule(Parser *p);
// file: statements? $
@@ -1700,7 +1698,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'raise' raise_stmt"));
stmt_ty raise_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 627) // token='raise'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 628) // token='raise'
&&
(raise_stmt_var = raise_stmt_rule(p)) // raise_stmt
)
@@ -1721,7 +1719,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'pass' pass_stmt"));
stmt_ty pass_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 526) // token='pass'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 527) // token='pass'
&&
(pass_stmt_var = pass_stmt_rule(p)) // pass_stmt
)
@@ -1742,7 +1740,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt"));
stmt_ty del_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 629) // token='del'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 630) // token='del'
&&
(del_stmt_var = del_stmt_rule(p)) // del_stmt
)
@@ -1763,7 +1761,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'yield' yield_stmt"));
stmt_ty yield_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 587) // token='yield'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 588) // token='yield'
&&
(yield_stmt_var = yield_stmt_rule(p)) // yield_stmt
)
@@ -1784,7 +1782,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'assert' assert_stmt"));
stmt_ty assert_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 532) // token='assert'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 533) // token='assert'
&&
(assert_stmt_var = assert_stmt_rule(p)) // assert_stmt
)
@@ -1805,7 +1803,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'break' break_stmt"));
stmt_ty break_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 527) // token='break'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 528) // token='break'
&&
(break_stmt_var = break_stmt_rule(p)) // break_stmt
)
@@ -1826,7 +1824,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'continue' continue_stmt"));
stmt_ty continue_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 528) // token='continue'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 529) // token='continue'
&&
(continue_stmt_var = continue_stmt_rule(p)) // continue_stmt
)
@@ -1847,7 +1845,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'global' global_stmt"));
stmt_ty global_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 529) // token='global'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 530) // token='global'
&&
(global_stmt_var = global_stmt_rule(p)) // global_stmt
)
@@ -1868,7 +1866,7 @@ simple_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'nonlocal' nonlocal_stmt"));
stmt_ty nonlocal_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 530) // token='nonlocal'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 531) // token='nonlocal'
&&
(nonlocal_stmt_var = nonlocal_stmt_rule(p)) // nonlocal_stmt
)
@@ -1938,7 +1936,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt"));
stmt_ty if_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 686) // token='if'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 687) // token='if'
&&
(if_stmt_var = if_stmt_rule(p)) // if_stmt
)
@@ -2022,7 +2020,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt"));
stmt_ty try_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 660) // token='try'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 661) // token='try'
&&
(try_stmt_var = try_stmt_rule(p)) // try_stmt
)
@@ -2043,7 +2041,7 @@ compound_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt"));
stmt_ty while_stmt_var;
if (
- _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 693) // token='while'
+ _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 694) // token='while'
&&
(while_stmt_var = while_stmt_rule(p)) // while_stmt
)
@@ -2769,7 +2767,11 @@ return_stmt_rule(Parser *p)
return _res;
}
-// raise_stmt: invalid_raise_stmt | 'raise' expression ['from' expression] | 'raise'
+// raise_stmt:
+// | 'raise' expression 'from' expression
+// | invalid_raise_stmt
+// | 'raise' expression
+// | 'raise'
static stmt_ty
raise_stmt_rule(Parser *p)
{
@@ -2791,6 +2793,48 @@ raise_stmt_rule(Parser *p)
UNUSED(_start_lineno); // Only used by EXTRA macro
int _start_col_offset = p->tokens[_mark]->col_offset;
UNUSED(_start_col_offset); // Only used by EXTRA macro
+ { // 'raise' expression 'from' expression
+ if (p->error_indicator) {
+ p->level--;
+ return NULL;
+ }
+ D(fprintf(stderr, "%*c> raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise' expression 'from' expression"));
+ Token * _keyword;
+ Token * _keyword_1;
+ expr_ty a;
+ expr_ty b;
+ if (
+ (_keyword = _PyPegen_expect_token(p, 628)) // token='raise'
+ &&
+ (a = expression_rule(p)) // expression
+ &&
+ (_keyword_1 = _PyPegen_expect_token(p, 638)) // token='from'
+ &&
+ (b = expression_rule(p)) // expression
+ )
+ {
+ D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise' expression 'from' expression"));
+ Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
+ if (_token == NULL) {
+ p->level--;
+ return NULL;
+ }
+ int _end_lineno = _token->end_lineno;
+ UNUSED(_end_lineno); // Only used by EXTRA macro
+ int _end_col_offset = _token->end_col_offset;
+ UNUSED(_end_col_offset); // Only used by EXTRA macro
+ _res = _PyAST_Raise ( a , b , EXTRA );
+ if (_res == NULL && PyErr_Occurred()) {
+ p->error_indicator = 1;
+ p->level--;
+ return NULL;
+ }
+ goto done;
+ }
+ p->mark = _mark;
+ D(fprintf(stderr, "%*c%s raise_stmt[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'raise' expression 'from' expression"));
+ }
if (p->call_invalid_rules) { // invalid_raise_stmt
if (p->error_indicator) {
p->level--;
@@ -2810,24 +2854,21 @@ raise_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c%s raise_stmt[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_raise_stmt"));
}
- { // 'raise' expression ['from' expression]
+ { // 'raise' expression
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise' expression ['from' expression]"));
+ D(fprintf(stderr, "%*c> raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise' expression"));
Token * _keyword;
expr_ty a;
- void *b;
if (
- (_keyword = _PyPegen_expect_token(p, 627)) // token='raise'
+ (_keyword = _PyPegen_expect_token(p, 628)) // token='raise'
&&
(a = expression_rule(p)) // expression
- &&
- (b = _tmp_13_rule(p), !p->error_indicator) // ['from' expression]
)
{
- D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise' expression ['from' expression]"));
+ D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise' expression"));
Token *_token = _PyPegen_get_last_nonnwhitespace_token(p);
if (_token == NULL) {
p->level--;
@@ -2837,7 +2878,7 @@ raise_stmt_rule(Parser *p)
UNUSED(_end_lineno); // Only used by EXTRA macro
int _end_col_offset = _token->end_col_offset;
UNUSED(_end_col_offset); // Only used by EXTRA macro
- _res = _PyAST_Raise ( a , b , EXTRA );
+ _res = _PyAST_Raise ( a , NULL , EXTRA );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
p->level--;
@@ -2847,7 +2888,7 @@ raise_stmt_rule(Parser *p)
}
p->mark = _mark;
D(fprintf(stderr, "%*c%s raise_stmt[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'raise' expression ['from' expression]"));
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'raise' expression"));
}
{ // 'raise'
if (p->error_indicator) {
@@ -2857,7 +2898,7 @@ raise_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 627)) // token='raise'
+ (_keyword = _PyPegen_expect_token(p, 628)) // token='raise'
)
{
D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise'"));
@@ -2918,7 +2959,7 @@ pass_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> pass_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'pass'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 526)) // token='pass'
+ (_keyword = _PyPegen_expect_token(p, 527)) // token='pass'
)
{
D(fprintf(stderr, "%*c+ pass_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'pass'"));
@@ -2979,7 +3020,7 @@ break_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> break_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'break'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 527)) // token='break'
+ (_keyword = _PyPegen_expect_token(p, 528)) // token='break'
)
{
D(fprintf(stderr, "%*c+ break_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'break'"));
@@ -3040,7 +3081,7 @@ continue_stmt_rule(Parser *p)
D(fprintf(stderr, "%*c> continue_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'continue'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 528)) // token='continue'
+ (_keyword = _PyPegen_expect_token(p, 529)) // token='continue'
)
{
D(fprintf(stderr, "%*c+ continue_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'continue'"));
@@ -3102,9 +3143,9 @@ global_stmt_rule(Parser *p)
Token * _keyword;
asdl_expr_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 529)) // token='global'
+ (_keyword = _PyPegen_expect_token(p, 530)) // token='global'
&&
- (a = (asdl_expr_seq*)_gather_15_rule(p)) // ','.NAME+
+ (a = (asdl_expr_seq*)_gather_14_rule(p)) // ','.NAME+
)
{
D(fprintf(stderr, "%*c+ global_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'global' ','.NAME+"));
@@ -3166,9 +3207,9 @@ nonlocal_stmt_rule(Parser *p)
Token * _keyword;
asdl_expr_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 530)) // token='nonlocal'
+ (_keyword = _PyPegen_expect_token(p, 531)) // token='nonlocal'
&&
- (a = (asdl_expr_seq*)_gather_15_rule(p)) // ','.NAME+
+ (a = (asdl_expr_seq*)_gather_14_rule(p)) // ','.NAME+
)
{
D(fprintf(stderr, "%*c+ nonlocal_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'nonlocal' ','.NAME+"));
@@ -3230,11 +3271,11 @@ del_stmt_rule(Parser *p)
Token * _keyword;
asdl_expr_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 629)) // token='del'
+ (_keyword = _PyPegen_expect_token(p, 630)) // token='del'
&&
(a = del_targets_rule(p)) // del_targets
&&
- _PyPegen_lookahead(1, _tmp_16_rule, p)
+ _PyPegen_lookahead(1, _tmp_15_rule, p)
)
{
D(fprintf(stderr, "%*c+ del_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'del' del_targets &(';' | NEWLINE)"));
@@ -3377,11 +3418,11 @@ assert_stmt_rule(Parser *p)
expr_ty a;
void *b;
if (
- (_keyword = _PyPegen_expect_token(p, 532)) // token='assert'
+ (_keyword = _PyPegen_expect_token(p, 533)) // token='assert'
&&
(a = expression_rule(p)) // expression
&&
- (b = _tmp_17_rule(p), !p->error_indicator) // [',' expression]
+ (b = _tmp_16_rule(p), !p->error_indicator) // [',' expression]
)
{
D(fprintf(stderr, "%*c+ assert_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'assert' expression [',' expression]"));
@@ -3519,7 +3560,7 @@ import_name_rule(Parser *p)
Token * _keyword;
asdl_alias_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 638)) // token='import'
+ (_keyword = _PyPegen_expect_token(p, 639)) // token='import'
&&
(a = dotted_as_names_rule(p)) // dotted_as_names
)
@@ -3588,13 +3629,13 @@ import_from_rule(Parser *p)
expr_ty b;
asdl_alias_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 637)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 638)) // token='from'
&&
- (a = _loop0_18_rule(p)) // (('.' | '...'))*
+ (a = _loop0_17_rule(p)) // (('.' | '...'))*
&&
(b = dotted_name_rule(p)) // dotted_name
&&
- (_keyword_1 = _PyPegen_expect_token(p, 638)) // token='import'
+ (_keyword_1 = _PyPegen_expect_token(p, 639)) // token='import'
&&
(c = import_from_targets_rule(p)) // import_from_targets
)
@@ -3632,11 +3673,11 @@ import_from_rule(Parser *p)
asdl_seq * a;
asdl_alias_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 637)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 638)) // token='from'
&&
- (a = _loop1_19_rule(p)) // (('.' | '...'))+
+ (a = _loop1_18_rule(p)) // (('.' | '...'))+
&&
- (_keyword_1 = _PyPegen_expect_token(p, 638)) // token='import'
+ (_keyword_1 = _PyPegen_expect_token(p, 639)) // token='import'
&&
(b = import_from_targets_rule(p)) // import_from_targets
)
@@ -3829,7 +3870,7 @@ import_from_as_names_rule(Parser *p)
D(fprintf(stderr, "%*c> import_from_as_names[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.import_from_as_name+"));
asdl_alias_seq* a;
if (
- (a = (asdl_alias_seq*)_gather_21_rule(p)) // ','.import_from_as_name+
+ (a = (asdl_alias_seq*)_gather_20_rule(p)) // ','.import_from_as_name+
)
{
D(fprintf(stderr, "%*c+ import_from_as_names[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.import_from_as_name+"));
@@ -3903,7 +3944,7 @@ import_from_as_name_rule(Parser *p)
if (
(a = _PyPegen_name_token(p)) // NAME
&&
- (b = _tmp_22_rule(p), !p->error_indicator) // ['as' NAME]
+ (b = _tmp_21_rule(p), !p->error_indicator) // ['as' NAME]
)
{
D(fprintf(stderr, "%*c+ import_from_as_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ['as' NAME]"));
@@ -3955,7 +3996,7 @@ dotted_as_names_rule(Parser *p)
D(fprintf(stderr, "%*c> dotted_as_names[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.dotted_as_name+"));
asdl_alias_seq* a;
if (
- (a = (asdl_alias_seq*)_gather_24_rule(p)) // ','.dotted_as_name+
+ (a = (asdl_alias_seq*)_gather_23_rule(p)) // ','.dotted_as_name+
)
{
D(fprintf(stderr, "%*c+ dotted_as_names[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.dotted_as_name+"));
@@ -4029,7 +4070,7 @@ dotted_as_name_rule(Parser *p)
if (
(a = dotted_name_rule(p)) // dotted_name
&&
- (b = _tmp_22_rule(p), !p->error_indicator) // ['as' NAME]
+ (b = _tmp_21_rule(p), !p->error_indicator) // ['as' NAME]
)
{
D(fprintf(stderr, "%*c+ dotted_as_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_name ['as' NAME]"));
@@ -4280,7 +4321,7 @@ decorators_rule(Parser *p)
D(fprintf(stderr, "%*c> decorators[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(('@' named_expression NEWLINE))+"));
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_loop1_25_rule(p)) // (('@' named_expression NEWLINE))+
+ (a = (asdl_expr_seq*)_loop1_24_rule(p)) // (('@' named_expression NEWLINE))+
)
{
D(fprintf(stderr, "%*c+ decorators[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(('@' named_expression NEWLINE))+"));
@@ -4423,13 +4464,13 @@ class_def_raw_rule(Parser *p)
asdl_stmt_seq* c;
void *t;
if (
- (_keyword = _PyPegen_expect_token(p, 705)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 706)) // token='class'
&&
(a = _PyPegen_name_token(p)) // NAME
&&
(t = type_params_rule(p), !p->error_indicator) // type_params?
&&
- (b = _tmp_26_rule(p), !p->error_indicator) // ['(' arguments? ')']
+ (b = _tmp_25_rule(p), !p->error_indicator) // ['(' arguments? ')']
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -4590,7 +4631,7 @@ function_def_raw_rule(Parser *p)
void *t;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 703)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 704)) // token='def'
&&
(n = _PyPegen_name_token(p)) // NAME
&&
@@ -4602,7 +4643,7 @@ function_def_raw_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
&&
- (a = _tmp_27_rule(p), !p->error_indicator) // ['->' expression]
+ (a = _tmp_26_rule(p), !p->error_indicator) // ['->' expression]
&&
(_literal_2 = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -4651,9 +4692,9 @@ function_def_raw_rule(Parser *p)
void *t;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 702)) // token='async'
+ (_keyword = _PyPegen_expect_token(p, 703)) // token='async'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 703)) // token='def'
+ (_keyword_1 = _PyPegen_expect_token(p, 704)) // token='def'
&&
(n = _PyPegen_name_token(p)) // NAME
&&
@@ -4665,7 +4706,7 @@ function_def_raw_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
&&
- (a = _tmp_27_rule(p), !p->error_indicator) // ['->' expression]
+ (a = _tmp_26_rule(p), !p->error_indicator) // ['->' expression]
&&
(_literal_2 = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -4790,9 +4831,9 @@ parameters_rule(Parser *p)
if (
(a = slash_no_default_rule(p)) // slash_no_default
&&
- (b = (asdl_arg_seq*)_loop0_28_rule(p)) // param_no_default*
+ (b = (asdl_arg_seq*)_loop0_27_rule(p)) // param_no_default*
&&
- (c = _loop0_29_rule(p)) // param_with_default*
+ (c = _loop0_28_rule(p)) // param_with_default*
&&
(d = star_etc_rule(p), !p->error_indicator) // star_etc?
)
@@ -4822,7 +4863,7 @@ parameters_rule(Parser *p)
if (
(a = slash_with_default_rule(p)) // slash_with_default
&&
- (b = _loop0_29_rule(p)) // param_with_default*
+ (b = _loop0_28_rule(p)) // param_with_default*
&&
(c = star_etc_rule(p), !p->error_indicator) // star_etc?
)
@@ -4850,9 +4891,9 @@ parameters_rule(Parser *p)
asdl_seq * b;
void *c;
if (
- (a = (asdl_arg_seq*)_loop1_30_rule(p)) // param_no_default+
+ (a = (asdl_arg_seq*)_loop1_29_rule(p)) // param_no_default+
&&
- (b = _loop0_29_rule(p)) // param_with_default*
+ (b = _loop0_28_rule(p)) // param_with_default*
&&
(c = star_etc_rule(p), !p->error_indicator) // star_etc?
)
@@ -4879,7 +4920,7 @@ parameters_rule(Parser *p)
asdl_seq * a;
void *b;
if (
- (a = _loop1_31_rule(p)) // param_with_default+
+ (a = _loop1_30_rule(p)) // param_with_default+
&&
(b = star_etc_rule(p), !p->error_indicator) // star_etc?
)
@@ -4950,7 +4991,7 @@ slash_no_default_rule(Parser *p)
Token * _literal_1;
asdl_arg_seq* a;
if (
- (a = (asdl_arg_seq*)_loop1_30_rule(p)) // param_no_default+
+ (a = (asdl_arg_seq*)_loop1_29_rule(p)) // param_no_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -4979,7 +5020,7 @@ slash_no_default_rule(Parser *p)
Token * _literal;
asdl_arg_seq* a;
if (
- (a = (asdl_arg_seq*)_loop1_30_rule(p)) // param_no_default+
+ (a = (asdl_arg_seq*)_loop1_29_rule(p)) // param_no_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -5031,9 +5072,9 @@ slash_with_default_rule(Parser *p)
asdl_seq * a;
asdl_seq * b;
if (
- (a = _loop0_28_rule(p)) // param_no_default*
+ (a = _loop0_27_rule(p)) // param_no_default*
&&
- (b = _loop1_31_rule(p)) // param_with_default+
+ (b = _loop1_30_rule(p)) // param_with_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -5063,9 +5104,9 @@ slash_with_default_rule(Parser *p)
asdl_seq * a;
asdl_seq * b;
if (
- (a = _loop0_28_rule(p)) // param_no_default*
+ (a = _loop0_27_rule(p)) // param_no_default*
&&
- (b = _loop1_31_rule(p)) // param_with_default+
+ (b = _loop1_30_rule(p)) // param_with_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -5143,7 +5184,7 @@ star_etc_rule(Parser *p)
&&
(a = param_no_default_rule(p)) // param_no_default
&&
- (b = _loop0_32_rule(p)) // param_maybe_default*
+ (b = _loop0_31_rule(p)) // param_maybe_default*
&&
(c = kwds_rule(p), !p->error_indicator) // kwds?
)
@@ -5176,7 +5217,7 @@ star_etc_rule(Parser *p)
&&
(a = param_no_default_star_annotation_rule(p)) // param_no_default_star_annotation
&&
- (b = _loop0_32_rule(p)) // param_maybe_default*
+ (b = _loop0_31_rule(p)) // param_maybe_default*
&&
(c = kwds_rule(p), !p->error_indicator) // kwds?
)
@@ -5209,7 +5250,7 @@ star_etc_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 12)) // token=','
&&
- (b = _loop1_33_rule(p)) // param_maybe_default+
+ (b = _loop1_32_rule(p)) // param_maybe_default+
&&
(c = kwds_rule(p), !p->error_indicator) // kwds?
)
@@ -5991,7 +6032,7 @@ if_stmt_rule(Parser *p)
asdl_stmt_seq* b;
stmt_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 686)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 687)) // token='if'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6036,7 +6077,7 @@ if_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 686)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 687)) // token='if'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6131,7 +6172,7 @@ elif_stmt_rule(Parser *p)
asdl_stmt_seq* b;
stmt_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 691)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 692)) // token='elif'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6176,7 +6217,7 @@ elif_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 691)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 692)) // token='elif'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6257,7 +6298,7 @@ else_block_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 690)) // token='else'
+ (_keyword = _PyPegen_expect_token(p, 691)) // token='else'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -6336,7 +6377,7 @@ while_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *c;
if (
- (_keyword = _PyPegen_expect_token(p, 693)) // token='while'
+ (_keyword = _PyPegen_expect_token(p, 694)) // token='while'
&&
(a = named_expression_rule(p)) // named_expression
&&
@@ -6436,11 +6477,11 @@ for_stmt_rule(Parser *p)
expr_ty t;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 698)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 699)) // token='for'
&&
(t = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 699)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 700)) // token='in'
&&
(_cut_var = 1)
&&
@@ -6498,13 +6539,13 @@ for_stmt_rule(Parser *p)
expr_ty t;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 702)) // token='async'
+ (_keyword = _PyPegen_expect_token(p, 703)) // token='async'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 698)) // token='for'
+ (_keyword_1 = _PyPegen_expect_token(p, 699)) // token='for'
&&
(t = star_targets_rule(p)) // star_targets
&&
- (_keyword_2 = _PyPegen_expect_token(p, 699)) // token='in'
+ (_keyword_2 = _PyPegen_expect_token(p, 700)) // token='in'
&&
(_cut_var = 1)
&&
@@ -6633,11 +6674,11 @@ with_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 651)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 652)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (a = (asdl_withitem_seq*)_gather_35_rule(p)) // ','.with_item+
+ (a = (asdl_withitem_seq*)_gather_34_rule(p)) // ','.with_item+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -6684,9 +6725,9 @@ with_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 651)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 652)) // token='with'
&&
- (a = (asdl_withitem_seq*)_gather_35_rule(p)) // ','.with_item+
+ (a = (asdl_withitem_seq*)_gather_34_rule(p)) // ','.with_item+
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -6733,13 +6774,13 @@ with_stmt_rule(Parser *p)
asdl_withitem_seq* a;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 702)) // token='async'
+ (_keyword = _PyPegen_expect_token(p, 703)) // token='async'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='with'
+ (_keyword_1 = _PyPegen_expect_token(p, 652)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (a = (asdl_withitem_seq*)_gather_35_rule(p)) // ','.with_item+
+ (a = (asdl_withitem_seq*)_gather_34_rule(p)) // ','.with_item+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -6785,11 +6826,11 @@ with_stmt_rule(Parser *p)
asdl_stmt_seq* b;
void *tc;
if (
- (_keyword = _PyPegen_expect_token(p, 702)) // token='async'
+ (_keyword = _PyPegen_expect_token(p, 703)) // token='async'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 651)) // token='with'
+ (_keyword_1 = _PyPegen_expect_token(p, 652)) // token='with'
&&
- (a = (asdl_withitem_seq*)_gather_35_rule(p)) // ','.with_item+
+ (a = (asdl_withitem_seq*)_gather_34_rule(p)) // ','.with_item+
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -6873,11 +6914,11 @@ with_item_rule(Parser *p)
if (
(e = expression_rule(p)) // expression
&&
- (_keyword = _PyPegen_expect_token(p, 684)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 685)) // token='as'
&&
(t = star_target_rule(p)) // star_target
&&
- _PyPegen_lookahead(1, _tmp_36_rule, p)
+ _PyPegen_lookahead(1, _tmp_35_rule, p)
)
{
D(fprintf(stderr, "%*c+ with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression 'as' star_target &(',' | ')' | ':')"));
@@ -6998,7 +7039,7 @@ try_stmt_rule(Parser *p)
asdl_stmt_seq* b;
asdl_stmt_seq* f;
if (
- (_keyword = _PyPegen_expect_token(p, 660)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 661)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -7042,13 +7083,13 @@ try_stmt_rule(Parser *p)
asdl_excepthandler_seq* ex;
void *f;
if (
- (_keyword = _PyPegen_expect_token(p, 660)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 661)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
(b = block_rule(p)) // block
&&
- (ex = (asdl_excepthandler_seq*)_loop1_37_rule(p)) // except_block+
+ (ex = (asdl_excepthandler_seq*)_loop1_36_rule(p)) // except_block+
&&
(el = else_block_rule(p), !p->error_indicator) // else_block?
&&
@@ -7090,13 +7131,13 @@ try_stmt_rule(Parser *p)
asdl_excepthandler_seq* ex;
void *f;
if (
- (_keyword = _PyPegen_expect_token(p, 660)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 661)) // token='try'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
(b = block_rule(p)) // block
&&
- (ex = (asdl_excepthandler_seq*)_loop1_38_rule(p)) // except_star_block+
+ (ex = (asdl_excepthandler_seq*)_loop1_37_rule(p)) // except_star_block+
&&
(el = else_block_rule(p), !p->error_indicator) // else_block?
&&
@@ -7189,7 +7230,7 @@ except_block_rule(Parser *p)
asdl_stmt_seq* b;
expr_ty e;
if (
- (_keyword = _PyPegen_expect_token(p, 681)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 682)) // token='except'
&&
(e = expression_rule(p)) // expression
&&
@@ -7233,11 +7274,11 @@ except_block_rule(Parser *p)
expr_ty e;
expr_ty t;
if (
- (_keyword = _PyPegen_expect_token(p, 681)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 682)) // token='except'
&&
(e = expression_rule(p)) // expression
&&
- (_keyword_1 = _PyPegen_expect_token(p, 684)) // token='as'
+ (_keyword_1 = _PyPegen_expect_token(p, 685)) // token='as'
&&
(t = _PyPegen_name_token(p)) // NAME
&&
@@ -7279,7 +7320,7 @@ except_block_rule(Parser *p)
asdl_stmt_seq* b;
expr_ty e;
if (
- (_keyword = _PyPegen_expect_token(p, 681)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 682)) // token='except'
&&
(e = expressions_rule(p)) // expressions
&&
@@ -7320,7 +7361,7 @@ except_block_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* b;
if (
- (_keyword = _PyPegen_expect_token(p, 681)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 682)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -7432,7 +7473,7 @@ except_star_block_rule(Parser *p)
asdl_stmt_seq* b;
expr_ty e;
if (
- (_keyword = _PyPegen_expect_token(p, 681)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 682)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
@@ -7479,13 +7520,13 @@ except_star_block_rule(Parser *p)
expr_ty e;
expr_ty t;
if (
- (_keyword = _PyPegen_expect_token(p, 681)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 682)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
(e = expression_rule(p)) // expression
&&
- (_keyword_1 = _PyPegen_expect_token(p, 684)) // token='as'
+ (_keyword_1 = _PyPegen_expect_token(p, 685)) // token='as'
&&
(t = _PyPegen_name_token(p)) // NAME
&&
@@ -7528,7 +7569,7 @@ except_star_block_rule(Parser *p)
asdl_stmt_seq* b;
expr_ty e;
if (
- (_keyword = _PyPegen_expect_token(p, 681)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 682)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
@@ -7628,7 +7669,7 @@ finally_block_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* a;
if (
- (_keyword = _PyPegen_expect_token(p, 677)) // token='finally'
+ (_keyword = _PyPegen_expect_token(p, 678)) // token='finally'
&&
(_literal = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -7702,7 +7743,7 @@ match_stmt_rule(Parser *p)
&&
(indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT'
&&
- (cases = (asdl_match_case_seq*)_loop1_39_rule(p)) // case_block+
+ (cases = (asdl_match_case_seq*)_loop1_38_rule(p)) // case_block+
&&
(dedent_var = _PyPegen_expect_token(p, DEDENT)) // token='DEDENT'
)
@@ -7936,7 +7977,7 @@ guard_rule(Parser *p)
Token * _keyword;
expr_ty guard;
if (
- (_keyword = _PyPegen_expect_token(p, 686)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 687)) // token='if'
&&
(guard = named_expression_rule(p)) // named_expression
)
@@ -8131,7 +8172,7 @@ as_pattern_rule(Parser *p)
if (
(pattern = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 684)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 685)) // token='as'
&&
(target = pattern_capture_target_rule(p)) // pattern_capture_target
)
@@ -8213,7 +8254,7 @@ or_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> or_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'|'.closed_pattern+"));
asdl_pattern_seq* patterns;
if (
- (patterns = (asdl_pattern_seq*)_gather_41_rule(p)) // '|'.closed_pattern+
+ (patterns = (asdl_pattern_seq*)_gather_40_rule(p)) // '|'.closed_pattern+
)
{
D(fprintf(stderr, "%*c+ or_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'|'.closed_pattern+"));
@@ -8466,7 +8507,7 @@ literal_pattern_rule(Parser *p)
if (
(value = signed_number_rule(p)) // signed_number
&&
- _PyPegen_lookahead(0, _tmp_42_rule, p)
+ _PyPegen_lookahead(0, _tmp_41_rule, p)
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "signed_number !('+' | '-')"));
@@ -8565,7 +8606,7 @@ literal_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='None'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -8598,7 +8639,7 @@ literal_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 622)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 623)) // token='True'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -8631,7 +8672,7 @@ literal_pattern_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 624)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 625)) // token='False'
)
{
D(fprintf(stderr, "%*c+ literal_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -8700,7 +8741,7 @@ literal_expr_rule(Parser *p)
if (
(signed_number_var = signed_number_rule(p)) // signed_number
&&
- _PyPegen_lookahead(0, _tmp_42_rule, p)
+ _PyPegen_lookahead(0, _tmp_41_rule, p)
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "signed_number !('+' | '-')"));
@@ -8738,7 +8779,7 @@ literal_expr_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&(STRING | FSTRING_START | TSTRING_START) strings"));
expr_ty strings_var;
if (
- _PyPegen_lookahead(1, _tmp_43_rule, p)
+ _PyPegen_lookahead(1, _tmp_42_rule, p)
&&
(strings_var = strings_rule(p)) // strings
)
@@ -8759,7 +8800,7 @@ literal_expr_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='None'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -8792,7 +8833,7 @@ literal_expr_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 622)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 623)) // token='True'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -8825,7 +8866,7 @@ literal_expr_rule(Parser *p)
D(fprintf(stderr, "%*c> literal_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 624)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 625)) // token='False'
)
{
D(fprintf(stderr, "%*c+ literal_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -9302,7 +9343,7 @@ pattern_capture_target_rule(Parser *p)
&&
(name = _PyPegen_name_token(p)) // NAME
&&
- _PyPegen_lookahead(0, _tmp_44_rule, p)
+ _PyPegen_lookahead(0, _tmp_43_rule, p)
)
{
D(fprintf(stderr, "%*c+ pattern_capture_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!\"_\" NAME !('.' | '(' | '=')"));
@@ -9417,7 +9458,7 @@ value_pattern_rule(Parser *p)
if (
(attr = attr_rule(p)) // attr
&&
- _PyPegen_lookahead(0, _tmp_44_rule, p)
+ _PyPegen_lookahead(0, _tmp_43_rule, p)
)
{
D(fprintf(stderr, "%*c+ value_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "attr !('.' | '(' | '=')"));
@@ -9836,7 +9877,7 @@ maybe_sequence_pattern_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_seq * patterns;
if (
- (patterns = _gather_46_rule(p)) // ','.maybe_star_pattern+
+ (patterns = _gather_45_rule(p)) // ','.maybe_star_pattern+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -10244,13 +10285,13 @@ items_pattern_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> items_pattern[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.key_value_pattern+"));
- asdl_seq * _gather_48_var;
+ asdl_seq * _gather_47_var;
if (
- (_gather_48_var = _gather_48_rule(p)) // ','.key_value_pattern+
+ (_gather_47_var = _gather_47_rule(p)) // ','.key_value_pattern+
)
{
D(fprintf(stderr, "%*c+ items_pattern[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.key_value_pattern+"));
- _res = _gather_48_var;
+ _res = _gather_47_var;
goto done;
}
p->mark = _mark;
@@ -10286,7 +10327,7 @@ key_value_pattern_rule(Parser *p)
void *key;
pattern_ty pattern;
if (
- (key = _tmp_49_rule(p)) // literal_expr | attr
+ (key = _tmp_48_rule(p)) // literal_expr | attr
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -10614,7 +10655,7 @@ positional_patterns_rule(Parser *p)
D(fprintf(stderr, "%*c> positional_patterns[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.pattern+"));
asdl_pattern_seq* args;
if (
- (args = (asdl_pattern_seq*)_gather_51_rule(p)) // ','.pattern+
+ (args = (asdl_pattern_seq*)_gather_50_rule(p)) // ','.pattern+
)
{
D(fprintf(stderr, "%*c+ positional_patterns[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.pattern+"));
@@ -10655,13 +10696,13 @@ keyword_patterns_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> keyword_patterns[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.keyword_pattern+"));
- asdl_seq * _gather_53_var;
+ asdl_seq * _gather_52_var;
if (
- (_gather_53_var = _gather_53_rule(p)) // ','.keyword_pattern+
+ (_gather_52_var = _gather_52_rule(p)) // ','.keyword_pattern+
)
{
D(fprintf(stderr, "%*c+ keyword_patterns[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.keyword_pattern+"));
- _res = _gather_53_var;
+ _res = _gather_52_var;
goto done;
}
p->mark = _mark;
@@ -10887,7 +10928,7 @@ type_param_seq_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_type_param_seq* a;
if (
- (a = (asdl_type_param_seq*)_gather_55_rule(p)) // ','.type_param+
+ (a = (asdl_type_param_seq*)_gather_54_rule(p)) // ','.type_param+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -11257,7 +11298,7 @@ expressions_rule(Parser *p)
if (
(a = expression_rule(p)) // expression
&&
- (b = _loop1_56_rule(p)) // ((',' expression))+
+ (b = _loop1_55_rule(p)) // ((',' expression))+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -11428,11 +11469,11 @@ expression_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 686)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 687)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- (_keyword_1 = _PyPegen_expect_token(p, 690)) // token='else'
+ (_keyword_1 = _PyPegen_expect_token(p, 691)) // token='else'
&&
(c = expression_rule(p)) // expression
)
@@ -11536,9 +11577,9 @@ yield_expr_rule(Parser *p)
Token * _keyword_1;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 587)) // token='yield'
+ (_keyword = _PyPegen_expect_token(p, 588)) // token='yield'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 637)) // token='from'
+ (_keyword_1 = _PyPegen_expect_token(p, 638)) // token='from'
&&
(a = expression_rule(p)) // expression
)
@@ -11574,7 +11615,7 @@ yield_expr_rule(Parser *p)
Token * _keyword;
void *a;
if (
- (_keyword = _PyPegen_expect_token(p, 587)) // token='yield'
+ (_keyword = _PyPegen_expect_token(p, 588)) // token='yield'
&&
(a = star_expressions_rule(p), !p->error_indicator) // star_expressions?
)
@@ -11645,7 +11686,7 @@ star_expressions_rule(Parser *p)
if (
(a = star_expression_rule(p)) // star_expression
&&
- (b = _loop1_57_rule(p)) // ((',' star_expression))+
+ (b = _loop1_56_rule(p)) // ((',' star_expression))+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -11844,7 +11885,7 @@ star_named_expressions_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_gather_59_rule(p)) // ','.star_named_expression+
+ (a = (asdl_expr_seq*)_gather_58_rule(p)) // ','.star_named_expression+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -12140,7 +12181,7 @@ disjunction_rule(Parser *p)
if (
(a = conjunction_rule(p)) // conjunction
&&
- (b = _loop1_60_rule(p)) // (('or' conjunction))+
+ (b = _loop1_59_rule(p)) // (('or' conjunction))+
)
{
D(fprintf(stderr, "%*c+ disjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "conjunction (('or' conjunction))+"));
@@ -12228,7 +12269,7 @@ conjunction_rule(Parser *p)
if (
(a = inversion_rule(p)) // inversion
&&
- (b = _loop1_61_rule(p)) // (('and' inversion))+
+ (b = _loop1_60_rule(p)) // (('and' inversion))+
)
{
D(fprintf(stderr, "%*c+ conjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "inversion (('and' inversion))+"));
@@ -12314,7 +12355,7 @@ inversion_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 707)) // token='not'
+ (_keyword = _PyPegen_expect_token(p, 708)) // token='not'
&&
(a = inversion_rule(p)) // inversion
)
@@ -12400,7 +12441,7 @@ comparison_rule(Parser *p)
if (
(a = bitwise_or_rule(p)) // bitwise_or
&&
- (b = _loop1_62_rule(p)) // compare_op_bitwise_or_pair+
+ (b = _loop1_61_rule(p)) // compare_op_bitwise_or_pair+
)
{
D(fprintf(stderr, "%*c+ comparison[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or compare_op_bitwise_or_pair+"));
@@ -12734,10 +12775,10 @@ noteq_bitwise_or_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> noteq_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('!=') bitwise_or"));
- void *_tmp_63_var;
+ void *_tmp_62_var;
expr_ty a;
if (
- (_tmp_63_var = _tmp_63_rule(p)) // '!='
+ (_tmp_62_var = _tmp_62_rule(p)) // '!='
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -12968,9 +13009,9 @@ notin_bitwise_or_rule(Parser *p)
Token * _keyword_1;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 707)) // token='not'
+ (_keyword = _PyPegen_expect_token(p, 708)) // token='not'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 699)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 700)) // token='in'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -13016,7 +13057,7 @@ in_bitwise_or_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 699)) // token='in'
+ (_keyword = _PyPegen_expect_token(p, 700)) // token='in'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -13063,9 +13104,9 @@ isnot_bitwise_or_rule(Parser *p)
Token * _keyword_1;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 596)) // token='is'
+ (_keyword = _PyPegen_expect_token(p, 597)) // token='is'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 707)) // token='not'
+ (_keyword_1 = _PyPegen_expect_token(p, 708)) // token='not'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -13111,7 +13152,7 @@ is_bitwise_or_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 596)) // token='is'
+ (_keyword = _PyPegen_expect_token(p, 597)) // token='is'
&&
(a = bitwise_or_rule(p)) // bitwise_or
)
@@ -14427,7 +14468,7 @@ await_primary_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 597)) // token='await'
+ (_keyword = _PyPegen_expect_token(p, 598)) // token='await'
&&
(a = primary_rule(p)) // primary
)
@@ -14785,7 +14826,7 @@ slices_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_gather_65_rule(p)) // ','.(slice | starred_expression)+
+ (a = (asdl_expr_seq*)_gather_64_rule(p)) // ','.(slice | starred_expression)+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -14857,7 +14898,7 @@ slice_rule(Parser *p)
&&
(b = expression_rule(p), !p->error_indicator) // expression?
&&
- (c = _tmp_66_rule(p), !p->error_indicator) // [':' expression?]
+ (c = _tmp_65_rule(p), !p->error_indicator) // [':' expression?]
)
{
D(fprintf(stderr, "%*c+ slice[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression? ':' expression? [':' expression?]"));
@@ -14971,7 +15012,7 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 622)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 623)) // token='True'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
@@ -15004,7 +15045,7 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 624)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 625)) // token='False'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
@@ -15037,7 +15078,7 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='None'
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
@@ -15070,7 +15111,7 @@ atom_rule(Parser *p)
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&(STRING | FSTRING_START | TSTRING_START) strings"));
expr_ty strings_var;
if (
- _PyPegen_lookahead(1, _tmp_43_rule, p)
+ _PyPegen_lookahead(1, _tmp_42_rule, p)
&&
(strings_var = strings_rule(p)) // strings
)
@@ -15108,15 +15149,15 @@ atom_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'(' (tuple | group | genexp)"));
- void *_tmp_67_var;
+ void *_tmp_66_var;
if (
_PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 7) // token='('
&&
- (_tmp_67_var = _tmp_67_rule(p)) // tuple | group | genexp
+ (_tmp_66_var = _tmp_66_rule(p)) // tuple | group | genexp
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'(' (tuple | group | genexp)"));
- _res = _tmp_67_var;
+ _res = _tmp_66_var;
goto done;
}
p->mark = _mark;
@@ -15129,15 +15170,15 @@ atom_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'[' (list | listcomp)"));
- void *_tmp_68_var;
+ void *_tmp_67_var;
if (
_PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 9) // token='['
&&
- (_tmp_68_var = _tmp_68_rule(p)) // list | listcomp
+ (_tmp_67_var = _tmp_67_rule(p)) // list | listcomp
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'[' (list | listcomp)"));
- _res = _tmp_68_var;
+ _res = _tmp_67_var;
goto done;
}
p->mark = _mark;
@@ -15150,15 +15191,15 @@ atom_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)"));
- void *_tmp_69_var;
+ void *_tmp_68_var;
if (
_PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 25) // token='{'
&&
- (_tmp_69_var = _tmp_69_rule(p)) // dict | set | dictcomp | setcomp
+ (_tmp_68_var = _tmp_68_rule(p)) // dict | set | dictcomp | setcomp
)
{
D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)"));
- _res = _tmp_69_var;
+ _res = _tmp_68_var;
goto done;
}
p->mark = _mark;
@@ -15229,7 +15270,7 @@ group_rule(Parser *p)
if (
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (a = _tmp_70_rule(p)) // yield_expr | named_expression
+ (a = _tmp_69_rule(p)) // yield_expr | named_expression
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
@@ -15305,7 +15346,7 @@ lambdef_rule(Parser *p)
void *a;
expr_ty b;
if (
- (_keyword = _PyPegen_expect_token(p, 621)) // token='lambda'
+ (_keyword = _PyPegen_expect_token(p, 622)) // token='lambda'
&&
(a = lambda_params_rule(p), !p->error_indicator) // lambda_params?
&&
@@ -15430,9 +15471,9 @@ lambda_parameters_rule(Parser *p)
if (
(a = lambda_slash_no_default_rule(p)) // lambda_slash_no_default
&&
- (b = (asdl_arg_seq*)_loop0_71_rule(p)) // lambda_param_no_default*
+ (b = (asdl_arg_seq*)_loop0_70_rule(p)) // lambda_param_no_default*
&&
- (c = _loop0_72_rule(p)) // lambda_param_with_default*
+ (c = _loop0_71_rule(p)) // lambda_param_with_default*
&&
(d = lambda_star_etc_rule(p), !p->error_indicator) // lambda_star_etc?
)
@@ -15462,7 +15503,7 @@ lambda_parameters_rule(Parser *p)
if (
(a = lambda_slash_with_default_rule(p)) // lambda_slash_with_default
&&
- (b = _loop0_72_rule(p)) // lambda_param_with_default*
+ (b = _loop0_71_rule(p)) // lambda_param_with_default*
&&
(c = lambda_star_etc_rule(p), !p->error_indicator) // lambda_star_etc?
)
@@ -15490,9 +15531,9 @@ lambda_parameters_rule(Parser *p)
asdl_seq * b;
void *c;
if (
- (a = (asdl_arg_seq*)_loop1_73_rule(p)) // lambda_param_no_default+
+ (a = (asdl_arg_seq*)_loop1_72_rule(p)) // lambda_param_no_default+
&&
- (b = _loop0_72_rule(p)) // lambda_param_with_default*
+ (b = _loop0_71_rule(p)) // lambda_param_with_default*
&&
(c = lambda_star_etc_rule(p), !p->error_indicator) // lambda_star_etc?
)
@@ -15519,7 +15560,7 @@ lambda_parameters_rule(Parser *p)
asdl_seq * a;
void *b;
if (
- (a = _loop1_74_rule(p)) // lambda_param_with_default+
+ (a = _loop1_73_rule(p)) // lambda_param_with_default+
&&
(b = lambda_star_etc_rule(p), !p->error_indicator) // lambda_star_etc?
)
@@ -15592,7 +15633,7 @@ lambda_slash_no_default_rule(Parser *p)
Token * _literal_1;
asdl_arg_seq* a;
if (
- (a = (asdl_arg_seq*)_loop1_73_rule(p)) // lambda_param_no_default+
+ (a = (asdl_arg_seq*)_loop1_72_rule(p)) // lambda_param_no_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -15621,7 +15662,7 @@ lambda_slash_no_default_rule(Parser *p)
Token * _literal;
asdl_arg_seq* a;
if (
- (a = (asdl_arg_seq*)_loop1_73_rule(p)) // lambda_param_no_default+
+ (a = (asdl_arg_seq*)_loop1_72_rule(p)) // lambda_param_no_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -15673,9 +15714,9 @@ lambda_slash_with_default_rule(Parser *p)
asdl_seq * a;
asdl_seq * b;
if (
- (a = _loop0_71_rule(p)) // lambda_param_no_default*
+ (a = _loop0_70_rule(p)) // lambda_param_no_default*
&&
- (b = _loop1_74_rule(p)) // lambda_param_with_default+
+ (b = _loop1_73_rule(p)) // lambda_param_with_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -15705,9 +15746,9 @@ lambda_slash_with_default_rule(Parser *p)
asdl_seq * a;
asdl_seq * b;
if (
- (a = _loop0_71_rule(p)) // lambda_param_no_default*
+ (a = _loop0_70_rule(p)) // lambda_param_no_default*
&&
- (b = _loop1_74_rule(p)) // lambda_param_with_default+
+ (b = _loop1_73_rule(p)) // lambda_param_with_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -15784,7 +15825,7 @@ lambda_star_etc_rule(Parser *p)
&&
(a = lambda_param_no_default_rule(p)) // lambda_param_no_default
&&
- (b = _loop0_75_rule(p)) // lambda_param_maybe_default*
+ (b = _loop0_74_rule(p)) // lambda_param_maybe_default*
&&
(c = lambda_kwds_rule(p), !p->error_indicator) // lambda_kwds?
)
@@ -15817,7 +15858,7 @@ lambda_star_etc_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 12)) // token=','
&&
- (b = _loop1_76_rule(p)) // lambda_param_maybe_default+
+ (b = _loop1_75_rule(p)) // lambda_param_maybe_default+
&&
(c = lambda_kwds_rule(p), !p->error_indicator) // lambda_kwds?
)
@@ -16457,7 +16498,7 @@ fstring_full_format_spec_rule(Parser *p)
if (
(colon = _PyPegen_expect_token(p, 11)) // token=':'
&&
- (spec = _loop0_77_rule(p)) // fstring_format_spec*
+ (spec = _loop0_76_rule(p)) // fstring_format_spec*
)
{
D(fprintf(stderr, "%*c+ fstring_full_format_spec[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' fstring_format_spec*"));
@@ -16575,7 +16616,7 @@ fstring_rule(Parser *p)
if (
(a = _PyPegen_expect_token(p, FSTRING_START)) // token='FSTRING_START'
&&
- (b = _loop0_78_rule(p)) // fstring_middle*
+ (b = _loop0_77_rule(p)) // fstring_middle*
&&
(c = _PyPegen_expect_token(p, FSTRING_END)) // token='FSTRING_END'
)
@@ -16791,7 +16832,7 @@ tstring_full_format_spec_rule(Parser *p)
if (
(colon = _PyPegen_expect_token(p, 11)) // token=':'
&&
- (spec = _loop0_79_rule(p)) // tstring_format_spec*
+ (spec = _loop0_78_rule(p)) // tstring_format_spec*
)
{
D(fprintf(stderr, "%*c+ tstring_full_format_spec[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' tstring_format_spec*"));
@@ -17010,7 +17051,7 @@ tstring_rule(Parser *p)
if (
(a = _PyPegen_expect_token(p, TSTRING_START)) // token='TSTRING_START'
&&
- (b = _loop0_80_rule(p)) // tstring_middle*
+ (b = _loop0_79_rule(p)) // tstring_middle*
&&
(c = _PyPegen_expect_token(p, TSTRING_END)) // token='TSTRING_END'
)
@@ -17112,7 +17153,7 @@ strings_rule(Parser *p)
D(fprintf(stderr, "%*c> strings[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((fstring | string | tstring))+"));
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_loop1_81_rule(p)) // ((fstring | string | tstring))+
+ (a = (asdl_expr_seq*)_loop1_80_rule(p)) // ((fstring | string | tstring))+
)
{
D(fprintf(stderr, "%*c+ strings[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((fstring | string | tstring))+"));
@@ -17245,7 +17286,7 @@ tuple_rule(Parser *p)
if (
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (a = _tmp_82_rule(p), !p->error_indicator) // [star_named_expression ',' star_named_expressions?]
+ (a = _tmp_81_rule(p), !p->error_indicator) // [star_named_expression ',' star_named_expressions?]
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
@@ -17460,7 +17501,7 @@ double_starred_kvpairs_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_seq * a;
if (
- (a = _gather_84_rule(p)) // ','.double_starred_kvpair+
+ (a = _gather_83_rule(p)) // ','.double_starred_kvpair+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -17619,7 +17660,7 @@ for_if_clauses_rule(Parser *p)
D(fprintf(stderr, "%*c> for_if_clauses[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause+"));
asdl_comprehension_seq* a;
if (
- (a = (asdl_comprehension_seq*)_loop1_85_rule(p)) // for_if_clause+
+ (a = (asdl_comprehension_seq*)_loop1_84_rule(p)) // for_if_clause+
)
{
D(fprintf(stderr, "%*c+ for_if_clauses[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "for_if_clause+"));
@@ -17672,19 +17713,19 @@ for_if_clause_rule(Parser *p)
expr_ty b;
asdl_expr_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 702)) // token='async'
+ (_keyword = _PyPegen_expect_token(p, 703)) // token='async'
&&
- (_keyword_1 = _PyPegen_expect_token(p, 698)) // token='for'
+ (_keyword_1 = _PyPegen_expect_token(p, 699)) // token='for'
&&
(a = star_targets_rule(p)) // star_targets
&&
- (_keyword_2 = _PyPegen_expect_token(p, 699)) // token='in'
+ (_keyword_2 = _PyPegen_expect_token(p, 700)) // token='in'
&&
(_cut_var = 1)
&&
(b = disjunction_rule(p)) // disjunction
&&
- (c = (asdl_expr_seq*)_loop0_86_rule(p)) // (('if' disjunction))*
+ (c = (asdl_expr_seq*)_loop0_85_rule(p)) // (('if' disjunction))*
)
{
D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async' 'for' star_targets 'in' ~ disjunction (('if' disjunction))*"));
@@ -17717,17 +17758,17 @@ for_if_clause_rule(Parser *p)
expr_ty b;
asdl_expr_seq* c;
if (
- (_keyword = _PyPegen_expect_token(p, 698)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 699)) // token='for'
&&
(a = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 699)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 700)) // token='in'
&&
(_cut_var = 1)
&&
(b = disjunction_rule(p)) // disjunction
&&
- (c = (asdl_expr_seq*)_loop0_86_rule(p)) // (('if' disjunction))*
+ (c = (asdl_expr_seq*)_loop0_85_rule(p)) // (('if' disjunction))*
)
{
D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' ~ disjunction (('if' disjunction))*"));
@@ -18006,7 +18047,7 @@ genexp_rule(Parser *p)
if (
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (a = _tmp_87_rule(p)) // assignment_expression | expression !':='
+ (a = _tmp_86_rule(p)) // assignment_expression | expression !':='
&&
(b = for_if_clauses_rule(p)) // for_if_clauses
&&
@@ -18255,9 +18296,9 @@ args_rule(Parser *p)
asdl_expr_seq* a;
void *b;
if (
- (a = (asdl_expr_seq*)_gather_89_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+
+ (a = (asdl_expr_seq*)_gather_88_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+
&&
- (b = _tmp_90_rule(p), !p->error_indicator) // [',' kwargs]
+ (b = _tmp_89_rule(p), !p->error_indicator) // [',' kwargs]
)
{
D(fprintf(stderr, "%*c+ args[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ [',' kwargs]"));
@@ -18347,11 +18388,11 @@ kwargs_rule(Parser *p)
asdl_seq * a;
asdl_seq * b;
if (
- (a = _gather_92_rule(p)) // ','.kwarg_or_starred+
+ (a = _gather_91_rule(p)) // ','.kwarg_or_starred+
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (b = _gather_94_rule(p)) // ','.kwarg_or_double_starred+
+ (b = _gather_93_rule(p)) // ','.kwarg_or_double_starred+
)
{
D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+"));
@@ -18373,13 +18414,13 @@ kwargs_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+"));
- asdl_seq * _gather_92_var;
+ asdl_seq * _gather_91_var;
if (
- (_gather_92_var = _gather_92_rule(p)) // ','.kwarg_or_starred+
+ (_gather_91_var = _gather_91_rule(p)) // ','.kwarg_or_starred+
)
{
D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+"));
- _res = _gather_92_var;
+ _res = _gather_91_var;
goto done;
}
p->mark = _mark;
@@ -18392,13 +18433,13 @@ kwargs_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+"));
- asdl_seq * _gather_94_var;
+ asdl_seq * _gather_93_var;
if (
- (_gather_94_var = _gather_94_rule(p)) // ','.kwarg_or_double_starred+
+ (_gather_93_var = _gather_93_rule(p)) // ','.kwarg_or_double_starred+
)
{
D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+"));
- _res = _gather_94_var;
+ _res = _gather_93_var;
goto done;
}
p->mark = _mark;
@@ -18809,7 +18850,7 @@ star_targets_rule(Parser *p)
if (
(a = star_target_rule(p)) // star_target
&&
- (b = _loop0_95_rule(p)) // ((',' star_target))*
+ (b = _loop0_94_rule(p)) // ((',' star_target))*
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -18865,7 +18906,7 @@ star_targets_list_seq_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_gather_97_rule(p)) // ','.star_target+
+ (a = (asdl_expr_seq*)_gather_96_rule(p)) // ','.star_target+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -18915,7 +18956,7 @@ star_targets_tuple_seq_rule(Parser *p)
if (
(a = star_target_rule(p)) // star_target
&&
- (b = _loop1_98_rule(p)) // ((',' star_target))+
+ (b = _loop1_97_rule(p)) // ((',' star_target))+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -19003,7 +19044,7 @@ star_target_rule(Parser *p)
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (a = _tmp_99_rule(p)) // !'*' star_target
+ (a = _tmp_98_rule(p)) // !'*' star_target
)
{
D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (!'*' star_target)"));
@@ -19926,7 +19967,7 @@ del_targets_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_gather_101_rule(p)) // ','.del_target+
+ (a = (asdl_expr_seq*)_gather_100_rule(p)) // ','.del_target+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
@@ -20284,7 +20325,7 @@ type_expressions_rule(Parser *p)
expr_ty b;
expr_ty c;
if (
- (a = _gather_103_rule(p)) // ','.expression+
+ (a = _gather_102_rule(p)) // ','.expression+
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
@@ -20323,7 +20364,7 @@ type_expressions_rule(Parser *p)
asdl_seq * a;
expr_ty b;
if (
- (a = _gather_103_rule(p)) // ','.expression+
+ (a = _gather_102_rule(p)) // ','.expression+
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
@@ -20356,7 +20397,7 @@ type_expressions_rule(Parser *p)
asdl_seq * a;
expr_ty b;
if (
- (a = _gather_103_rule(p)) // ','.expression+
+ (a = _gather_102_rule(p)) // ','.expression+
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
@@ -20476,7 +20517,7 @@ type_expressions_rule(Parser *p)
D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+"));
asdl_expr_seq* a;
if (
- (a = (asdl_expr_seq*)_gather_103_rule(p)) // ','.expression+
+ (a = (asdl_expr_seq*)_gather_102_rule(p)) // ','.expression+
)
{
D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+"));
@@ -20527,7 +20568,7 @@ func_type_comment_rule(Parser *p)
&&
(t = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT'
&&
- _PyPegen_lookahead(1, _tmp_104_rule, p)
+ _PyPegen_lookahead(1, _tmp_103_rule, p)
)
{
D(fprintf(stderr, "%*c+ func_type_comment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE TYPE_COMMENT &(NEWLINE INDENT)"));
@@ -20613,15 +20654,15 @@ invalid_arguments_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs) | kwargs) ',' ','.(starred_expression !'=')+"));
- asdl_seq * _gather_107_var;
- void *_tmp_105_var;
+ asdl_seq * _gather_106_var;
+ void *_tmp_104_var;
Token * a;
if (
- (_tmp_105_var = _tmp_105_rule(p)) // (','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs) | kwargs
+ (_tmp_104_var = _tmp_104_rule(p)) // (','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs) | kwargs
&&
(a = _PyPegen_expect_token(p, 12)) // token=','
&&
- (_gather_107_var = _gather_107_rule(p)) // ','.(starred_expression !'=')+
+ (_gather_106_var = _gather_106_rule(p)) // ','.(starred_expression !'=')+
)
{
D(fprintf(stderr, "%*c+ invalid_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs) | kwargs) ',' ','.(starred_expression !'=')+"));
@@ -20655,7 +20696,7 @@ invalid_arguments_rule(Parser *p)
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (_opt_var = _tmp_108_rule(p), !p->error_indicator) // [args | expression for_if_clauses]
+ (_opt_var = _tmp_107_rule(p), !p->error_indicator) // [args | expression for_if_clauses]
)
{
D(fprintf(stderr, "%*c+ invalid_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses ',' [args | expression for_if_clauses]"));
@@ -20715,13 +20756,13 @@ invalid_arguments_rule(Parser *p)
expr_ty a;
Token * b;
if (
- (_opt_var = _tmp_109_rule(p), !p->error_indicator) // [(args ',')]
+ (_opt_var = _tmp_108_rule(p), !p->error_indicator) // [(args ',')]
&&
(a = _PyPegen_name_token(p)) // NAME
&&
(b = _PyPegen_expect_token(p, 22)) // token='='
&&
- _PyPegen_lookahead(1, _tmp_110_rule, p)
+ _PyPegen_lookahead(1, _tmp_109_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "[(args ',')] NAME '=' &(',' | ')')"));
@@ -20859,7 +20900,7 @@ invalid_kwarg_rule(Parser *p)
Token* a;
Token * b;
if (
- (a = (Token*)_tmp_111_rule(p)) // 'True' | 'False' | 'None'
+ (a = (Token*)_tmp_110_rule(p)) // 'True' | 'False' | 'None'
&&
(b = _PyPegen_expect_token(p, 22)) // token='='
)
@@ -20919,7 +20960,7 @@ invalid_kwarg_rule(Parser *p)
expr_ty a;
Token * b;
if (
- _PyPegen_lookahead(0, _tmp_112_rule, p)
+ _PyPegen_lookahead(0, _tmp_111_rule, p)
&&
(a = expression_rule(p)) // expression
&&
@@ -21022,11 +21063,11 @@ expression_without_invalid_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 686)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 687)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- (_keyword_1 = _PyPegen_expect_token(p, 690)) // token='else'
+ (_keyword_1 = _PyPegen_expect_token(p, 691)) // token='else'
&&
(c = expression_rule(p)) // expression
)
@@ -21267,7 +21308,7 @@ invalid_expression_rule(Parser *p)
if (
(string_var = _PyPegen_string_token(p)) // STRING
&&
- (a = _loop1_113_rule(p)) // ((!STRING expression_without_invalid))+
+ (a = _loop1_112_rule(p)) // ((!STRING expression_without_invalid))+
&&
(string_var_1 = _PyPegen_string_token(p)) // STRING
)
@@ -21294,7 +21335,7 @@ invalid_expression_rule(Parser *p)
expr_ty a;
expr_ty b;
if (
- _PyPegen_lookahead(0, _tmp_114_rule, p)
+ _PyPegen_lookahead(0, _tmp_113_rule, p)
&&
(a = disjunction_rule(p)) // disjunction
&&
@@ -21326,11 +21367,11 @@ invalid_expression_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 686)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 687)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- _PyPegen_lookahead(0, _tmp_115_rule, p)
+ _PyPegen_lookahead(0, _tmp_114_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "disjunction 'if' disjunction !('else' | ':')"));
@@ -21359,11 +21400,11 @@ invalid_expression_rule(Parser *p)
if (
(a = disjunction_rule(p)) // disjunction
&&
- (_keyword = _PyPegen_expect_token(p, 686)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 687)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- (_keyword_1 = _PyPegen_expect_token(p, 690)) // token='else'
+ (_keyword_1 = _PyPegen_expect_token(p, 691)) // token='else'
&&
_PyPegen_lookahead_for_expr(0, expression_rule, p)
)
@@ -21393,13 +21434,13 @@ invalid_expression_rule(Parser *p)
expr_ty b;
stmt_ty c;
if (
- (a = (stmt_ty)_tmp_116_rule(p)) // pass_stmt | break_stmt | continue_stmt
+ (a = (stmt_ty)_tmp_115_rule(p)) // pass_stmt | break_stmt | continue_stmt
&&
- (_keyword = _PyPegen_expect_token(p, 686)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 687)) // token='if'
&&
(b = disjunction_rule(p)) // disjunction
&&
- (_keyword_1 = _PyPegen_expect_token(p, 690)) // token='else'
+ (_keyword_1 = _PyPegen_expect_token(p, 691)) // token='else'
&&
(c = simple_stmt_rule(p)) // simple_stmt
)
@@ -21428,7 +21469,7 @@ invalid_expression_rule(Parser *p)
Token * a;
Token * b;
if (
- (a = _PyPegen_expect_token(p, 621)) // token='lambda'
+ (a = _PyPegen_expect_token(p, 622)) // token='lambda'
&&
(_opt_var = lambda_params_rule(p), !p->error_indicator) // lambda_params?
&&
@@ -21461,7 +21502,7 @@ invalid_expression_rule(Parser *p)
Token * a;
Token * b;
if (
- (a = _PyPegen_expect_token(p, 621)) // token='lambda'
+ (a = _PyPegen_expect_token(p, 622)) // token='lambda'
&&
(_opt_var = lambda_params_rule(p), !p->error_indicator) // lambda_params?
&&
@@ -21555,7 +21596,7 @@ invalid_named_expression_rule(Parser *p)
&&
(b = bitwise_or_rule(p)) // bitwise_or
&&
- _PyPegen_lookahead(0, _tmp_117_rule, p)
+ _PyPegen_lookahead(0, _tmp_116_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '=' bitwise_or !('=' | ':=')"));
@@ -21581,7 +21622,7 @@ invalid_named_expression_rule(Parser *p)
Token * b;
expr_ty bitwise_or_var;
if (
- _PyPegen_lookahead(0, _tmp_118_rule, p)
+ _PyPegen_lookahead(0, _tmp_117_rule, p)
&&
(a = bitwise_or_rule(p)) // bitwise_or
&&
@@ -21589,7 +21630,7 @@ invalid_named_expression_rule(Parser *p)
&&
(bitwise_or_var = bitwise_or_rule(p)) // bitwise_or
&&
- _PyPegen_lookahead(0, _tmp_117_rule, p)
+ _PyPegen_lookahead(0, _tmp_116_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!(list | tuple | genexp | 'True' | 'None' | 'False') bitwise_or '=' bitwise_or !('=' | ':=')"));
@@ -21669,7 +21710,7 @@ invalid_assignment_rule(Parser *p)
D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':' expression"));
Token * _literal;
Token * _literal_1;
- asdl_seq * _loop0_119_var;
+ asdl_seq * _loop0_118_var;
expr_ty a;
expr_ty expression_var;
if (
@@ -21677,7 +21718,7 @@ invalid_assignment_rule(Parser *p)
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (_loop0_119_var = _loop0_119_rule(p)) // star_named_expressions*
+ (_loop0_118_var = _loop0_118_rule(p)) // star_named_expressions*
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -21734,10 +21775,10 @@ invalid_assignment_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='"));
Token * _literal;
- asdl_seq * _loop0_120_var;
+ asdl_seq * _loop0_119_var;
expr_ty a;
if (
- (_loop0_120_var = _loop0_120_rule(p)) // ((star_targets '='))*
+ (_loop0_119_var = _loop0_119_rule(p)) // ((star_targets '='))*
&&
(a = star_expressions_rule(p)) // star_expressions
&&
@@ -21764,10 +21805,10 @@ invalid_assignment_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* yield_expr '='"));
Token * _literal;
- asdl_seq * _loop0_120_var;
+ asdl_seq * _loop0_119_var;
expr_ty a;
if (
- (_loop0_120_var = _loop0_120_rule(p)) // ((star_targets '='))*
+ (_loop0_119_var = _loop0_119_rule(p)) // ((star_targets '='))*
&&
(a = yield_expr_rule(p)) // yield_expr
&&
@@ -21932,9 +21973,9 @@ invalid_raise_stmt_rule(Parser *p)
Token * a;
Token * b;
if (
- (a = _PyPegen_expect_token(p, 627)) // token='raise'
+ (a = _PyPegen_expect_token(p, 628)) // token='raise'
&&
- (b = _PyPegen_expect_token(p, 637)) // token='from'
+ (b = _PyPegen_expect_token(p, 638)) // token='from'
)
{
D(fprintf(stderr, "%*c+ invalid_raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise' 'from'"));
@@ -21960,11 +22001,11 @@ invalid_raise_stmt_rule(Parser *p)
Token * a;
expr_ty expression_var;
if (
- (_keyword = _PyPegen_expect_token(p, 627)) // token='raise'
+ (_keyword = _PyPegen_expect_token(p, 628)) // token='raise'
&&
(expression_var = expression_rule(p)) // expression
&&
- (a = _PyPegen_expect_token(p, 637)) // token='from'
+ (a = _PyPegen_expect_token(p, 638)) // token='from'
)
{
D(fprintf(stderr, "%*c+ invalid_raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise' expression 'from'"));
@@ -22008,7 +22049,7 @@ invalid_del_stmt_rule(Parser *p)
Token * _keyword;
expr_ty a;
if (
- (_keyword = _PyPegen_expect_token(p, 629)) // token='del'
+ (_keyword = _PyPegen_expect_token(p, 630)) // token='del'
&&
(a = star_expressions_rule(p)) // star_expressions
)
@@ -22099,11 +22140,11 @@ invalid_comprehension_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses"));
- void *_tmp_121_var;
+ void *_tmp_120_var;
expr_ty a;
asdl_comprehension_seq* for_if_clauses_var;
if (
- (_tmp_121_var = _tmp_121_rule(p)) // '[' | '(' | '{'
+ (_tmp_120_var = _tmp_120_rule(p)) // '[' | '(' | '{'
&&
(a = starred_expression_rule(p)) // starred_expression
&&
@@ -22130,12 +22171,12 @@ invalid_comprehension_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '{') star_named_expression ',' star_named_expressions for_if_clauses"));
Token * _literal;
- void *_tmp_122_var;
+ void *_tmp_121_var;
expr_ty a;
asdl_expr_seq* b;
asdl_comprehension_seq* for_if_clauses_var;
if (
- (_tmp_122_var = _tmp_122_rule(p)) // '[' | '{'
+ (_tmp_121_var = _tmp_121_rule(p)) // '[' | '{'
&&
(a = star_named_expression_rule(p)) // star_named_expression
&&
@@ -22165,12 +22206,12 @@ invalid_comprehension_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '{') star_named_expression ',' for_if_clauses"));
- void *_tmp_122_var;
+ void *_tmp_121_var;
expr_ty a;
Token * b;
asdl_comprehension_seq* for_if_clauses_var;
if (
- (_tmp_122_var = _tmp_122_rule(p)) // '[' | '{'
+ (_tmp_121_var = _tmp_121_rule(p)) // '[' | '{'
&&
(a = star_named_expression_rule(p)) // star_named_expression
&&
@@ -22305,13 +22346,13 @@ invalid_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(slash_no_default | slash_with_default) param_maybe_default* '/'"));
- asdl_seq * _loop0_32_var;
- void *_tmp_123_var;
+ asdl_seq * _loop0_31_var;
+ void *_tmp_122_var;
Token * a;
if (
- (_tmp_123_var = _tmp_123_rule(p)) // slash_no_default | slash_with_default
+ (_tmp_122_var = _tmp_122_rule(p)) // slash_no_default | slash_with_default
&&
- (_loop0_32_var = _loop0_32_rule(p)) // param_maybe_default*
+ (_loop0_31_var = _loop0_31_rule(p)) // param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 17)) // token='/'
)
@@ -22335,7 +22376,7 @@ invalid_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default? param_no_default* invalid_parameters_helper param_no_default"));
- asdl_seq * _loop0_28_var;
+ asdl_seq * _loop0_27_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
arg_ty a;
@@ -22343,7 +22384,7 @@ invalid_parameters_rule(Parser *p)
if (
(_opt_var = slash_no_default_rule(p), !p->error_indicator) // slash_no_default?
&&
- (_loop0_28_var = _loop0_28_rule(p)) // param_no_default*
+ (_loop0_27_var = _loop0_27_rule(p)) // param_no_default*
&&
(invalid_parameters_helper_var = invalid_parameters_helper_rule(p)) // invalid_parameters_helper
&&
@@ -22369,18 +22410,18 @@ invalid_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* '(' param_no_default+ ','? ')'"));
- asdl_seq * _loop0_28_var;
- asdl_seq * _loop1_30_var;
+ asdl_seq * _loop0_27_var;
+ asdl_seq * _loop1_29_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
Token * a;
Token * b;
if (
- (_loop0_28_var = _loop0_28_rule(p)) // param_no_default*
+ (_loop0_27_var = _loop0_27_rule(p)) // param_no_default*
&&
(a = _PyPegen_expect_token(p, 7)) // token='('
&&
- (_loop1_30_var = _loop1_30_rule(p)) // param_no_default+
+ (_loop1_29_var = _loop1_29_rule(p)) // param_no_default+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -22407,22 +22448,22 @@ invalid_parameters_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "[(slash_no_default | slash_with_default)] param_maybe_default* '*' (',' | param_no_default) param_maybe_default* '/'"));
Token * _literal;
- asdl_seq * _loop0_32_var;
- asdl_seq * _loop0_32_var_1;
+ asdl_seq * _loop0_31_var;
+ asdl_seq * _loop0_31_var_1;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
- void *_tmp_124_var;
+ void *_tmp_123_var;
Token * a;
if (
- (_opt_var = _tmp_123_rule(p), !p->error_indicator) // [(slash_no_default | slash_with_default)]
+ (_opt_var = _tmp_122_rule(p), !p->error_indicator) // [(slash_no_default | slash_with_default)]
&&
- (_loop0_32_var = _loop0_32_rule(p)) // param_maybe_default*
+ (_loop0_31_var = _loop0_31_rule(p)) // param_maybe_default*
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_124_var = _tmp_124_rule(p)) // ',' | param_no_default
+ (_tmp_123_var = _tmp_123_rule(p)) // ',' | param_no_default
&&
- (_loop0_32_var_1 = _loop0_32_rule(p)) // param_maybe_default*
+ (_loop0_31_var_1 = _loop0_31_rule(p)) // param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 17)) // token='/'
)
@@ -22447,10 +22488,10 @@ invalid_parameters_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default+ '/' '*'"));
Token * _literal;
- asdl_seq * _loop1_33_var;
+ asdl_seq * _loop1_32_var;
Token * a;
if (
- (_loop1_33_var = _loop1_33_rule(p)) // param_maybe_default+
+ (_loop1_32_var = _loop1_32_rule(p)) // param_maybe_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -22499,7 +22540,7 @@ invalid_default_rule(Parser *p)
if (
(a = _PyPegen_expect_token(p, 22)) // token='='
&&
- _PyPegen_lookahead(1, _tmp_125_rule, p)
+ _PyPegen_lookahead(1, _tmp_124_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' &(')' | ',')"));
@@ -22544,12 +22585,12 @@ invalid_star_etc_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))"));
- void *_tmp_126_var;
+ void *_tmp_125_var;
Token * a;
if (
(a = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_126_var = _tmp_126_rule(p)) // ')' | ',' (')' | '**')
+ (_tmp_125_var = _tmp_125_rule(p)) // ')' | ',' (')' | '**')
)
{
D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))"));
@@ -22632,20 +22673,20 @@ invalid_star_etc_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (param_no_default | ',') param_maybe_default* '*' (param_no_default | ',')"));
Token * _literal;
- asdl_seq * _loop0_32_var;
- void *_tmp_127_var;
- void *_tmp_127_var_1;
+ asdl_seq * _loop0_31_var;
+ void *_tmp_126_var;
+ void *_tmp_126_var_1;
Token * a;
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_127_var = _tmp_127_rule(p)) // param_no_default | ','
+ (_tmp_126_var = _tmp_126_rule(p)) // param_no_default | ','
&&
- (_loop0_32_var = _loop0_32_rule(p)) // param_maybe_default*
+ (_loop0_31_var = _loop0_31_rule(p)) // param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_127_var_1 = _tmp_127_rule(p)) // param_no_default | ','
+ (_tmp_126_var_1 = _tmp_126_rule(p)) // param_no_default | ','
)
{
D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (param_no_default | ',') param_maybe_default* '*' (param_no_default | ',')"));
@@ -22760,7 +22801,7 @@ invalid_kwds_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 12)) // token=','
&&
- (a = (Token*)_tmp_128_rule(p)) // '*' | '**' | '/'
+ (a = (Token*)_tmp_127_rule(p)) // '*' | '**' | '/'
)
{
D(fprintf(stderr, "%*c+ invalid_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' param ',' ('*' | '**' | '/')"));
@@ -22825,13 +22866,13 @@ invalid_parameters_helper_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_parameters_helper[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+"));
- asdl_seq * _loop1_31_var;
+ asdl_seq * _loop1_30_var;
if (
- (_loop1_31_var = _loop1_31_rule(p)) // param_with_default+
+ (_loop1_30_var = _loop1_30_rule(p)) // param_with_default+
)
{
D(fprintf(stderr, "%*c+ invalid_parameters_helper[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+"));
- _res = _loop1_31_var;
+ _res = _loop1_30_var;
goto done;
}
p->mark = _mark;
@@ -22896,13 +22937,13 @@ invalid_lambda_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(lambda_slash_no_default | lambda_slash_with_default) lambda_param_maybe_default* '/'"));
- asdl_seq * _loop0_75_var;
- void *_tmp_129_var;
+ asdl_seq * _loop0_74_var;
+ void *_tmp_128_var;
Token * a;
if (
- (_tmp_129_var = _tmp_129_rule(p)) // lambda_slash_no_default | lambda_slash_with_default
+ (_tmp_128_var = _tmp_128_rule(p)) // lambda_slash_no_default | lambda_slash_with_default
&&
- (_loop0_75_var = _loop0_75_rule(p)) // lambda_param_maybe_default*
+ (_loop0_74_var = _loop0_74_rule(p)) // lambda_param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 17)) // token='/'
)
@@ -22926,7 +22967,7 @@ invalid_lambda_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default? lambda_param_no_default* invalid_lambda_parameters_helper lambda_param_no_default"));
- asdl_seq * _loop0_71_var;
+ asdl_seq * _loop0_70_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
arg_ty a;
@@ -22934,7 +22975,7 @@ invalid_lambda_parameters_rule(Parser *p)
if (
(_opt_var = lambda_slash_no_default_rule(p), !p->error_indicator) // lambda_slash_no_default?
&&
- (_loop0_71_var = _loop0_71_rule(p)) // lambda_param_no_default*
+ (_loop0_70_var = _loop0_70_rule(p)) // lambda_param_no_default*
&&
(invalid_lambda_parameters_helper_var = invalid_lambda_parameters_helper_rule(p)) // invalid_lambda_parameters_helper
&&
@@ -22960,18 +23001,18 @@ invalid_lambda_parameters_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* '(' ','.lambda_param+ ','? ')'"));
- asdl_seq * _gather_131_var;
- asdl_seq * _loop0_71_var;
+ asdl_seq * _gather_130_var;
+ asdl_seq * _loop0_70_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
Token * a;
Token * b;
if (
- (_loop0_71_var = _loop0_71_rule(p)) // lambda_param_no_default*
+ (_loop0_70_var = _loop0_70_rule(p)) // lambda_param_no_default*
&&
(a = _PyPegen_expect_token(p, 7)) // token='('
&&
- (_gather_131_var = _gather_131_rule(p)) // ','.lambda_param+
+ (_gather_130_var = _gather_130_rule(p)) // ','.lambda_param+
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -22998,22 +23039,22 @@ invalid_lambda_parameters_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "[(lambda_slash_no_default | lambda_slash_with_default)] lambda_param_maybe_default* '*' (',' | lambda_param_no_default) lambda_param_maybe_default* '/'"));
Token * _literal;
- asdl_seq * _loop0_75_var;
- asdl_seq * _loop0_75_var_1;
+ asdl_seq * _loop0_74_var;
+ asdl_seq * _loop0_74_var_1;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
- void *_tmp_132_var;
+ void *_tmp_131_var;
Token * a;
if (
- (_opt_var = _tmp_129_rule(p), !p->error_indicator) // [(lambda_slash_no_default | lambda_slash_with_default)]
+ (_opt_var = _tmp_128_rule(p), !p->error_indicator) // [(lambda_slash_no_default | lambda_slash_with_default)]
&&
- (_loop0_75_var = _loop0_75_rule(p)) // lambda_param_maybe_default*
+ (_loop0_74_var = _loop0_74_rule(p)) // lambda_param_maybe_default*
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_132_var = _tmp_132_rule(p)) // ',' | lambda_param_no_default
+ (_tmp_131_var = _tmp_131_rule(p)) // ',' | lambda_param_no_default
&&
- (_loop0_75_var_1 = _loop0_75_rule(p)) // lambda_param_maybe_default*
+ (_loop0_74_var_1 = _loop0_74_rule(p)) // lambda_param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 17)) // token='/'
)
@@ -23038,10 +23079,10 @@ invalid_lambda_parameters_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default+ '/' '*'"));
Token * _literal;
- asdl_seq * _loop1_76_var;
+ asdl_seq * _loop1_75_var;
Token * a;
if (
- (_loop1_76_var = _loop1_76_rule(p)) // lambda_param_maybe_default+
+ (_loop1_75_var = _loop1_75_rule(p)) // lambda_param_maybe_default+
&&
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
&&
@@ -23112,13 +23153,13 @@ invalid_lambda_parameters_helper_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_lambda_parameters_helper[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+"));
- asdl_seq * _loop1_74_var;
+ asdl_seq * _loop1_73_var;
if (
- (_loop1_74_var = _loop1_74_rule(p)) // lambda_param_with_default+
+ (_loop1_73_var = _loop1_73_rule(p)) // lambda_param_with_default+
)
{
D(fprintf(stderr, "%*c+ invalid_lambda_parameters_helper[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+"));
- _res = _loop1_74_var;
+ _res = _loop1_73_var;
goto done;
}
p->mark = _mark;
@@ -23154,11 +23195,11 @@ invalid_lambda_star_etc_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))"));
Token * _literal;
- void *_tmp_133_var;
+ void *_tmp_132_var;
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_133_var = _tmp_133_rule(p)) // ':' | ',' (':' | '**')
+ (_tmp_132_var = _tmp_132_rule(p)) // ':' | ',' (':' | '**')
)
{
D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))"));
@@ -23211,20 +23252,20 @@ invalid_lambda_star_etc_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',')"));
Token * _literal;
- asdl_seq * _loop0_75_var;
- void *_tmp_134_var;
- void *_tmp_134_var_1;
+ asdl_seq * _loop0_74_var;
+ void *_tmp_133_var;
+ void *_tmp_133_var_1;
Token * a;
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_134_var = _tmp_134_rule(p)) // lambda_param_no_default | ','
+ (_tmp_133_var = _tmp_133_rule(p)) // lambda_param_no_default | ','
&&
- (_loop0_75_var = _loop0_75_rule(p)) // lambda_param_maybe_default*
+ (_loop0_74_var = _loop0_74_rule(p)) // lambda_param_maybe_default*
&&
(a = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_134_var_1 = _tmp_134_rule(p)) // lambda_param_no_default | ','
+ (_tmp_133_var_1 = _tmp_133_rule(p)) // lambda_param_no_default | ','
)
{
D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (lambda_param_no_default | ',') lambda_param_maybe_default* '*' (lambda_param_no_default | ',')"));
@@ -23342,7 +23383,7 @@ invalid_lambda_kwds_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 12)) // token=','
&&
- (a = (Token*)_tmp_128_rule(p)) // '*' | '**' | '/'
+ (a = (Token*)_tmp_127_rule(p)) // '*' | '**' | '/'
)
{
D(fprintf(stderr, "%*c+ invalid_lambda_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' lambda_param ',' ('*' | '**' | '/')"));
@@ -23444,11 +23485,11 @@ invalid_with_item_rule(Parser *p)
if (
(expression_var = expression_rule(p)) // expression
&&
- (_keyword = _PyPegen_expect_token(p, 684)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 685)) // token='as'
&&
(a = expression_rule(p)) // expression
&&
- _PyPegen_lookahead(1, _tmp_36_rule, p)
+ _PyPegen_lookahead(1, _tmp_35_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression 'as' expression &(',' | ')' | ':')"));
@@ -23492,15 +23533,15 @@ invalid_for_if_clause_rule(Parser *p)
Token * _keyword;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
- void *_tmp_135_var;
+ void *_tmp_134_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 703), !p->error_indicator) // 'async'?
&&
- (_keyword = _PyPegen_expect_token(p, 698)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 699)) // token='for'
&&
- (_tmp_135_var = _tmp_135_rule(p)) // bitwise_or ((',' bitwise_or))* ','?
+ (_tmp_134_var = _tmp_134_rule(p)) // bitwise_or ((',' bitwise_or))* ','?
&&
- _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 699) // token='in'
+ _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 700) // token='in'
)
{
D(fprintf(stderr, "%*c+ invalid_for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'? 'for' (bitwise_or ((',' bitwise_or))* ','?) !'in'"));
@@ -23546,9 +23587,9 @@ invalid_for_target_rule(Parser *p)
UNUSED(_opt_var); // Silence compiler warnings
expr_ty a;
if (
- (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 703), !p->error_indicator) // 'async'?
&&
- (_keyword = _PyPegen_expect_token(p, 698)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 699)) // token='for'
&&
(a = star_expressions_rule(p)) // star_expressions
)
@@ -23673,16 +23714,16 @@ invalid_import_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_import[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import' ','.dotted_name+ 'from' dotted_name"));
- asdl_seq * _gather_137_var;
+ asdl_seq * _gather_136_var;
Token * _keyword;
Token * a;
expr_ty dotted_name_var;
if (
- (a = _PyPegen_expect_token(p, 638)) // token='import'
+ (a = _PyPegen_expect_token(p, 639)) // token='import'
&&
- (_gather_137_var = _gather_137_rule(p)) // ','.dotted_name+
+ (_gather_136_var = _gather_136_rule(p)) // ','.dotted_name+
&&
- (_keyword = _PyPegen_expect_token(p, 637)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 638)) // token='from'
&&
(dotted_name_var = dotted_name_rule(p)) // dotted_name
)
@@ -23709,7 +23750,7 @@ invalid_import_rule(Parser *p)
Token * _keyword;
Token * token;
if (
- (_keyword = _PyPegen_expect_token(p, 638)) // token='import'
+ (_keyword = _PyPegen_expect_token(p, 639)) // token='import'
&&
(token = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -23758,9 +23799,9 @@ invalid_dotted_as_name_rule(Parser *p)
if (
(dotted_name_var = dotted_name_rule(p)) // dotted_name
&&
- (_keyword = _PyPegen_expect_token(p, 684)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 685)) // token='as'
&&
- _PyPegen_lookahead(0, _tmp_138_rule, p)
+ _PyPegen_lookahead(0, _tmp_137_rule, p)
&&
(a = expression_rule(p)) // expression
)
@@ -23809,9 +23850,9 @@ invalid_import_from_as_name_rule(Parser *p)
if (
(name_var = _PyPegen_name_token(p)) // NAME
&&
- (_keyword = _PyPegen_expect_token(p, 684)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 685)) // token='as'
&&
- _PyPegen_lookahead(0, _tmp_138_rule, p)
+ _PyPegen_lookahead(0, _tmp_137_rule, p)
&&
(a = expression_rule(p)) // expression
)
@@ -23929,17 +23970,17 @@ invalid_with_stmt_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ NEWLINE"));
- asdl_seq * _gather_140_var;
+ asdl_seq * _gather_139_var;
Token * _keyword;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
Token * newline_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 703), !p->error_indicator) // 'async'?
&&
- (_keyword = _PyPegen_expect_token(p, 651)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 652)) // token='with'
&&
- (_gather_140_var = _gather_140_rule(p)) // ','.(expression ['as' star_target])+
+ (_gather_139_var = _gather_139_rule(p)) // ','.(expression ['as' star_target])+
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -23963,7 +24004,7 @@ invalid_with_stmt_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' NEWLINE"));
- asdl_seq * _gather_142_var;
+ asdl_seq * _gather_141_var;
Token * _keyword;
Token * _literal;
Token * _literal_1;
@@ -23973,13 +24014,13 @@ invalid_with_stmt_rule(Parser *p)
UNUSED(_opt_var_1); // Silence compiler warnings
Token * newline_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 703), !p->error_indicator) // 'async'?
&&
- (_keyword = _PyPegen_expect_token(p, 651)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 652)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (_gather_142_var = _gather_142_rule(p)) // ','.(expressions ['as' star_target])+
+ (_gather_141_var = _gather_141_rule(p)) // ','.(expressions ['as' star_target])+
&&
(_opt_var_1 = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -24028,18 +24069,18 @@ invalid_with_stmt_indent_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' ','.(expression ['as' star_target])+ ':' NEWLINE !INDENT"));
- asdl_seq * _gather_140_var;
+ asdl_seq * _gather_139_var;
Token * _literal;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
Token * a;
Token * newline_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 703), !p->error_indicator) // 'async'?
&&
- (a = _PyPegen_expect_token(p, 651)) // token='with'
+ (a = _PyPegen_expect_token(p, 652)) // token='with'
&&
- (_gather_140_var = _gather_140_rule(p)) // ','.(expression ['as' star_target])+
+ (_gather_139_var = _gather_139_rule(p)) // ','.(expression ['as' star_target])+
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -24067,7 +24108,7 @@ invalid_with_stmt_indent_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_with_stmt_indent[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'? 'with' '(' ','.(expressions ['as' star_target])+ ','? ')' ':' NEWLINE !INDENT"));
- asdl_seq * _gather_142_var;
+ asdl_seq * _gather_141_var;
Token * _literal;
Token * _literal_1;
Token * _literal_2;
@@ -24078,13 +24119,13 @@ invalid_with_stmt_indent_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 703), !p->error_indicator) // 'async'?
&&
- (a = _PyPegen_expect_token(p, 651)) // token='with'
+ (a = _PyPegen_expect_token(p, 652)) // token='with'
&&
(_literal = _PyPegen_expect_token(p, 7)) // token='('
&&
- (_gather_142_var = _gather_142_rule(p)) // ','.(expressions ['as' star_target])+
+ (_gather_141_var = _gather_141_rule(p)) // ','.(expressions ['as' star_target])+
&&
(_opt_var_1 = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
&&
@@ -24143,7 +24184,7 @@ invalid_try_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 660)) // token='try'
+ (a = _PyPegen_expect_token(p, 661)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -24175,13 +24216,13 @@ invalid_try_stmt_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* block_var;
if (
- (_keyword = _PyPegen_expect_token(p, 660)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 661)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
(block_var = block_rule(p)) // block
&&
- _PyPegen_lookahead(0, _tmp_143_rule, p)
+ _PyPegen_lookahead(0, _tmp_142_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block !('except' | 'finally')"));
@@ -24206,29 +24247,29 @@ invalid_try_stmt_rule(Parser *p)
Token * _keyword;
Token * _literal;
Token * _literal_1;
- asdl_seq * _loop0_144_var;
- asdl_seq * _loop1_37_var;
+ asdl_seq * _loop0_143_var;
+ asdl_seq * _loop1_36_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
Token * a;
Token * b;
expr_ty expression_var;
if (
- (_keyword = _PyPegen_expect_token(p, 660)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 661)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
- (_loop0_144_var = _loop0_144_rule(p)) // block*
+ (_loop0_143_var = _loop0_143_rule(p)) // block*
&&
- (_loop1_37_var = _loop1_37_rule(p)) // except_block+
+ (_loop1_36_var = _loop1_36_rule(p)) // except_block+
&&
- (a = _PyPegen_expect_token(p, 681)) // token='except'
+ (a = _PyPegen_expect_token(p, 682)) // token='except'
&&
(b = _PyPegen_expect_token(p, 16)) // token='*'
&&
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_22_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var = _tmp_21_rule(p), !p->error_indicator) // ['as' NAME]
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
)
@@ -24255,23 +24296,23 @@ invalid_try_stmt_rule(Parser *p)
Token * _keyword;
Token * _literal;
Token * _literal_1;
- asdl_seq * _loop0_144_var;
- asdl_seq * _loop1_38_var;
+ asdl_seq * _loop0_143_var;
+ asdl_seq * _loop1_37_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
Token * a;
if (
- (_keyword = _PyPegen_expect_token(p, 660)) // token='try'
+ (_keyword = _PyPegen_expect_token(p, 661)) // token='try'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
- (_loop0_144_var = _loop0_144_rule(p)) // block*
+ (_loop0_143_var = _loop0_143_rule(p)) // block*
&&
- (_loop1_38_var = _loop1_38_rule(p)) // except_star_block+
+ (_loop1_37_var = _loop1_37_rule(p)) // except_star_block+
&&
- (a = _PyPegen_expect_token(p, 681)) // token='except'
+ (a = _PyPegen_expect_token(p, 682)) // token='except'
&&
- (_opt_var = _tmp_145_rule(p), !p->error_indicator) // [expression ['as' NAME]]
+ (_opt_var = _tmp_144_rule(p), !p->error_indicator) // [expression ['as' NAME]]
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
)
@@ -24326,7 +24367,7 @@ invalid_except_stmt_rule(Parser *p)
expr_ty expressions_var;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 681)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 682)) // token='except'
&&
(a = expression_rule(p)) // expression
&&
@@ -24334,7 +24375,7 @@ invalid_except_stmt_rule(Parser *p)
&&
(expressions_var = expressions_rule(p)) // expressions
&&
- (_keyword_1 = _PyPegen_expect_token(p, 684)) // token='as'
+ (_keyword_1 = _PyPegen_expect_token(p, 685)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
@@ -24366,11 +24407,11 @@ invalid_except_stmt_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 681)) // token='except'
+ (a = _PyPegen_expect_token(p, 682)) // token='except'
&&
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_22_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var = _tmp_21_rule(p), !p->error_indicator) // ['as' NAME]
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -24397,7 +24438,7 @@ invalid_except_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 681)) // token='except'
+ (a = _PyPegen_expect_token(p, 682)) // token='except'
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -24428,11 +24469,11 @@ invalid_except_stmt_rule(Parser *p)
asdl_stmt_seq* block_var;
expr_ty expression_var;
if (
- (_keyword = _PyPegen_expect_token(p, 681)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 682)) // token='except'
&&
(expression_var = expression_rule(p)) // expression
&&
- (_keyword_1 = _PyPegen_expect_token(p, 684)) // token='as'
+ (_keyword_1 = _PyPegen_expect_token(p, 685)) // token='as'
&&
(a = expression_rule(p)) // expression
&&
@@ -24492,7 +24533,7 @@ invalid_except_star_stmt_rule(Parser *p)
expr_ty expressions_var;
expr_ty name_var;
if (
- (_keyword = _PyPegen_expect_token(p, 681)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 682)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
@@ -24502,7 +24543,7 @@ invalid_except_star_stmt_rule(Parser *p)
&&
(expressions_var = expressions_rule(p)) // expressions
&&
- (_keyword_1 = _PyPegen_expect_token(p, 684)) // token='as'
+ (_keyword_1 = _PyPegen_expect_token(p, 685)) // token='as'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
@@ -24535,13 +24576,13 @@ invalid_except_star_stmt_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 681)) // token='except'
+ (a = _PyPegen_expect_token(p, 682)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_22_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var = _tmp_21_rule(p), !p->error_indicator) // ['as' NAME]
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -24566,14 +24607,14 @@ invalid_except_star_stmt_rule(Parser *p)
}
D(fprintf(stderr, "%*c> invalid_except_star_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' '*' (NEWLINE | ':')"));
Token * _literal;
- void *_tmp_146_var;
+ void *_tmp_145_var;
Token * a;
if (
- (a = _PyPegen_expect_token(p, 681)) // token='except'
+ (a = _PyPegen_expect_token(p, 682)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
- (_tmp_146_var = _tmp_146_rule(p)) // NEWLINE | ':'
+ (_tmp_145_var = _tmp_145_rule(p)) // NEWLINE | ':'
)
{
D(fprintf(stderr, "%*c+ invalid_except_star_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' '*' (NEWLINE | ':')"));
@@ -24603,13 +24644,13 @@ invalid_except_star_stmt_rule(Parser *p)
asdl_stmt_seq* block_var;
expr_ty expression_var;
if (
- (_keyword = _PyPegen_expect_token(p, 681)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 682)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
(expression_var = expression_rule(p)) // expression
&&
- (_keyword_1 = _PyPegen_expect_token(p, 684)) // token='as'
+ (_keyword_1 = _PyPegen_expect_token(p, 685)) // token='as'
&&
(a = expression_rule(p)) // expression
&&
@@ -24660,7 +24701,7 @@ invalid_finally_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 677)) // token='finally'
+ (a = _PyPegen_expect_token(p, 678)) // token='finally'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -24716,11 +24757,11 @@ invalid_except_stmt_indent_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 681)) // token='except'
+ (a = _PyPegen_expect_token(p, 682)) // token='except'
&&
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_22_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var = _tmp_21_rule(p), !p->error_indicator) // ['as' NAME]
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -24752,7 +24793,7 @@ invalid_except_stmt_indent_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 681)) // token='except'
+ (a = _PyPegen_expect_token(p, 682)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -24808,13 +24849,13 @@ invalid_except_star_stmt_indent_rule(Parser *p)
expr_ty expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 681)) // token='except'
+ (a = _PyPegen_expect_token(p, 682)) // token='except'
&&
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
&&
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_22_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var = _tmp_21_rule(p), !p->error_indicator) // ['as' NAME]
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -25047,7 +25088,7 @@ invalid_as_pattern_rule(Parser *p)
if (
(or_pattern_var = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 684)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 685)) // token='as'
&&
(a = _PyPegen_expect_soft_keyword(p, "_")) // soft_keyword='"_"'
)
@@ -25077,7 +25118,7 @@ invalid_as_pattern_rule(Parser *p)
if (
(or_pattern_var = or_pattern_rule(p)) // or_pattern
&&
- (_keyword = _PyPegen_expect_token(p, 684)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 685)) // token='as'
&&
(a = expression_rule(p)) // expression
)
@@ -25176,7 +25217,7 @@ invalid_class_argument_pattern_rule(Parser *p)
asdl_pattern_seq* a;
asdl_seq* keyword_patterns_var;
if (
- (_opt_var = _tmp_147_rule(p), !p->error_indicator) // [positional_patterns ',']
+ (_opt_var = _tmp_146_rule(p), !p->error_indicator) // [positional_patterns ',']
&&
(keyword_patterns_var = keyword_patterns_rule(p)) // keyword_patterns
&&
@@ -25229,7 +25270,7 @@ invalid_if_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 686)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 687)) // token='if'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -25260,7 +25301,7 @@ invalid_if_stmt_rule(Parser *p)
expr_ty a_1;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 686)) // token='if'
+ (a = _PyPegen_expect_token(p, 687)) // token='if'
&&
(a_1 = named_expression_rule(p)) // named_expression
&&
@@ -25315,7 +25356,7 @@ invalid_elif_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 691)) // token='elif'
+ (_keyword = _PyPegen_expect_token(p, 692)) // token='elif'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -25346,7 +25387,7 @@ invalid_elif_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 691)) // token='elif'
+ (a = _PyPegen_expect_token(p, 692)) // token='elif'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -25399,7 +25440,7 @@ invalid_else_stmt_rule(Parser *p)
Token * a;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 690)) // token='else'
+ (a = _PyPegen_expect_token(p, 691)) // token='else'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -25432,13 +25473,13 @@ invalid_else_stmt_rule(Parser *p)
Token * _literal;
asdl_stmt_seq* block_var;
if (
- (_keyword = _PyPegen_expect_token(p, 690)) // token='else'
+ (_keyword = _PyPegen_expect_token(p, 691)) // token='else'
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
(block_var = block_rule(p)) // block
&&
- (_keyword_1 = _PyPegen_expect_token(p, 691)) // token='elif'
+ (_keyword_1 = _PyPegen_expect_token(p, 692)) // token='elif'
)
{
D(fprintf(stderr, "%*c+ invalid_else_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else' ':' block 'elif'"));
@@ -25485,7 +25526,7 @@ invalid_while_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 693)) // token='while'
+ (_keyword = _PyPegen_expect_token(p, 694)) // token='while'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -25516,7 +25557,7 @@ invalid_while_stmt_rule(Parser *p)
expr_ty named_expression_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 693)) // token='while'
+ (a = _PyPegen_expect_token(p, 694)) // token='while'
&&
(named_expression_var = named_expression_rule(p)) // named_expression
&&
@@ -25575,13 +25616,13 @@ invalid_for_stmt_rule(Parser *p)
expr_ty star_expressions_var;
expr_ty star_targets_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 703), !p->error_indicator) // 'async'?
&&
- (_keyword = _PyPegen_expect_token(p, 698)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 699)) // token='for'
&&
(star_targets_var = star_targets_rule(p)) // star_targets
&&
- (_keyword_1 = _PyPegen_expect_token(p, 699)) // token='in'
+ (_keyword_1 = _PyPegen_expect_token(p, 700)) // token='in'
&&
(star_expressions_var = star_expressions_rule(p)) // star_expressions
&&
@@ -25616,13 +25657,13 @@ invalid_for_stmt_rule(Parser *p)
expr_ty star_expressions_var;
expr_ty star_targets_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 703), !p->error_indicator) // 'async'?
&&
- (a = _PyPegen_expect_token(p, 698)) // token='for'
+ (a = _PyPegen_expect_token(p, 699)) // token='for'
&&
(star_targets_var = star_targets_rule(p)) // star_targets
&&
- (_keyword = _PyPegen_expect_token(p, 699)) // token='in'
+ (_keyword = _PyPegen_expect_token(p, 700)) // token='in'
&&
(star_expressions_var = star_expressions_rule(p)) // star_expressions
&&
@@ -25688,9 +25729,9 @@ invalid_def_raw_rule(Parser *p)
expr_ty name_var;
Token * newline_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 703), !p->error_indicator) // 'async'?
&&
- (a = _PyPegen_expect_token(p, 703)) // token='def'
+ (a = _PyPegen_expect_token(p, 704)) // token='def'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
@@ -25702,7 +25743,7 @@ invalid_def_raw_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
&&
- (_opt_var_3 = _tmp_27_rule(p), !p->error_indicator) // ['->' expression]
+ (_opt_var_3 = _tmp_26_rule(p), !p->error_indicator) // ['->' expression]
&&
(_literal_2 = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -25747,9 +25788,9 @@ invalid_def_raw_rule(Parser *p)
asdl_stmt_seq* block_var;
expr_ty name_var;
if (
- (_opt_var = _PyPegen_expect_token(p, 702), !p->error_indicator) // 'async'?
+ (_opt_var = _PyPegen_expect_token(p, 703), !p->error_indicator) // 'async'?
&&
- (_keyword = _PyPegen_expect_token(p, 703)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 704)) // token='def'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
@@ -25761,7 +25802,7 @@ invalid_def_raw_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
&&
- (_opt_var_3 = _tmp_27_rule(p), !p->error_indicator) // ['->' expression]
+ (_opt_var_3 = _tmp_26_rule(p), !p->error_indicator) // ['->' expression]
&&
(_literal_2 = _PyPegen_expect_forced_token(p, 11, ":")) // forced_token=':'
&&
@@ -25813,13 +25854,13 @@ invalid_class_def_raw_rule(Parser *p)
expr_ty name_var;
Token * newline_var;
if (
- (_keyword = _PyPegen_expect_token(p, 705)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 706)) // token='class'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
(_opt_var = type_params_rule(p), !p->error_indicator) // type_params?
&&
- (_opt_var_1 = _tmp_26_rule(p), !p->error_indicator) // ['(' arguments? ')']
+ (_opt_var_1 = _tmp_25_rule(p), !p->error_indicator) // ['(' arguments? ')']
&&
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
@@ -25852,13 +25893,13 @@ invalid_class_def_raw_rule(Parser *p)
expr_ty name_var;
Token * newline_var;
if (
- (a = _PyPegen_expect_token(p, 705)) // token='class'
+ (a = _PyPegen_expect_token(p, 706)) // token='class'
&&
(name_var = _PyPegen_name_token(p)) // NAME
&&
(_opt_var = type_params_rule(p), !p->error_indicator) // type_params?
&&
- (_opt_var_1 = _tmp_26_rule(p), !p->error_indicator) // ['(' arguments? ')']
+ (_opt_var_1 = _tmp_25_rule(p), !p->error_indicator) // ['(' arguments? ')']
&&
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
&&
@@ -25908,11 +25949,11 @@ invalid_double_starred_kvpairs_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_double_starred_kvpairs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ',' invalid_kvpair"));
- asdl_seq * _gather_84_var;
+ asdl_seq * _gather_83_var;
Token * _literal;
void *invalid_kvpair_var;
if (
- (_gather_84_var = _gather_84_rule(p)) // ','.double_starred_kvpair+
+ (_gather_83_var = _gather_83_rule(p)) // ','.double_starred_kvpair+
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
@@ -25920,7 +25961,7 @@ invalid_double_starred_kvpairs_rule(Parser *p)
)
{
D(fprintf(stderr, "%*c+ invalid_double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ',' invalid_kvpair"));
- _res = _PyPegen_dummy_name(p, _gather_84_var, _literal, invalid_kvpair_var);
+ _res = _PyPegen_dummy_name(p, _gather_83_var, _literal, invalid_kvpair_var);
goto done;
}
p->mark = _mark;
@@ -25973,7 +26014,7 @@ invalid_double_starred_kvpairs_rule(Parser *p)
&&
(a = _PyPegen_expect_token(p, 11)) // token=':'
&&
- _PyPegen_lookahead(1, _tmp_148_rule, p)
+ _PyPegen_lookahead(1, _tmp_147_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' &('}' | ',')"));
@@ -26083,7 +26124,7 @@ invalid_kvpair_rule(Parser *p)
&&
(a = _PyPegen_expect_token(p, 11)) // token=':'
&&
- _PyPegen_lookahead(1, _tmp_148_rule, p)
+ _PyPegen_lookahead(1, _tmp_147_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_kvpair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' &('}' | ',')"));
@@ -26371,7 +26412,7 @@ invalid_fstring_replacement_field_rule(Parser *p)
&&
(annotated_rhs_var = annotated_rhs_rule(p)) // annotated_rhs
&&
- _PyPegen_lookahead(0, _tmp_149_rule, p)
+ _PyPegen_lookahead(0, _tmp_148_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_fstring_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' annotated_rhs !('=' | '!' | ':' | '}')"));
@@ -26403,7 +26444,7 @@ invalid_fstring_replacement_field_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 22)) // token='='
&&
- _PyPegen_lookahead(0, _tmp_150_rule, p)
+ _PyPegen_lookahead(0, _tmp_149_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_fstring_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' annotated_rhs '=' !('!' | ':' | '}')"));
@@ -26467,9 +26508,9 @@ invalid_fstring_replacement_field_rule(Parser *p)
&&
(_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='?
&&
- (_opt_var_1 = _tmp_151_rule(p), !p->error_indicator) // ['!' NAME]
+ (_opt_var_1 = _tmp_150_rule(p), !p->error_indicator) // ['!' NAME]
&&
- _PyPegen_lookahead(0, _tmp_152_rule, p)
+ _PyPegen_lookahead(0, _tmp_151_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_fstring_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' annotated_rhs '='? ['!' NAME] !(':' | '}')"));
@@ -26493,7 +26534,7 @@ invalid_fstring_replacement_field_rule(Parser *p)
D(fprintf(stderr, "%*c> invalid_fstring_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' annotated_rhs '='? ['!' NAME] ':' fstring_format_spec* !'}'"));
Token * _literal;
Token * _literal_1;
- asdl_seq * _loop0_77_var;
+ asdl_seq * _loop0_76_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
void *_opt_var_1;
@@ -26506,11 +26547,11 @@ invalid_fstring_replacement_field_rule(Parser *p)
&&
(_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='?
&&
- (_opt_var_1 = _tmp_151_rule(p), !p->error_indicator) // ['!' NAME]
+ (_opt_var_1 = _tmp_150_rule(p), !p->error_indicator) // ['!' NAME]
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
&&
- (_loop0_77_var = _loop0_77_rule(p)) // fstring_format_spec*
+ (_loop0_76_var = _loop0_76_rule(p)) // fstring_format_spec*
&&
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 26) // token='}'
)
@@ -26547,7 +26588,7 @@ invalid_fstring_replacement_field_rule(Parser *p)
&&
(_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='?
&&
- (_opt_var_1 = _tmp_151_rule(p), !p->error_indicator) // ['!' NAME]
+ (_opt_var_1 = _tmp_150_rule(p), !p->error_indicator) // ['!' NAME]
&&
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 26) // token='}'
)
@@ -26594,7 +26635,7 @@ invalid_fstring_conversion_character_rule(Parser *p)
if (
(_literal = _PyPegen_expect_token(p, 54)) // token='!'
&&
- _PyPegen_lookahead(1, _tmp_152_rule, p)
+ _PyPegen_lookahead(1, _tmp_151_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_fstring_conversion_character[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' &(':' | '}')"));
@@ -26813,7 +26854,7 @@ invalid_tstring_replacement_field_rule(Parser *p)
&&
(annotated_rhs_var = annotated_rhs_rule(p)) // annotated_rhs
&&
- _PyPegen_lookahead(0, _tmp_149_rule, p)
+ _PyPegen_lookahead(0, _tmp_148_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_tstring_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' annotated_rhs !('=' | '!' | ':' | '}')"));
@@ -26845,7 +26886,7 @@ invalid_tstring_replacement_field_rule(Parser *p)
&&
(_literal_1 = _PyPegen_expect_token(p, 22)) // token='='
&&
- _PyPegen_lookahead(0, _tmp_150_rule, p)
+ _PyPegen_lookahead(0, _tmp_149_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_tstring_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' annotated_rhs '=' !('!' | ':' | '}')"));
@@ -26909,9 +26950,9 @@ invalid_tstring_replacement_field_rule(Parser *p)
&&
(_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='?
&&
- (_opt_var_1 = _tmp_151_rule(p), !p->error_indicator) // ['!' NAME]
+ (_opt_var_1 = _tmp_150_rule(p), !p->error_indicator) // ['!' NAME]
&&
- _PyPegen_lookahead(0, _tmp_152_rule, p)
+ _PyPegen_lookahead(0, _tmp_151_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_tstring_replacement_field[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' annotated_rhs '='? ['!' NAME] !(':' | '}')"));
@@ -26935,7 +26976,7 @@ invalid_tstring_replacement_field_rule(Parser *p)
D(fprintf(stderr, "%*c> invalid_tstring_replacement_field[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' annotated_rhs '='? ['!' NAME] ':' fstring_format_spec* !'}'"));
Token * _literal;
Token * _literal_1;
- asdl_seq * _loop0_77_var;
+ asdl_seq * _loop0_76_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
void *_opt_var_1;
@@ -26948,11 +26989,11 @@ invalid_tstring_replacement_field_rule(Parser *p)
&&
(_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='?
&&
- (_opt_var_1 = _tmp_151_rule(p), !p->error_indicator) // ['!' NAME]
+ (_opt_var_1 = _tmp_150_rule(p), !p->error_indicator) // ['!' NAME]
&&
(_literal_1 = _PyPegen_expect_token(p, 11)) // token=':'
&&
- (_loop0_77_var = _loop0_77_rule(p)) // fstring_format_spec*
+ (_loop0_76_var = _loop0_76_rule(p)) // fstring_format_spec*
&&
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 26) // token='}'
)
@@ -26989,7 +27030,7 @@ invalid_tstring_replacement_field_rule(Parser *p)
&&
(_opt_var = _PyPegen_expect_token(p, 22), !p->error_indicator) // '='?
&&
- (_opt_var_1 = _tmp_151_rule(p), !p->error_indicator) // ['!' NAME]
+ (_opt_var_1 = _tmp_150_rule(p), !p->error_indicator) // ['!' NAME]
&&
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 26) // token='}'
)
@@ -27036,7 +27077,7 @@ invalid_tstring_conversion_character_rule(Parser *p)
if (
(_literal = _PyPegen_expect_token(p, 54)) // token='!'
&&
- _PyPegen_lookahead(1, _tmp_152_rule, p)
+ _PyPegen_lookahead(1, _tmp_151_rule, p)
)
{
D(fprintf(stderr, "%*c+ invalid_tstring_conversion_character[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' &(':' | '}')"));
@@ -27103,16 +27144,16 @@ invalid_arithmetic_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_arithmetic[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "sum ('+' | '-' | '*' | '/' | '%' | '//' | '@') 'not' inversion"));
- void *_tmp_153_var;
+ void *_tmp_152_var;
Token * a;
expr_ty b;
expr_ty sum_var;
if (
(sum_var = sum_rule(p)) // sum
&&
- (_tmp_153_var = _tmp_153_rule(p)) // '+' | '-' | '*' | '/' | '%' | '//' | '@'
+ (_tmp_152_var = _tmp_152_rule(p)) // '+' | '-' | '*' | '/' | '%' | '//' | '@'
&&
- (a = _PyPegen_expect_token(p, 707)) // token='not'
+ (a = _PyPegen_expect_token(p, 708)) // token='not'
&&
(b = inversion_rule(p)) // inversion
)
@@ -27155,13 +27196,13 @@ invalid_factor_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> invalid_factor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('+' | '-' | '~') 'not' factor"));
- void *_tmp_154_var;
+ void *_tmp_153_var;
Token * a;
expr_ty b;
if (
- (_tmp_154_var = _tmp_154_rule(p)) // '+' | '-' | '~'
+ (_tmp_153_var = _tmp_153_rule(p)) // '+' | '-' | '~'
&&
- (a = _PyPegen_expect_token(p, 707)) // token='not'
+ (a = _PyPegen_expect_token(p, 708)) // token='not'
&&
(b = factor_rule(p)) // factor
)
@@ -27508,7 +27549,7 @@ _tmp_5_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 638)) // token='import'
+ (_keyword = _PyPegen_expect_token(p, 639)) // token='import'
)
{
D(fprintf(stderr, "%*c+ _tmp_5[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'"));
@@ -27527,7 +27568,7 @@ _tmp_5_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 637)) // token='from'
+ (_keyword = _PyPegen_expect_token(p, 638)) // token='from'
)
{
D(fprintf(stderr, "%*c+ _tmp_5[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'"));
@@ -27565,7 +27606,7 @@ _tmp_6_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 703)) // token='def'
+ (_keyword = _PyPegen_expect_token(p, 704)) // token='def'
)
{
D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'"));
@@ -27603,7 +27644,7 @@ _tmp_6_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 702)) // token='async'
+ (_keyword = _PyPegen_expect_token(p, 703)) // token='async'
)
{
D(fprintf(stderr, "%*c+ _tmp_6[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'"));
@@ -27641,7 +27682,7 @@ _tmp_7_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 705)) // token='class'
+ (_keyword = _PyPegen_expect_token(p, 706)) // token='class'
)
{
D(fprintf(stderr, "%*c+ _tmp_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'"));
@@ -27698,7 +27739,7 @@ _tmp_8_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 651)) // token='with'
+ (_keyword = _PyPegen_expect_token(p, 652)) // token='with'
)
{
D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'"));
@@ -27717,7 +27758,7 @@ _tmp_8_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 702)) // token='async'
+ (_keyword = _PyPegen_expect_token(p, 703)) // token='async'
)
{
D(fprintf(stderr, "%*c+ _tmp_8[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'"));
@@ -27755,7 +27796,7 @@ _tmp_9_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 698)) // token='for'
+ (_keyword = _PyPegen_expect_token(p, 699)) // token='for'
)
{
D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'"));
@@ -27774,7 +27815,7 @@ _tmp_9_rule(Parser *p)
D(fprintf(stderr, "%*c> _tmp_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'async'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 702)) // token='async'
+ (_keyword = _PyPegen_expect_token(p, 703)) // token='async'
)
{
D(fprintf(stderr, "%*c+ _tmp_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'async'"));
@@ -27933,12 +27974,12 @@ _loop1_12_rule(Parser *p)
return NULL;
}
D(fprintf(stderr, "%*c> _loop1_12[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')"));
- void *_tmp_155_var;
+ void *_tmp_154_var;
while (
- (_tmp_155_var = _tmp_155_rule(p)) // star_targets '='
+ (_tmp_154_var = _tmp_154_rule(p)) // star_targets '='
)
{
- _res = _tmp_155_var;
+ _res = _tmp_154_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -27977,55 +28018,9 @@ _loop1_12_rule(Parser *p)
return _seq;
}
-// _tmp_13: 'from' expression
-static void *
-_tmp_13_rule(Parser *p)
-{
- if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
- _Pypegen_stack_overflow(p);
- }
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- void * _res = NULL;
- int _mark = p->mark;
- { // 'from' expression
- if (p->error_indicator) {
- p->level--;
- return NULL;
- }
- D(fprintf(stderr, "%*c> _tmp_13[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' expression"));
- Token * _keyword;
- expr_ty z;
- if (
- (_keyword = _PyPegen_expect_token(p, 637)) // token='from'
- &&
- (z = expression_rule(p)) // expression
- )
- {
- D(fprintf(stderr, "%*c+ _tmp_13[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' expression"));
- _res = z;
- if (_res == NULL && PyErr_Occurred()) {
- p->error_indicator = 1;
- p->level--;
- return NULL;
- }
- goto done;
- }
- p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_13[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from' expression"));
- }
- _res = NULL;
- done:
- p->level--;
- return _res;
-}
-
-// _loop0_14: ',' NAME
+// _loop0_13: ',' NAME
static asdl_seq *
-_loop0_14_rule(Parser *p)
+_loop0_13_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28050,7 +28045,7 @@ _loop0_14_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME"));
+ D(fprintf(stderr, "%*c> _loop0_13[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME"));
Token * _literal;
expr_ty elem;
while (
@@ -28082,7 +28077,7 @@ _loop0_14_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_14[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_13[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' NAME"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -28099,9 +28094,9 @@ _loop0_14_rule(Parser *p)
return _seq;
}
-// _gather_15: NAME _loop0_14
+// _gather_14: NAME _loop0_13
static asdl_seq *
-_gather_15_rule(Parser *p)
+_gather_14_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28112,27 +28107,27 @@ _gather_15_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // NAME _loop0_14
+ { // NAME _loop0_13
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_14"));
+ D(fprintf(stderr, "%*c> _gather_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_13"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = _PyPegen_name_token(p)) // NAME
&&
- (seq = _loop0_14_rule(p)) // _loop0_14
+ (seq = _loop0_13_rule(p)) // _loop0_13
)
{
- D(fprintf(stderr, "%*c+ _gather_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_14"));
+ D(fprintf(stderr, "%*c+ _gather_14[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_13"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_15[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_14"));
+ D(fprintf(stderr, "%*c%s _gather_14[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_13"));
}
_res = NULL;
done:
@@ -28140,9 +28135,9 @@ _gather_15_rule(Parser *p)
return _res;
}
-// _tmp_16: ';' | NEWLINE
+// _tmp_15: ';' | NEWLINE
static void *
-_tmp_16_rule(Parser *p)
+_tmp_15_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28158,18 +28153,18 @@ _tmp_16_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';'"));
+ D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 13)) // token=';'
)
{
- D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "';'"));
+ D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "';'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "';'"));
}
{ // NEWLINE
@@ -28177,18 +28172,18 @@ _tmp_16_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
+ D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
Token * newline_var;
if (
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
{
- D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
+ D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
_res = newline_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE"));
}
_res = NULL;
@@ -28197,9 +28192,9 @@ _tmp_16_rule(Parser *p)
return _res;
}
-// _tmp_17: ',' expression
+// _tmp_16: ',' expression
static void *
-_tmp_17_rule(Parser *p)
+_tmp_16_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28215,7 +28210,7 @@ _tmp_17_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
Token * _literal;
expr_ty z;
if (
@@ -28224,7 +28219,7 @@ _tmp_17_rule(Parser *p)
(z = expression_rule(p)) // expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_17[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -28234,7 +28229,7 @@ _tmp_17_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_17[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression"));
}
_res = NULL;
@@ -28243,9 +28238,9 @@ _tmp_17_rule(Parser *p)
return _res;
}
-// _loop0_18: ('.' | '...')
+// _loop0_17: ('.' | '...')
static asdl_seq *
-_loop0_18_rule(Parser *p)
+_loop0_17_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28270,13 +28265,13 @@ _loop0_18_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_18[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')"));
- void *_tmp_156_var;
+ D(fprintf(stderr, "%*c> _loop0_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')"));
+ void *_tmp_155_var;
while (
- (_tmp_156_var = _tmp_156_rule(p)) // '.' | '...'
+ (_tmp_155_var = _tmp_155_rule(p)) // '.' | '...'
)
{
- _res = _tmp_156_var;
+ _res = _tmp_155_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -28293,7 +28288,7 @@ _loop0_18_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_18[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_17[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('.' | '...')"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -28310,9 +28305,9 @@ _loop0_18_rule(Parser *p)
return _seq;
}
-// _loop1_19: ('.' | '...')
+// _loop1_18: ('.' | '...')
static asdl_seq *
-_loop1_19_rule(Parser *p)
+_loop1_18_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28337,13 +28332,13 @@ _loop1_19_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_19[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')"));
- void *_tmp_156_var;
+ D(fprintf(stderr, "%*c> _loop1_18[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')"));
+ void *_tmp_155_var;
while (
- (_tmp_156_var = _tmp_156_rule(p)) // '.' | '...'
+ (_tmp_155_var = _tmp_155_rule(p)) // '.' | '...'
)
{
- _res = _tmp_156_var;
+ _res = _tmp_155_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -28360,7 +28355,7 @@ _loop1_19_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_19[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_18[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('.' | '...')"));
}
if (_n == 0 || p->error_indicator) {
@@ -28382,9 +28377,9 @@ _loop1_19_rule(Parser *p)
return _seq;
}
-// _loop0_20: ',' import_from_as_name
+// _loop0_19: ',' import_from_as_name
static asdl_seq *
-_loop0_20_rule(Parser *p)
+_loop0_19_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28409,7 +28404,7 @@ _loop0_20_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_20[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' import_from_as_name"));
+ D(fprintf(stderr, "%*c> _loop0_19[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' import_from_as_name"));
Token * _literal;
alias_ty elem;
while (
@@ -28441,7 +28436,7 @@ _loop0_20_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_20[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_19[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' import_from_as_name"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -28458,9 +28453,9 @@ _loop0_20_rule(Parser *p)
return _seq;
}
-// _gather_21: import_from_as_name _loop0_20
+// _gather_20: import_from_as_name _loop0_19
static asdl_seq *
-_gather_21_rule(Parser *p)
+_gather_20_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28471,27 +28466,27 @@ _gather_21_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // import_from_as_name _loop0_20
+ { // import_from_as_name _loop0_19
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_21[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_20"));
+ D(fprintf(stderr, "%*c> _gather_20[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_19"));
alias_ty elem;
asdl_seq * seq;
if (
(elem = import_from_as_name_rule(p)) // import_from_as_name
&&
- (seq = _loop0_20_rule(p)) // _loop0_20
+ (seq = _loop0_19_rule(p)) // _loop0_19
)
{
- D(fprintf(stderr, "%*c+ _gather_21[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_20"));
+ D(fprintf(stderr, "%*c+ _gather_20[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_19"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_21[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_from_as_name _loop0_20"));
+ D(fprintf(stderr, "%*c%s _gather_20[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_from_as_name _loop0_19"));
}
_res = NULL;
done:
@@ -28499,9 +28494,9 @@ _gather_21_rule(Parser *p)
return _res;
}
-// _tmp_22: 'as' NAME
+// _tmp_21: 'as' NAME
static void *
-_tmp_22_rule(Parser *p)
+_tmp_21_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28517,16 +28512,16 @@ _tmp_22_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_21[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 684)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 685)) // token='as'
&&
(z = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_22[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_21[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -28536,7 +28531,7 @@ _tmp_22_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_22[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_21[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME"));
}
_res = NULL;
@@ -28545,9 +28540,9 @@ _tmp_22_rule(Parser *p)
return _res;
}
-// _loop0_23: ',' dotted_as_name
+// _loop0_22: ',' dotted_as_name
static asdl_seq *
-_loop0_23_rule(Parser *p)
+_loop0_22_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28572,7 +28567,7 @@ _loop0_23_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_as_name"));
+ D(fprintf(stderr, "%*c> _loop0_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_as_name"));
Token * _literal;
alias_ty elem;
while (
@@ -28604,7 +28599,7 @@ _loop0_23_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_23[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_22[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' dotted_as_name"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -28621,9 +28616,9 @@ _loop0_23_rule(Parser *p)
return _seq;
}
-// _gather_24: dotted_as_name _loop0_23
+// _gather_23: dotted_as_name _loop0_22
static asdl_seq *
-_gather_24_rule(Parser *p)
+_gather_23_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28634,27 +28629,27 @@ _gather_24_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // dotted_as_name _loop0_23
+ { // dotted_as_name _loop0_22
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_23"));
+ D(fprintf(stderr, "%*c> _gather_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_22"));
alias_ty elem;
asdl_seq * seq;
if (
(elem = dotted_as_name_rule(p)) // dotted_as_name
&&
- (seq = _loop0_23_rule(p)) // _loop0_23
+ (seq = _loop0_22_rule(p)) // _loop0_22
)
{
- D(fprintf(stderr, "%*c+ _gather_24[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_23"));
+ D(fprintf(stderr, "%*c+ _gather_23[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_22"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_24[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_as_name _loop0_23"));
+ D(fprintf(stderr, "%*c%s _gather_23[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_as_name _loop0_22"));
}
_res = NULL;
done:
@@ -28662,9 +28657,9 @@ _gather_24_rule(Parser *p)
return _res;
}
-// _loop1_25: ('@' named_expression NEWLINE)
+// _loop1_24: ('@' named_expression NEWLINE)
static asdl_seq *
-_loop1_25_rule(Parser *p)
+_loop1_24_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28689,13 +28684,13 @@ _loop1_25_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_25[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)"));
- void *_tmp_157_var;
+ D(fprintf(stderr, "%*c> _loop1_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)"));
+ void *_tmp_156_var;
while (
- (_tmp_157_var = _tmp_157_rule(p)) // '@' named_expression NEWLINE
+ (_tmp_156_var = _tmp_156_rule(p)) // '@' named_expression NEWLINE
)
{
- _res = _tmp_157_var;
+ _res = _tmp_156_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -28712,7 +28707,7 @@ _loop1_25_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_25[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_24[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('@' named_expression NEWLINE)"));
}
if (_n == 0 || p->error_indicator) {
@@ -28734,9 +28729,9 @@ _loop1_25_rule(Parser *p)
return _seq;
}
-// _tmp_26: '(' arguments? ')'
+// _tmp_25: '(' arguments? ')'
static void *
-_tmp_26_rule(Parser *p)
+_tmp_25_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28752,7 +28747,7 @@ _tmp_26_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_26[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
+ D(fprintf(stderr, "%*c> _tmp_25[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
Token * _literal;
Token * _literal_1;
void *z;
@@ -28764,7 +28759,7 @@ _tmp_26_rule(Parser *p)
(_literal_1 = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_26[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
+ D(fprintf(stderr, "%*c+ _tmp_25[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -28774,7 +28769,7 @@ _tmp_26_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_26[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_25[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'"));
}
_res = NULL;
@@ -28783,9 +28778,9 @@ _tmp_26_rule(Parser *p)
return _res;
}
-// _tmp_27: '->' expression
+// _tmp_26: '->' expression
static void *
-_tmp_27_rule(Parser *p)
+_tmp_26_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28801,7 +28796,7 @@ _tmp_27_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_27[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression"));
+ D(fprintf(stderr, "%*c> _tmp_26[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression"));
Token * _literal;
expr_ty z;
if (
@@ -28810,7 +28805,7 @@ _tmp_27_rule(Parser *p)
(z = expression_rule(p)) // expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_27[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression"));
+ D(fprintf(stderr, "%*c+ _tmp_26[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -28820,7 +28815,7 @@ _tmp_27_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_27[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_26[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression"));
}
_res = NULL;
@@ -28829,9 +28824,9 @@ _tmp_27_rule(Parser *p)
return _res;
}
-// _loop0_28: param_no_default
+// _loop0_27: param_no_default
static asdl_seq *
-_loop0_28_rule(Parser *p)
+_loop0_27_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28856,7 +28851,7 @@ _loop0_28_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_28[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop0_27[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -28879,7 +28874,7 @@ _loop0_28_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_28[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_27[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -28896,9 +28891,9 @@ _loop0_28_rule(Parser *p)
return _seq;
}
-// _loop0_29: param_with_default
+// _loop0_28: param_with_default
static asdl_seq *
-_loop0_29_rule(Parser *p)
+_loop0_28_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28923,7 +28918,7 @@ _loop0_29_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_29[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
+ D(fprintf(stderr, "%*c> _loop0_28[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
NameDefaultPair* param_with_default_var;
while (
(param_with_default_var = param_with_default_rule(p)) // param_with_default
@@ -28946,7 +28941,7 @@ _loop0_29_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_29[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_28[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -28963,9 +28958,9 @@ _loop0_29_rule(Parser *p)
return _seq;
}
-// _loop1_30: param_no_default
+// _loop1_29: param_no_default
static asdl_seq *
-_loop1_30_rule(Parser *p)
+_loop1_29_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -28990,7 +28985,7 @@ _loop1_30_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_30[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _loop1_29[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
while (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
@@ -29013,7 +29008,7 @@ _loop1_30_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_30[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_29[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -29035,9 +29030,9 @@ _loop1_30_rule(Parser *p)
return _seq;
}
-// _loop1_31: param_with_default
+// _loop1_30: param_with_default
static asdl_seq *
-_loop1_31_rule(Parser *p)
+_loop1_30_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -29062,7 +29057,7 @@ _loop1_31_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_31[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
+ D(fprintf(stderr, "%*c> _loop1_30[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default"));
NameDefaultPair* param_with_default_var;
while (
(param_with_default_var = param_with_default_rule(p)) // param_with_default
@@ -29085,7 +29080,7 @@ _loop1_31_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_31[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_30[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -29107,9 +29102,9 @@ _loop1_31_rule(Parser *p)
return _seq;
}
-// _loop0_32: param_maybe_default
+// _loop0_31: param_maybe_default
static asdl_seq *
-_loop0_32_rule(Parser *p)
+_loop0_31_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -29134,7 +29129,7 @@ _loop0_32_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_31[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -29157,7 +29152,7 @@ _loop0_32_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_32[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_31[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -29174,9 +29169,9 @@ _loop0_32_rule(Parser *p)
return _seq;
}
-// _loop1_33: param_maybe_default
+// _loop1_32: param_maybe_default
static asdl_seq *
-_loop1_33_rule(Parser *p)
+_loop1_32_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -29201,7 +29196,7 @@ _loop1_33_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_33[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop1_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default"));
NameDefaultPair* param_maybe_default_var;
while (
(param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default
@@ -29224,7 +29219,7 @@ _loop1_33_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_33[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_32[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -29246,9 +29241,9 @@ _loop1_33_rule(Parser *p)
return _seq;
}
-// _loop0_34: ',' with_item
+// _loop0_33: ',' with_item
static asdl_seq *
-_loop0_34_rule(Parser *p)
+_loop0_33_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -29273,7 +29268,7 @@ _loop0_34_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_34[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
+ D(fprintf(stderr, "%*c> _loop0_33[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item"));
Token * _literal;
withitem_ty elem;
while (
@@ -29305,7 +29300,7 @@ _loop0_34_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_34[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_33[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -29322,9 +29317,9 @@ _loop0_34_rule(Parser *p)
return _seq;
}
-// _gather_35: with_item _loop0_34
+// _gather_34: with_item _loop0_33
static asdl_seq *
-_gather_35_rule(Parser *p)
+_gather_34_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -29335,27 +29330,27 @@ _gather_35_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // with_item _loop0_34
+ { // with_item _loop0_33
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_35[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_34"));
+ D(fprintf(stderr, "%*c> _gather_34[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_33"));
withitem_ty elem;
asdl_seq * seq;
if (
(elem = with_item_rule(p)) // with_item
&&
- (seq = _loop0_34_rule(p)) // _loop0_34
+ (seq = _loop0_33_rule(p)) // _loop0_33
)
{
- D(fprintf(stderr, "%*c+ _gather_35[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_34"));
+ D(fprintf(stderr, "%*c+ _gather_34[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_33"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_35[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_34"));
+ D(fprintf(stderr, "%*c%s _gather_34[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_33"));
}
_res = NULL;
done:
@@ -29363,9 +29358,9 @@ _gather_35_rule(Parser *p)
return _res;
}
-// _tmp_36: ',' | ')' | ':'
+// _tmp_35: ',' | ')' | ':'
static void *
-_tmp_36_rule(Parser *p)
+_tmp_35_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -29381,18 +29376,18 @@ _tmp_36_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_36[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_35[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_36[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_35[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_36[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_35[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
{ // ')'
@@ -29400,18 +29395,18 @@ _tmp_36_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_36[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_35[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_36[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_35[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_36[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_35[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
{ // ':'
@@ -29419,18 +29414,18 @@ _tmp_36_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_36[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_35[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_36[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_35[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_36[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_35[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
_res = NULL;
@@ -29439,9 +29434,9 @@ _tmp_36_rule(Parser *p)
return _res;
}
-// _loop1_37: except_block
+// _loop1_36: except_block
static asdl_seq *
-_loop1_37_rule(Parser *p)
+_loop1_36_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -29466,7 +29461,7 @@ _loop1_37_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_37[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block"));
+ D(fprintf(stderr, "%*c> _loop1_36[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block"));
excepthandler_ty except_block_var;
while (
(except_block_var = except_block_rule(p)) // except_block
@@ -29489,7 +29484,7 @@ _loop1_37_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_37[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_36[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_block"));
}
if (_n == 0 || p->error_indicator) {
@@ -29511,9 +29506,9 @@ _loop1_37_rule(Parser *p)
return _seq;
}
-// _loop1_38: except_star_block
+// _loop1_37: except_star_block
static asdl_seq *
-_loop1_38_rule(Parser *p)
+_loop1_37_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -29538,7 +29533,7 @@ _loop1_38_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_38[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block"));
+ D(fprintf(stderr, "%*c> _loop1_37[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_star_block"));
excepthandler_ty except_star_block_var;
while (
(except_star_block_var = except_star_block_rule(p)) // except_star_block
@@ -29561,7 +29556,7 @@ _loop1_38_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_38[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_37[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "except_star_block"));
}
if (_n == 0 || p->error_indicator) {
@@ -29583,9 +29578,9 @@ _loop1_38_rule(Parser *p)
return _seq;
}
-// _loop1_39: case_block
+// _loop1_38: case_block
static asdl_seq *
-_loop1_39_rule(Parser *p)
+_loop1_38_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -29610,7 +29605,7 @@ _loop1_39_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_39[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "case_block"));
+ D(fprintf(stderr, "%*c> _loop1_38[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "case_block"));
match_case_ty case_block_var;
while (
(case_block_var = case_block_rule(p)) // case_block
@@ -29633,7 +29628,7 @@ _loop1_39_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_39[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_38[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "case_block"));
}
if (_n == 0 || p->error_indicator) {
@@ -29655,9 +29650,9 @@ _loop1_39_rule(Parser *p)
return _seq;
}
-// _loop0_40: '|' closed_pattern
+// _loop0_39: '|' closed_pattern
static asdl_seq *
-_loop0_40_rule(Parser *p)
+_loop0_39_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -29682,7 +29677,7 @@ _loop0_40_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_40[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'|' closed_pattern"));
+ D(fprintf(stderr, "%*c> _loop0_39[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'|' closed_pattern"));
Token * _literal;
pattern_ty elem;
while (
@@ -29714,7 +29709,7 @@ _loop0_40_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_40[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_39[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'|' closed_pattern"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -29731,9 +29726,9 @@ _loop0_40_rule(Parser *p)
return _seq;
}
-// _gather_41: closed_pattern _loop0_40
+// _gather_40: closed_pattern _loop0_39
static asdl_seq *
-_gather_41_rule(Parser *p)
+_gather_40_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -29744,27 +29739,27 @@ _gather_41_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // closed_pattern _loop0_40
+ { // closed_pattern _loop0_39
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_41[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "closed_pattern _loop0_40"));
+ D(fprintf(stderr, "%*c> _gather_40[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "closed_pattern _loop0_39"));
pattern_ty elem;
asdl_seq * seq;
if (
(elem = closed_pattern_rule(p)) // closed_pattern
&&
- (seq = _loop0_40_rule(p)) // _loop0_40
+ (seq = _loop0_39_rule(p)) // _loop0_39
)
{
- D(fprintf(stderr, "%*c+ _gather_41[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "closed_pattern _loop0_40"));
+ D(fprintf(stderr, "%*c+ _gather_40[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "closed_pattern _loop0_39"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_41[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "closed_pattern _loop0_40"));
+ D(fprintf(stderr, "%*c%s _gather_40[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "closed_pattern _loop0_39"));
}
_res = NULL;
done:
@@ -29772,9 +29767,9 @@ _gather_41_rule(Parser *p)
return _res;
}
-// _tmp_42: '+' | '-'
+// _tmp_41: '+' | '-'
static void *
-_tmp_42_rule(Parser *p)
+_tmp_41_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -29790,18 +29785,18 @@ _tmp_42_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_42[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+'"));
+ D(fprintf(stderr, "%*c> _tmp_41[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 14)) // token='+'
)
{
- D(fprintf(stderr, "%*c+ _tmp_42[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+'"));
+ D(fprintf(stderr, "%*c+ _tmp_41[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_42[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_41[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'+'"));
}
{ // '-'
@@ -29809,18 +29804,18 @@ _tmp_42_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_42[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-'"));
+ D(fprintf(stderr, "%*c> _tmp_41[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 15)) // token='-'
)
{
- D(fprintf(stderr, "%*c+ _tmp_42[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-'"));
+ D(fprintf(stderr, "%*c+ _tmp_41[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_42[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_41[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'-'"));
}
_res = NULL;
@@ -29829,9 +29824,9 @@ _tmp_42_rule(Parser *p)
return _res;
}
-// _tmp_43: STRING | FSTRING_START | TSTRING_START
+// _tmp_42: STRING | FSTRING_START | TSTRING_START
static void *
-_tmp_43_rule(Parser *p)
+_tmp_42_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -29847,18 +29842,18 @@ _tmp_43_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING"));
+ D(fprintf(stderr, "%*c> _tmp_42[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING"));
expr_ty string_var;
if (
(string_var = _PyPegen_string_token(p)) // STRING
)
{
- D(fprintf(stderr, "%*c+ _tmp_43[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "STRING"));
+ D(fprintf(stderr, "%*c+ _tmp_42[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "STRING"));
_res = string_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_43[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_42[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING"));
}
{ // FSTRING_START
@@ -29866,18 +29861,18 @@ _tmp_43_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_START"));
+ D(fprintf(stderr, "%*c> _tmp_42[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "FSTRING_START"));
Token * fstring_start_var;
if (
(fstring_start_var = _PyPegen_expect_token(p, FSTRING_START)) // token='FSTRING_START'
)
{
- D(fprintf(stderr, "%*c+ _tmp_43[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_START"));
+ D(fprintf(stderr, "%*c+ _tmp_42[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "FSTRING_START"));
_res = fstring_start_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_43[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_42[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "FSTRING_START"));
}
{ // TSTRING_START
@@ -29885,18 +29880,18 @@ _tmp_43_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "TSTRING_START"));
+ D(fprintf(stderr, "%*c> _tmp_42[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "TSTRING_START"));
Token * tstring_start_var;
if (
(tstring_start_var = _PyPegen_expect_token(p, TSTRING_START)) // token='TSTRING_START'
)
{
- D(fprintf(stderr, "%*c+ _tmp_43[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "TSTRING_START"));
+ D(fprintf(stderr, "%*c+ _tmp_42[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "TSTRING_START"));
_res = tstring_start_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_43[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_42[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "TSTRING_START"));
}
_res = NULL;
@@ -29905,9 +29900,9 @@ _tmp_43_rule(Parser *p)
return _res;
}
-// _tmp_44: '.' | '(' | '='
+// _tmp_43: '.' | '(' | '='
static void *
-_tmp_44_rule(Parser *p)
+_tmp_43_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -29923,18 +29918,18 @@ _tmp_44_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_44[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'"));
+ D(fprintf(stderr, "%*c> _tmp_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 23)) // token='.'
)
{
- D(fprintf(stderr, "%*c+ _tmp_44[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'"));
+ D(fprintf(stderr, "%*c+ _tmp_43[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_44[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_43[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'"));
}
{ // '('
@@ -29942,18 +29937,18 @@ _tmp_44_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_44[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('"));
+ D(fprintf(stderr, "%*c> _tmp_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 7)) // token='('
)
{
- D(fprintf(stderr, "%*c+ _tmp_44[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('"));
+ D(fprintf(stderr, "%*c+ _tmp_43[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_44[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_43[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('"));
}
{ // '='
@@ -29961,18 +29956,18 @@ _tmp_44_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_44[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c> _tmp_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_44[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c+ _tmp_43[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_44[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_43[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='"));
}
_res = NULL;
@@ -29981,9 +29976,9 @@ _tmp_44_rule(Parser *p)
return _res;
}
-// _loop0_45: ',' maybe_star_pattern
+// _loop0_44: ',' maybe_star_pattern
static asdl_seq *
-_loop0_45_rule(Parser *p)
+_loop0_44_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30008,7 +30003,7 @@ _loop0_45_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_45[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' maybe_star_pattern"));
+ D(fprintf(stderr, "%*c> _loop0_44[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' maybe_star_pattern"));
Token * _literal;
pattern_ty elem;
while (
@@ -30040,7 +30035,7 @@ _loop0_45_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_45[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_44[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' maybe_star_pattern"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -30057,9 +30052,9 @@ _loop0_45_rule(Parser *p)
return _seq;
}
-// _gather_46: maybe_star_pattern _loop0_45
+// _gather_45: maybe_star_pattern _loop0_44
static asdl_seq *
-_gather_46_rule(Parser *p)
+_gather_45_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30070,27 +30065,27 @@ _gather_46_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // maybe_star_pattern _loop0_45
+ { // maybe_star_pattern _loop0_44
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_46[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "maybe_star_pattern _loop0_45"));
+ D(fprintf(stderr, "%*c> _gather_45[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "maybe_star_pattern _loop0_44"));
pattern_ty elem;
asdl_seq * seq;
if (
(elem = maybe_star_pattern_rule(p)) // maybe_star_pattern
&&
- (seq = _loop0_45_rule(p)) // _loop0_45
+ (seq = _loop0_44_rule(p)) // _loop0_44
)
{
- D(fprintf(stderr, "%*c+ _gather_46[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "maybe_star_pattern _loop0_45"));
+ D(fprintf(stderr, "%*c+ _gather_45[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "maybe_star_pattern _loop0_44"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_46[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "maybe_star_pattern _loop0_45"));
+ D(fprintf(stderr, "%*c%s _gather_45[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "maybe_star_pattern _loop0_44"));
}
_res = NULL;
done:
@@ -30098,9 +30093,9 @@ _gather_46_rule(Parser *p)
return _res;
}
-// _loop0_47: ',' key_value_pattern
+// _loop0_46: ',' key_value_pattern
static asdl_seq *
-_loop0_47_rule(Parser *p)
+_loop0_46_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30125,7 +30120,7 @@ _loop0_47_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_47[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' key_value_pattern"));
+ D(fprintf(stderr, "%*c> _loop0_46[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' key_value_pattern"));
Token * _literal;
KeyPatternPair* elem;
while (
@@ -30157,7 +30152,7 @@ _loop0_47_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_47[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_46[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' key_value_pattern"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -30174,9 +30169,9 @@ _loop0_47_rule(Parser *p)
return _seq;
}
-// _gather_48: key_value_pattern _loop0_47
+// _gather_47: key_value_pattern _loop0_46
static asdl_seq *
-_gather_48_rule(Parser *p)
+_gather_47_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30187,27 +30182,27 @@ _gather_48_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // key_value_pattern _loop0_47
+ { // key_value_pattern _loop0_46
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_48[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "key_value_pattern _loop0_47"));
+ D(fprintf(stderr, "%*c> _gather_47[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "key_value_pattern _loop0_46"));
KeyPatternPair* elem;
asdl_seq * seq;
if (
(elem = key_value_pattern_rule(p)) // key_value_pattern
&&
- (seq = _loop0_47_rule(p)) // _loop0_47
+ (seq = _loop0_46_rule(p)) // _loop0_46
)
{
- D(fprintf(stderr, "%*c+ _gather_48[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "key_value_pattern _loop0_47"));
+ D(fprintf(stderr, "%*c+ _gather_47[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "key_value_pattern _loop0_46"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_48[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "key_value_pattern _loop0_47"));
+ D(fprintf(stderr, "%*c%s _gather_47[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "key_value_pattern _loop0_46"));
}
_res = NULL;
done:
@@ -30215,9 +30210,9 @@ _gather_48_rule(Parser *p)
return _res;
}
-// _tmp_49: literal_expr | attr
+// _tmp_48: literal_expr | attr
static void *
-_tmp_49_rule(Parser *p)
+_tmp_48_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30233,18 +30228,18 @@ _tmp_49_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_49[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "literal_expr"));
+ D(fprintf(stderr, "%*c> _tmp_48[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "literal_expr"));
expr_ty literal_expr_var;
if (
(literal_expr_var = literal_expr_rule(p)) // literal_expr
)
{
- D(fprintf(stderr, "%*c+ _tmp_49[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "literal_expr"));
+ D(fprintf(stderr, "%*c+ _tmp_48[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "literal_expr"));
_res = literal_expr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_49[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_48[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "literal_expr"));
}
{ // attr
@@ -30252,18 +30247,18 @@ _tmp_49_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_49[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "attr"));
+ D(fprintf(stderr, "%*c> _tmp_48[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "attr"));
expr_ty attr_var;
if (
(attr_var = attr_rule(p)) // attr
)
{
- D(fprintf(stderr, "%*c+ _tmp_49[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "attr"));
+ D(fprintf(stderr, "%*c+ _tmp_48[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "attr"));
_res = attr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_49[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_48[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "attr"));
}
_res = NULL;
@@ -30272,9 +30267,9 @@ _tmp_49_rule(Parser *p)
return _res;
}
-// _loop0_50: ',' pattern
+// _loop0_49: ',' pattern
static asdl_seq *
-_loop0_50_rule(Parser *p)
+_loop0_49_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30299,7 +30294,7 @@ _loop0_50_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_50[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' pattern"));
+ D(fprintf(stderr, "%*c> _loop0_49[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' pattern"));
Token * _literal;
pattern_ty elem;
while (
@@ -30331,7 +30326,7 @@ _loop0_50_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_50[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_49[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' pattern"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -30348,9 +30343,9 @@ _loop0_50_rule(Parser *p)
return _seq;
}
-// _gather_51: pattern _loop0_50
+// _gather_50: pattern _loop0_49
static asdl_seq *
-_gather_51_rule(Parser *p)
+_gather_50_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30361,27 +30356,27 @@ _gather_51_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // pattern _loop0_50
+ { // pattern _loop0_49
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_51[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "pattern _loop0_50"));
+ D(fprintf(stderr, "%*c> _gather_50[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "pattern _loop0_49"));
pattern_ty elem;
asdl_seq * seq;
if (
(elem = pattern_rule(p)) // pattern
&&
- (seq = _loop0_50_rule(p)) // _loop0_50
+ (seq = _loop0_49_rule(p)) // _loop0_49
)
{
- D(fprintf(stderr, "%*c+ _gather_51[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "pattern _loop0_50"));
+ D(fprintf(stderr, "%*c+ _gather_50[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "pattern _loop0_49"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_51[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "pattern _loop0_50"));
+ D(fprintf(stderr, "%*c%s _gather_50[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "pattern _loop0_49"));
}
_res = NULL;
done:
@@ -30389,9 +30384,9 @@ _gather_51_rule(Parser *p)
return _res;
}
-// _loop0_52: ',' keyword_pattern
+// _loop0_51: ',' keyword_pattern
static asdl_seq *
-_loop0_52_rule(Parser *p)
+_loop0_51_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30416,7 +30411,7 @@ _loop0_52_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_52[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' keyword_pattern"));
+ D(fprintf(stderr, "%*c> _loop0_51[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' keyword_pattern"));
Token * _literal;
KeyPatternPair* elem;
while (
@@ -30448,7 +30443,7 @@ _loop0_52_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_52[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_51[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' keyword_pattern"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -30465,9 +30460,9 @@ _loop0_52_rule(Parser *p)
return _seq;
}
-// _gather_53: keyword_pattern _loop0_52
+// _gather_52: keyword_pattern _loop0_51
static asdl_seq *
-_gather_53_rule(Parser *p)
+_gather_52_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30478,27 +30473,27 @@ _gather_53_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // keyword_pattern _loop0_52
+ { // keyword_pattern _loop0_51
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_53[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "keyword_pattern _loop0_52"));
+ D(fprintf(stderr, "%*c> _gather_52[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "keyword_pattern _loop0_51"));
KeyPatternPair* elem;
asdl_seq * seq;
if (
(elem = keyword_pattern_rule(p)) // keyword_pattern
&&
- (seq = _loop0_52_rule(p)) // _loop0_52
+ (seq = _loop0_51_rule(p)) // _loop0_51
)
{
- D(fprintf(stderr, "%*c+ _gather_53[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "keyword_pattern _loop0_52"));
+ D(fprintf(stderr, "%*c+ _gather_52[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "keyword_pattern _loop0_51"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_53[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "keyword_pattern _loop0_52"));
+ D(fprintf(stderr, "%*c%s _gather_52[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "keyword_pattern _loop0_51"));
}
_res = NULL;
done:
@@ -30506,9 +30501,9 @@ _gather_53_rule(Parser *p)
return _res;
}
-// _loop0_54: ',' type_param
+// _loop0_53: ',' type_param
static asdl_seq *
-_loop0_54_rule(Parser *p)
+_loop0_53_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30533,7 +30528,7 @@ _loop0_54_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_54[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' type_param"));
+ D(fprintf(stderr, "%*c> _loop0_53[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' type_param"));
Token * _literal;
type_param_ty elem;
while (
@@ -30565,7 +30560,7 @@ _loop0_54_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_54[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_53[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' type_param"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -30582,9 +30577,9 @@ _loop0_54_rule(Parser *p)
return _seq;
}
-// _gather_55: type_param _loop0_54
+// _gather_54: type_param _loop0_53
static asdl_seq *
-_gather_55_rule(Parser *p)
+_gather_54_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30595,27 +30590,27 @@ _gather_55_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // type_param _loop0_54
+ { // type_param _loop0_53
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_55[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "type_param _loop0_54"));
+ D(fprintf(stderr, "%*c> _gather_54[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "type_param _loop0_53"));
type_param_ty elem;
asdl_seq * seq;
if (
(elem = type_param_rule(p)) // type_param
&&
- (seq = _loop0_54_rule(p)) // _loop0_54
+ (seq = _loop0_53_rule(p)) // _loop0_53
)
{
- D(fprintf(stderr, "%*c+ _gather_55[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "type_param _loop0_54"));
+ D(fprintf(stderr, "%*c+ _gather_54[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "type_param _loop0_53"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_55[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "type_param _loop0_54"));
+ D(fprintf(stderr, "%*c%s _gather_54[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "type_param _loop0_53"));
}
_res = NULL;
done:
@@ -30623,9 +30618,9 @@ _gather_55_rule(Parser *p)
return _res;
}
-// _loop1_56: (',' expression)
+// _loop1_55: (',' expression)
static asdl_seq *
-_loop1_56_rule(Parser *p)
+_loop1_55_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30650,13 +30645,13 @@ _loop1_56_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_56[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)"));
- void *_tmp_17_var;
+ D(fprintf(stderr, "%*c> _loop1_55[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)"));
+ void *_tmp_16_var;
while (
- (_tmp_17_var = _tmp_17_rule(p)) // ',' expression
+ (_tmp_16_var = _tmp_16_rule(p)) // ',' expression
)
{
- _res = _tmp_17_var;
+ _res = _tmp_16_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30673,7 +30668,7 @@ _loop1_56_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_56[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_55[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' expression)"));
}
if (_n == 0 || p->error_indicator) {
@@ -30695,9 +30690,9 @@ _loop1_56_rule(Parser *p)
return _seq;
}
-// _loop1_57: (',' star_expression)
+// _loop1_56: (',' star_expression)
static asdl_seq *
-_loop1_57_rule(Parser *p)
+_loop1_56_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30722,13 +30717,13 @@ _loop1_57_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_57[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)"));
- void *_tmp_158_var;
+ D(fprintf(stderr, "%*c> _loop1_56[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)"));
+ void *_tmp_157_var;
while (
- (_tmp_158_var = _tmp_158_rule(p)) // ',' star_expression
+ (_tmp_157_var = _tmp_157_rule(p)) // ',' star_expression
)
{
- _res = _tmp_158_var;
+ _res = _tmp_157_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30745,7 +30740,7 @@ _loop1_57_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_57[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_56[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' star_expression)"));
}
if (_n == 0 || p->error_indicator) {
@@ -30767,9 +30762,9 @@ _loop1_57_rule(Parser *p)
return _seq;
}
-// _loop0_58: ',' star_named_expression
+// _loop0_57: ',' star_named_expression
static asdl_seq *
-_loop0_58_rule(Parser *p)
+_loop0_57_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30794,7 +30789,7 @@ _loop0_58_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_58[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_named_expression"));
+ D(fprintf(stderr, "%*c> _loop0_57[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_named_expression"));
Token * _literal;
expr_ty elem;
while (
@@ -30826,7 +30821,7 @@ _loop0_58_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_58[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_57[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_named_expression"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -30843,9 +30838,9 @@ _loop0_58_rule(Parser *p)
return _seq;
}
-// _gather_59: star_named_expression _loop0_58
+// _gather_58: star_named_expression _loop0_57
static asdl_seq *
-_gather_59_rule(Parser *p)
+_gather_58_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30856,27 +30851,27 @@ _gather_59_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // star_named_expression _loop0_58
+ { // star_named_expression _loop0_57
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_58"));
+ D(fprintf(stderr, "%*c> _gather_58[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_57"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = star_named_expression_rule(p)) // star_named_expression
&&
- (seq = _loop0_58_rule(p)) // _loop0_58
+ (seq = _loop0_57_rule(p)) // _loop0_57
)
{
- D(fprintf(stderr, "%*c+ _gather_59[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_58"));
+ D(fprintf(stderr, "%*c+ _gather_58[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_57"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_59[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression _loop0_58"));
+ D(fprintf(stderr, "%*c%s _gather_58[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression _loop0_57"));
}
_res = NULL;
done:
@@ -30884,9 +30879,9 @@ _gather_59_rule(Parser *p)
return _res;
}
-// _loop1_60: ('or' conjunction)
+// _loop1_59: ('or' conjunction)
static asdl_seq *
-_loop1_60_rule(Parser *p)
+_loop1_59_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30911,13 +30906,13 @@ _loop1_60_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)"));
- void *_tmp_159_var;
+ D(fprintf(stderr, "%*c> _loop1_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)"));
+ void *_tmp_158_var;
while (
- (_tmp_159_var = _tmp_159_rule(p)) // 'or' conjunction
+ (_tmp_158_var = _tmp_158_rule(p)) // 'or' conjunction
)
{
- _res = _tmp_159_var;
+ _res = _tmp_158_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -30934,7 +30929,7 @@ _loop1_60_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_60[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_59[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('or' conjunction)"));
}
if (_n == 0 || p->error_indicator) {
@@ -30956,9 +30951,9 @@ _loop1_60_rule(Parser *p)
return _seq;
}
-// _loop1_61: ('and' inversion)
+// _loop1_60: ('and' inversion)
static asdl_seq *
-_loop1_61_rule(Parser *p)
+_loop1_60_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -30983,13 +30978,13 @@ _loop1_61_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_61[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)"));
- void *_tmp_160_var;
+ D(fprintf(stderr, "%*c> _loop1_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)"));
+ void *_tmp_159_var;
while (
- (_tmp_160_var = _tmp_160_rule(p)) // 'and' inversion
+ (_tmp_159_var = _tmp_159_rule(p)) // 'and' inversion
)
{
- _res = _tmp_160_var;
+ _res = _tmp_159_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -31006,7 +31001,7 @@ _loop1_61_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_61[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_60[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('and' inversion)"));
}
if (_n == 0 || p->error_indicator) {
@@ -31028,9 +31023,9 @@ _loop1_61_rule(Parser *p)
return _seq;
}
-// _loop1_62: compare_op_bitwise_or_pair
+// _loop1_61: compare_op_bitwise_or_pair
static asdl_seq *
-_loop1_62_rule(Parser *p)
+_loop1_61_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -31055,7 +31050,7 @@ _loop1_62_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_62[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compare_op_bitwise_or_pair"));
+ D(fprintf(stderr, "%*c> _loop1_61[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compare_op_bitwise_or_pair"));
CmpopExprPair* compare_op_bitwise_or_pair_var;
while (
(compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) // compare_op_bitwise_or_pair
@@ -31078,7 +31073,7 @@ _loop1_62_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_62[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_61[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "compare_op_bitwise_or_pair"));
}
if (_n == 0 || p->error_indicator) {
@@ -31100,9 +31095,9 @@ _loop1_62_rule(Parser *p)
return _seq;
}
-// _tmp_63: '!='
+// _tmp_62: '!='
static void *
-_tmp_63_rule(Parser *p)
+_tmp_62_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -31118,13 +31113,13 @@ _tmp_63_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_63[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!='"));
+ D(fprintf(stderr, "%*c> _tmp_62[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!='"));
Token * tok;
if (
(tok = _PyPegen_expect_token(p, 28)) // token='!='
)
{
- D(fprintf(stderr, "%*c+ _tmp_63[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!='"));
+ D(fprintf(stderr, "%*c+ _tmp_62[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!='"));
_res = _PyPegen_check_barry_as_flufl ( p , tok ) ? NULL : tok;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -31134,7 +31129,7 @@ _tmp_63_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_63[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_62[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!='"));
}
_res = NULL;
@@ -31143,9 +31138,9 @@ _tmp_63_rule(Parser *p)
return _res;
}
-// _loop0_64: ',' (slice | starred_expression)
+// _loop0_63: ',' (slice | starred_expression)
static asdl_seq *
-_loop0_64_rule(Parser *p)
+_loop0_63_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -31170,13 +31165,13 @@ _loop0_64_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_64[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (slice | starred_expression)"));
+ D(fprintf(stderr, "%*c> _loop0_63[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (slice | starred_expression)"));
Token * _literal;
void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_161_rule(p)) // slice | starred_expression
+ (elem = _tmp_160_rule(p)) // slice | starred_expression
)
{
_res = elem;
@@ -31202,7 +31197,7 @@ _loop0_64_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_64[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_63[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (slice | starred_expression)"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -31219,9 +31214,9 @@ _loop0_64_rule(Parser *p)
return _seq;
}
-// _gather_65: (slice | starred_expression) _loop0_64
+// _gather_64: (slice | starred_expression) _loop0_63
static asdl_seq *
-_gather_65_rule(Parser *p)
+_gather_64_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -31232,27 +31227,27 @@ _gather_65_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (slice | starred_expression) _loop0_64
+ { // (slice | starred_expression) _loop0_63
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_65[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(slice | starred_expression) _loop0_64"));
+ D(fprintf(stderr, "%*c> _gather_64[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(slice | starred_expression) _loop0_63"));
void *elem;
asdl_seq * seq;
if (
- (elem = _tmp_161_rule(p)) // slice | starred_expression
+ (elem = _tmp_160_rule(p)) // slice | starred_expression
&&
- (seq = _loop0_64_rule(p)) // _loop0_64
+ (seq = _loop0_63_rule(p)) // _loop0_63
)
{
- D(fprintf(stderr, "%*c+ _gather_65[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(slice | starred_expression) _loop0_64"));
+ D(fprintf(stderr, "%*c+ _gather_64[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(slice | starred_expression) _loop0_63"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_65[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(slice | starred_expression) _loop0_64"));
+ D(fprintf(stderr, "%*c%s _gather_64[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(slice | starred_expression) _loop0_63"));
}
_res = NULL;
done:
@@ -31260,9 +31255,9 @@ _gather_65_rule(Parser *p)
return _res;
}
-// _tmp_66: ':' expression?
+// _tmp_65: ':' expression?
static void *
-_tmp_66_rule(Parser *p)
+_tmp_65_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -31278,7 +31273,7 @@ _tmp_66_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_66[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' expression?"));
+ D(fprintf(stderr, "%*c> _tmp_65[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' expression?"));
Token * _literal;
void *d;
if (
@@ -31287,7 +31282,7 @@ _tmp_66_rule(Parser *p)
(d = expression_rule(p), !p->error_indicator) // expression?
)
{
- D(fprintf(stderr, "%*c+ _tmp_66[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' expression?"));
+ D(fprintf(stderr, "%*c+ _tmp_65[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' expression?"));
_res = d;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -31297,7 +31292,7 @@ _tmp_66_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_66[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_65[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':' expression?"));
}
_res = NULL;
@@ -31306,9 +31301,9 @@ _tmp_66_rule(Parser *p)
return _res;
}
-// _tmp_67: tuple | group | genexp
+// _tmp_66: tuple | group | genexp
static void *
-_tmp_67_rule(Parser *p)
+_tmp_66_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -31324,18 +31319,18 @@ _tmp_67_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple"));
+ D(fprintf(stderr, "%*c> _tmp_66[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple"));
expr_ty tuple_var;
if (
(tuple_var = tuple_rule(p)) // tuple
)
{
- D(fprintf(stderr, "%*c+ _tmp_67[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple"));
+ D(fprintf(stderr, "%*c+ _tmp_66[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple"));
_res = tuple_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_67[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_66[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple"));
}
{ // group
@@ -31343,18 +31338,18 @@ _tmp_67_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "group"));
+ D(fprintf(stderr, "%*c> _tmp_66[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "group"));
expr_ty group_var;
if (
(group_var = group_rule(p)) // group
)
{
- D(fprintf(stderr, "%*c+ _tmp_67[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "group"));
+ D(fprintf(stderr, "%*c+ _tmp_66[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "group"));
_res = group_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_67[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_66[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "group"));
}
{ // genexp
@@ -31362,18 +31357,18 @@ _tmp_67_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp"));
+ D(fprintf(stderr, "%*c> _tmp_66[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp"));
expr_ty genexp_var;
if (
(genexp_var = genexp_rule(p)) // genexp
)
{
- D(fprintf(stderr, "%*c+ _tmp_67[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp"));
+ D(fprintf(stderr, "%*c+ _tmp_66[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp"));
_res = genexp_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_67[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_66[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "genexp"));
}
_res = NULL;
@@ -31382,9 +31377,9 @@ _tmp_67_rule(Parser *p)
return _res;
}
-// _tmp_68: list | listcomp
+// _tmp_67: list | listcomp
static void *
-_tmp_68_rule(Parser *p)
+_tmp_67_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -31400,18 +31395,18 @@ _tmp_68_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_68[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list"));
+ D(fprintf(stderr, "%*c> _tmp_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list"));
expr_ty list_var;
if (
(list_var = list_rule(p)) // list
)
{
- D(fprintf(stderr, "%*c+ _tmp_68[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list"));
+ D(fprintf(stderr, "%*c+ _tmp_67[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list"));
_res = list_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_68[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_67[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list"));
}
{ // listcomp
@@ -31419,18 +31414,18 @@ _tmp_68_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_68[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "listcomp"));
+ D(fprintf(stderr, "%*c> _tmp_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "listcomp"));
expr_ty listcomp_var;
if (
(listcomp_var = listcomp_rule(p)) // listcomp
)
{
- D(fprintf(stderr, "%*c+ _tmp_68[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "listcomp"));
+ D(fprintf(stderr, "%*c+ _tmp_67[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "listcomp"));
_res = listcomp_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_68[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_67[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "listcomp"));
}
_res = NULL;
@@ -31439,9 +31434,9 @@ _tmp_68_rule(Parser *p)
return _res;
}
-// _tmp_69: dict | set | dictcomp | setcomp
+// _tmp_68: dict | set | dictcomp | setcomp
static void *
-_tmp_69_rule(Parser *p)
+_tmp_68_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -31457,18 +31452,18 @@ _tmp_69_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dict"));
+ D(fprintf(stderr, "%*c> _tmp_68[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dict"));
expr_ty dict_var;
if (
(dict_var = dict_rule(p)) // dict
)
{
- D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dict"));
+ D(fprintf(stderr, "%*c+ _tmp_68[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dict"));
_res = dict_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_69[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_68[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dict"));
}
{ // set
@@ -31476,18 +31471,18 @@ _tmp_69_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "set"));
+ D(fprintf(stderr, "%*c> _tmp_68[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "set"));
expr_ty set_var;
if (
(set_var = set_rule(p)) // set
)
{
- D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "set"));
+ D(fprintf(stderr, "%*c+ _tmp_68[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "set"));
_res = set_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_69[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_68[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "set"));
}
{ // dictcomp
@@ -31495,18 +31490,18 @@ _tmp_69_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dictcomp"));
+ D(fprintf(stderr, "%*c> _tmp_68[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dictcomp"));
expr_ty dictcomp_var;
if (
(dictcomp_var = dictcomp_rule(p)) // dictcomp
)
{
- D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dictcomp"));
+ D(fprintf(stderr, "%*c+ _tmp_68[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dictcomp"));
_res = dictcomp_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_69[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_68[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dictcomp"));
}
{ // setcomp
@@ -31514,18 +31509,18 @@ _tmp_69_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "setcomp"));
+ D(fprintf(stderr, "%*c> _tmp_68[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "setcomp"));
expr_ty setcomp_var;
if (
(setcomp_var = setcomp_rule(p)) // setcomp
)
{
- D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "setcomp"));
+ D(fprintf(stderr, "%*c+ _tmp_68[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "setcomp"));
_res = setcomp_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_69[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_68[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "setcomp"));
}
_res = NULL;
@@ -31534,9 +31529,9 @@ _tmp_69_rule(Parser *p)
return _res;
}
-// _tmp_70: yield_expr | named_expression
+// _tmp_69: yield_expr | named_expression
static void *
-_tmp_70_rule(Parser *p)
+_tmp_69_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -31552,18 +31547,18 @@ _tmp_70_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_70[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr"));
expr_ty yield_expr_var;
if (
(yield_expr_var = yield_expr_rule(p)) // yield_expr
)
{
- D(fprintf(stderr, "%*c+ _tmp_70[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
+ D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr"));
_res = yield_expr_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_70[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_69[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr"));
}
{ // named_expression
@@ -31571,18 +31566,18 @@ _tmp_70_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_70[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression"));
+ D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression"));
expr_ty named_expression_var;
if (
(named_expression_var = named_expression_rule(p)) // named_expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_70[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression"));
+ D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression"));
_res = named_expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_70[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_69[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "named_expression"));
}
_res = NULL;
@@ -31591,9 +31586,9 @@ _tmp_70_rule(Parser *p)
return _res;
}
-// _loop0_71: lambda_param_no_default
+// _loop0_70: lambda_param_no_default
static asdl_seq *
-_loop0_71_rule(Parser *p)
+_loop0_70_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -31618,7 +31613,7 @@ _loop0_71_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c> _loop0_70[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
arg_ty lambda_param_no_default_var;
while (
(lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
@@ -31641,7 +31636,7 @@ _loop0_71_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_71[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_70[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -31658,9 +31653,9 @@ _loop0_71_rule(Parser *p)
return _seq;
}
-// _loop0_72: lambda_param_with_default
+// _loop0_71: lambda_param_with_default
static asdl_seq *
-_loop0_72_rule(Parser *p)
+_loop0_71_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -31685,7 +31680,7 @@ _loop0_72_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_72[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
+ D(fprintf(stderr, "%*c> _loop0_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
NameDefaultPair* lambda_param_with_default_var;
while (
(lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
@@ -31708,7 +31703,7 @@ _loop0_72_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_72[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_71[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -31725,9 +31720,9 @@ _loop0_72_rule(Parser *p)
return _seq;
}
-// _loop1_73: lambda_param_no_default
+// _loop1_72: lambda_param_no_default
static asdl_seq *
-_loop1_73_rule(Parser *p)
+_loop1_72_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -31752,7 +31747,7 @@ _loop1_73_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_73[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c> _loop1_72[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
arg_ty lambda_param_no_default_var;
while (
(lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
@@ -31775,7 +31770,7 @@ _loop1_73_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_73[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_72[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -31797,9 +31792,9 @@ _loop1_73_rule(Parser *p)
return _seq;
}
-// _loop1_74: lambda_param_with_default
+// _loop1_73: lambda_param_with_default
static asdl_seq *
-_loop1_74_rule(Parser *p)
+_loop1_73_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -31824,7 +31819,7 @@ _loop1_74_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_74[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
+ D(fprintf(stderr, "%*c> _loop1_73[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default"));
NameDefaultPair* lambda_param_with_default_var;
while (
(lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default
@@ -31847,7 +31842,7 @@ _loop1_74_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_74[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_73[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -31869,9 +31864,9 @@ _loop1_74_rule(Parser *p)
return _seq;
}
-// _loop0_75: lambda_param_maybe_default
+// _loop0_74: lambda_param_maybe_default
static asdl_seq *
-_loop0_75_rule(Parser *p)
+_loop0_74_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -31896,7 +31891,7 @@ _loop0_75_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop0_74[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
NameDefaultPair* lambda_param_maybe_default_var;
while (
(lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
@@ -31919,7 +31914,7 @@ _loop0_75_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_75[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_74[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -31936,9 +31931,9 @@ _loop0_75_rule(Parser *p)
return _seq;
}
-// _loop1_76: lambda_param_maybe_default
+// _loop1_75: lambda_param_maybe_default
static asdl_seq *
-_loop1_76_rule(Parser *p)
+_loop1_75_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -31963,7 +31958,7 @@ _loop1_76_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_76[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
+ D(fprintf(stderr, "%*c> _loop1_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default"));
NameDefaultPair* lambda_param_maybe_default_var;
while (
(lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default
@@ -31986,7 +31981,7 @@ _loop1_76_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_76[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_75[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default"));
}
if (_n == 0 || p->error_indicator) {
@@ -32008,9 +32003,9 @@ _loop1_76_rule(Parser *p)
return _seq;
}
-// _loop0_77: fstring_format_spec
+// _loop0_76: fstring_format_spec
static asdl_seq *
-_loop0_77_rule(Parser *p)
+_loop0_76_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -32035,7 +32030,7 @@ _loop0_77_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_77[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_format_spec"));
+ D(fprintf(stderr, "%*c> _loop0_76[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_format_spec"));
expr_ty fstring_format_spec_var;
while (
(fstring_format_spec_var = fstring_format_spec_rule(p)) // fstring_format_spec
@@ -32058,7 +32053,7 @@ _loop0_77_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_77[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_76[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_format_spec"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32075,9 +32070,9 @@ _loop0_77_rule(Parser *p)
return _seq;
}
-// _loop0_78: fstring_middle
+// _loop0_77: fstring_middle
static asdl_seq *
-_loop0_78_rule(Parser *p)
+_loop0_77_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -32102,7 +32097,7 @@ _loop0_78_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_78[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_middle"));
+ D(fprintf(stderr, "%*c> _loop0_77[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring_middle"));
expr_ty fstring_middle_var;
while (
(fstring_middle_var = fstring_middle_rule(p)) // fstring_middle
@@ -32125,7 +32120,7 @@ _loop0_78_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_78[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_77[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring_middle"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32142,9 +32137,9 @@ _loop0_78_rule(Parser *p)
return _seq;
}
-// _loop0_79: tstring_format_spec
+// _loop0_78: tstring_format_spec
static asdl_seq *
-_loop0_79_rule(Parser *p)
+_loop0_78_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -32169,7 +32164,7 @@ _loop0_79_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_79[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tstring_format_spec"));
+ D(fprintf(stderr, "%*c> _loop0_78[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tstring_format_spec"));
expr_ty tstring_format_spec_var;
while (
(tstring_format_spec_var = tstring_format_spec_rule(p)) // tstring_format_spec
@@ -32192,7 +32187,7 @@ _loop0_79_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_79[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_78[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tstring_format_spec"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32209,9 +32204,9 @@ _loop0_79_rule(Parser *p)
return _seq;
}
-// _loop0_80: tstring_middle
+// _loop0_79: tstring_middle
static asdl_seq *
-_loop0_80_rule(Parser *p)
+_loop0_79_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -32236,7 +32231,7 @@ _loop0_80_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_80[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tstring_middle"));
+ D(fprintf(stderr, "%*c> _loop0_79[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tstring_middle"));
expr_ty tstring_middle_var;
while (
(tstring_middle_var = tstring_middle_rule(p)) // tstring_middle
@@ -32259,7 +32254,7 @@ _loop0_80_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_80[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_79[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tstring_middle"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32276,9 +32271,9 @@ _loop0_80_rule(Parser *p)
return _seq;
}
-// _loop1_81: (fstring | string | tstring)
+// _loop1_80: (fstring | string | tstring)
static asdl_seq *
-_loop1_81_rule(Parser *p)
+_loop1_80_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -32303,13 +32298,13 @@ _loop1_81_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_81[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(fstring | string | tstring)"));
- void *_tmp_162_var;
+ D(fprintf(stderr, "%*c> _loop1_80[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(fstring | string | tstring)"));
+ void *_tmp_161_var;
while (
- (_tmp_162_var = _tmp_162_rule(p)) // fstring | string | tstring
+ (_tmp_161_var = _tmp_161_rule(p)) // fstring | string | tstring
)
{
- _res = _tmp_162_var;
+ _res = _tmp_161_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -32326,7 +32321,7 @@ _loop1_81_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_81[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_80[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(fstring | string | tstring)"));
}
if (_n == 0 || p->error_indicator) {
@@ -32348,9 +32343,9 @@ _loop1_81_rule(Parser *p)
return _seq;
}
-// _tmp_82: star_named_expression ',' star_named_expressions?
+// _tmp_81: star_named_expression ',' star_named_expressions?
static void *
-_tmp_82_rule(Parser *p)
+_tmp_81_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -32366,7 +32361,7 @@ _tmp_82_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_82[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
+ D(fprintf(stderr, "%*c> _tmp_81[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
Token * _literal;
expr_ty y;
void *z;
@@ -32378,7 +32373,7 @@ _tmp_82_rule(Parser *p)
(z = star_named_expressions_rule(p), !p->error_indicator) // star_named_expressions?
)
{
- D(fprintf(stderr, "%*c+ _tmp_82[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
+ D(fprintf(stderr, "%*c+ _tmp_81[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
_res = _PyPegen_seq_insert_in_front ( p , y , z );
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -32388,7 +32383,7 @@ _tmp_82_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_82[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_81[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression ',' star_named_expressions?"));
}
_res = NULL;
@@ -32397,9 +32392,9 @@ _tmp_82_rule(Parser *p)
return _res;
}
-// _loop0_83: ',' double_starred_kvpair
+// _loop0_82: ',' double_starred_kvpair
static asdl_seq *
-_loop0_83_rule(Parser *p)
+_loop0_82_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -32424,7 +32419,7 @@ _loop0_83_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_83[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair"));
+ D(fprintf(stderr, "%*c> _loop0_82[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair"));
Token * _literal;
KeyValuePair* elem;
while (
@@ -32456,7 +32451,7 @@ _loop0_83_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_83[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_82[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' double_starred_kvpair"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32473,9 +32468,9 @@ _loop0_83_rule(Parser *p)
return _seq;
}
-// _gather_84: double_starred_kvpair _loop0_83
+// _gather_83: double_starred_kvpair _loop0_82
static asdl_seq *
-_gather_84_rule(Parser *p)
+_gather_83_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -32486,27 +32481,27 @@ _gather_84_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // double_starred_kvpair _loop0_83
+ { // double_starred_kvpair _loop0_82
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_84[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_83"));
+ D(fprintf(stderr, "%*c> _gather_83[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_82"));
KeyValuePair* elem;
asdl_seq * seq;
if (
(elem = double_starred_kvpair_rule(p)) // double_starred_kvpair
&&
- (seq = _loop0_83_rule(p)) // _loop0_83
+ (seq = _loop0_82_rule(p)) // _loop0_82
)
{
- D(fprintf(stderr, "%*c+ _gather_84[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_83"));
+ D(fprintf(stderr, "%*c+ _gather_83[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_82"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_84[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_83"));
+ D(fprintf(stderr, "%*c%s _gather_83[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_82"));
}
_res = NULL;
done:
@@ -32514,9 +32509,9 @@ _gather_84_rule(Parser *p)
return _res;
}
-// _loop1_85: for_if_clause
+// _loop1_84: for_if_clause
static asdl_seq *
-_loop1_85_rule(Parser *p)
+_loop1_84_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -32541,7 +32536,7 @@ _loop1_85_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_85[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause"));
+ D(fprintf(stderr, "%*c> _loop1_84[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause"));
comprehension_ty for_if_clause_var;
while (
(for_if_clause_var = for_if_clause_rule(p)) // for_if_clause
@@ -32564,7 +32559,7 @@ _loop1_85_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_85[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_84[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "for_if_clause"));
}
if (_n == 0 || p->error_indicator) {
@@ -32586,9 +32581,9 @@ _loop1_85_rule(Parser *p)
return _seq;
}
-// _loop0_86: ('if' disjunction)
+// _loop0_85: ('if' disjunction)
static asdl_seq *
-_loop0_86_rule(Parser *p)
+_loop0_85_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -32613,13 +32608,13 @@ _loop0_86_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_86[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)"));
- void *_tmp_163_var;
+ D(fprintf(stderr, "%*c> _loop0_85[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)"));
+ void *_tmp_162_var;
while (
- (_tmp_163_var = _tmp_163_rule(p)) // 'if' disjunction
+ (_tmp_162_var = _tmp_162_rule(p)) // 'if' disjunction
)
{
- _res = _tmp_163_var;
+ _res = _tmp_162_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -32636,7 +32631,7 @@ _loop0_86_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_86[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_85[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('if' disjunction)"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32653,9 +32648,9 @@ _loop0_86_rule(Parser *p)
return _seq;
}
-// _tmp_87: assignment_expression | expression !':='
+// _tmp_86: assignment_expression | expression !':='
static void *
-_tmp_87_rule(Parser *p)
+_tmp_86_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -32671,18 +32666,18 @@ _tmp_87_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_87[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
+ D(fprintf(stderr, "%*c> _tmp_86[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
expr_ty assignment_expression_var;
if (
(assignment_expression_var = assignment_expression_rule(p)) // assignment_expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_87[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
+ D(fprintf(stderr, "%*c+ _tmp_86[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment_expression"));
_res = assignment_expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_87[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_86[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment_expression"));
}
{ // expression !':='
@@ -32690,7 +32685,7 @@ _tmp_87_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_87[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='"));
+ D(fprintf(stderr, "%*c> _tmp_86[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='"));
expr_ty expression_var;
if (
(expression_var = expression_rule(p)) // expression
@@ -32698,12 +32693,12 @@ _tmp_87_rule(Parser *p)
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':='
)
{
- D(fprintf(stderr, "%*c+ _tmp_87[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='"));
+ D(fprintf(stderr, "%*c+ _tmp_86[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='"));
_res = expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_87[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_86[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression !':='"));
}
_res = NULL;
@@ -32712,9 +32707,9 @@ _tmp_87_rule(Parser *p)
return _res;
}
-// _loop0_88: ',' (starred_expression | (assignment_expression | expression !':=') !'=')
+// _loop0_87: ',' (starred_expression | (assignment_expression | expression !':=') !'=')
static asdl_seq *
-_loop0_88_rule(Parser *p)
+_loop0_87_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -32739,13 +32734,13 @@ _loop0_88_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_88[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')"));
+ D(fprintf(stderr, "%*c> _loop0_87[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')"));
Token * _literal;
void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_164_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'='
+ (elem = _tmp_163_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'='
)
{
_res = elem;
@@ -32771,7 +32766,7 @@ _loop0_88_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_88[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_87[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (starred_expression | (assignment_expression | expression !':=') !'=')"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32788,10 +32783,10 @@ _loop0_88_rule(Parser *p)
return _seq;
}
-// _gather_89:
-// | (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_88
+// _gather_88:
+// | (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_87
static asdl_seq *
-_gather_89_rule(Parser *p)
+_gather_88_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -32802,27 +32797,27 @@ _gather_89_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_88
+ { // (starred_expression | (assignment_expression | expression !':=') !'=') _loop0_87
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_89[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_88"));
+ D(fprintf(stderr, "%*c> _gather_88[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_87"));
void *elem;
asdl_seq * seq;
if (
- (elem = _tmp_164_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'='
+ (elem = _tmp_163_rule(p)) // starred_expression | (assignment_expression | expression !':=') !'='
&&
- (seq = _loop0_88_rule(p)) // _loop0_88
+ (seq = _loop0_87_rule(p)) // _loop0_87
)
{
- D(fprintf(stderr, "%*c+ _gather_89[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_88"));
+ D(fprintf(stderr, "%*c+ _gather_88[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_87"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_89[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_88"));
+ D(fprintf(stderr, "%*c%s _gather_88[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(starred_expression | (assignment_expression | expression !':=') !'=') _loop0_87"));
}
_res = NULL;
done:
@@ -32830,9 +32825,9 @@ _gather_89_rule(Parser *p)
return _res;
}
-// _tmp_90: ',' kwargs
+// _tmp_89: ',' kwargs
static void *
-_tmp_90_rule(Parser *p)
+_tmp_89_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -32848,7 +32843,7 @@ _tmp_90_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_90[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwargs"));
+ D(fprintf(stderr, "%*c> _tmp_89[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwargs"));
Token * _literal;
asdl_seq* k;
if (
@@ -32857,7 +32852,7 @@ _tmp_90_rule(Parser *p)
(k = kwargs_rule(p)) // kwargs
)
{
- D(fprintf(stderr, "%*c+ _tmp_90[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' kwargs"));
+ D(fprintf(stderr, "%*c+ _tmp_89[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' kwargs"));
_res = k;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -32867,7 +32862,7 @@ _tmp_90_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_90[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_89[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwargs"));
}
_res = NULL;
@@ -32876,9 +32871,9 @@ _tmp_90_rule(Parser *p)
return _res;
}
-// _loop0_91: ',' kwarg_or_starred
+// _loop0_90: ',' kwarg_or_starred
static asdl_seq *
-_loop0_91_rule(Parser *p)
+_loop0_90_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -32903,7 +32898,7 @@ _loop0_91_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred"));
+ D(fprintf(stderr, "%*c> _loop0_90[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred"));
Token * _literal;
KeywordOrStarred* elem;
while (
@@ -32935,7 +32930,7 @@ _loop0_91_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_91[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_90[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_starred"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -32952,9 +32947,9 @@ _loop0_91_rule(Parser *p)
return _seq;
}
-// _gather_92: kwarg_or_starred _loop0_91
+// _gather_91: kwarg_or_starred _loop0_90
static asdl_seq *
-_gather_92_rule(Parser *p)
+_gather_91_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -32965,27 +32960,27 @@ _gather_92_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // kwarg_or_starred _loop0_91
+ { // kwarg_or_starred _loop0_90
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_91"));
+ D(fprintf(stderr, "%*c> _gather_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_90"));
KeywordOrStarred* elem;
asdl_seq * seq;
if (
(elem = kwarg_or_starred_rule(p)) // kwarg_or_starred
&&
- (seq = _loop0_91_rule(p)) // _loop0_91
+ (seq = _loop0_90_rule(p)) // _loop0_90
)
{
- D(fprintf(stderr, "%*c+ _gather_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_91"));
+ D(fprintf(stderr, "%*c+ _gather_91[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_90"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_92[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_starred _loop0_91"));
+ D(fprintf(stderr, "%*c%s _gather_91[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_starred _loop0_90"));
}
_res = NULL;
done:
@@ -32993,9 +32988,9 @@ _gather_92_rule(Parser *p)
return _res;
}
-// _loop0_93: ',' kwarg_or_double_starred
+// _loop0_92: ',' kwarg_or_double_starred
static asdl_seq *
-_loop0_93_rule(Parser *p)
+_loop0_92_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33020,7 +33015,7 @@ _loop0_93_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred"));
+ D(fprintf(stderr, "%*c> _loop0_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred"));
Token * _literal;
KeywordOrStarred* elem;
while (
@@ -33052,7 +33047,7 @@ _loop0_93_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_93[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_92[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_double_starred"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -33069,9 +33064,9 @@ _loop0_93_rule(Parser *p)
return _seq;
}
-// _gather_94: kwarg_or_double_starred _loop0_93
+// _gather_93: kwarg_or_double_starred _loop0_92
static asdl_seq *
-_gather_94_rule(Parser *p)
+_gather_93_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33082,27 +33077,27 @@ _gather_94_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // kwarg_or_double_starred _loop0_93
+ { // kwarg_or_double_starred _loop0_92
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_93"));
+ D(fprintf(stderr, "%*c> _gather_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_92"));
KeywordOrStarred* elem;
asdl_seq * seq;
if (
(elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred
&&
- (seq = _loop0_93_rule(p)) // _loop0_93
+ (seq = _loop0_92_rule(p)) // _loop0_92
)
{
- D(fprintf(stderr, "%*c+ _gather_94[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_93"));
+ D(fprintf(stderr, "%*c+ _gather_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_92"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_94[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_double_starred _loop0_93"));
+ D(fprintf(stderr, "%*c%s _gather_93[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_double_starred _loop0_92"));
}
_res = NULL;
done:
@@ -33110,9 +33105,9 @@ _gather_94_rule(Parser *p)
return _res;
}
-// _loop0_95: (',' star_target)
+// _loop0_94: (',' star_target)
static asdl_seq *
-_loop0_95_rule(Parser *p)
+_loop0_94_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33137,13 +33132,13 @@ _loop0_95_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)"));
- void *_tmp_165_var;
+ D(fprintf(stderr, "%*c> _loop0_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)"));
+ void *_tmp_164_var;
while (
- (_tmp_165_var = _tmp_165_rule(p)) // ',' star_target
+ (_tmp_164_var = _tmp_164_rule(p)) // ',' star_target
)
{
- _res = _tmp_165_var;
+ _res = _tmp_164_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -33160,7 +33155,7 @@ _loop0_95_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_95[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_94[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' star_target)"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -33177,9 +33172,9 @@ _loop0_95_rule(Parser *p)
return _seq;
}
-// _loop0_96: ',' star_target
+// _loop0_95: ',' star_target
static asdl_seq *
-_loop0_96_rule(Parser *p)
+_loop0_95_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33204,7 +33199,7 @@ _loop0_96_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target"));
+ D(fprintf(stderr, "%*c> _loop0_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target"));
Token * _literal;
expr_ty elem;
while (
@@ -33236,7 +33231,7 @@ _loop0_96_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_96[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_95[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -33253,9 +33248,9 @@ _loop0_96_rule(Parser *p)
return _seq;
}
-// _gather_97: star_target _loop0_96
+// _gather_96: star_target _loop0_95
static asdl_seq *
-_gather_97_rule(Parser *p)
+_gather_96_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33266,27 +33261,27 @@ _gather_97_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // star_target _loop0_96
+ { // star_target _loop0_95
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target _loop0_96"));
+ D(fprintf(stderr, "%*c> _gather_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target _loop0_95"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = star_target_rule(p)) // star_target
&&
- (seq = _loop0_96_rule(p)) // _loop0_96
+ (seq = _loop0_95_rule(p)) // _loop0_95
)
{
- D(fprintf(stderr, "%*c+ _gather_97[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target _loop0_96"));
+ D(fprintf(stderr, "%*c+ _gather_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target _loop0_95"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_97[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target _loop0_96"));
+ D(fprintf(stderr, "%*c%s _gather_96[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target _loop0_95"));
}
_res = NULL;
done:
@@ -33294,9 +33289,9 @@ _gather_97_rule(Parser *p)
return _res;
}
-// _loop1_98: (',' star_target)
+// _loop1_97: (',' star_target)
static asdl_seq *
-_loop1_98_rule(Parser *p)
+_loop1_97_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33321,13 +33316,13 @@ _loop1_98_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)"));
- void *_tmp_165_var;
+ D(fprintf(stderr, "%*c> _loop1_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)"));
+ void *_tmp_164_var;
while (
- (_tmp_165_var = _tmp_165_rule(p)) // ',' star_target
+ (_tmp_164_var = _tmp_164_rule(p)) // ',' star_target
)
{
- _res = _tmp_165_var;
+ _res = _tmp_164_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -33344,7 +33339,7 @@ _loop1_98_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_98[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_97[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' star_target)"));
}
if (_n == 0 || p->error_indicator) {
@@ -33366,9 +33361,9 @@ _loop1_98_rule(Parser *p)
return _seq;
}
-// _tmp_99: !'*' star_target
+// _tmp_98: !'*' star_target
static void *
-_tmp_99_rule(Parser *p)
+_tmp_98_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33384,7 +33379,7 @@ _tmp_99_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_99[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!'*' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!'*' star_target"));
expr_ty star_target_var;
if (
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 16) // token='*'
@@ -33392,12 +33387,12 @@ _tmp_99_rule(Parser *p)
(star_target_var = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_99[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!'*' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!'*' star_target"));
_res = star_target_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_99[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "!'*' star_target"));
}
_res = NULL;
@@ -33406,9 +33401,9 @@ _tmp_99_rule(Parser *p)
return _res;
}
-// _loop0_100: ',' del_target
+// _loop0_99: ',' del_target
static asdl_seq *
-_loop0_100_rule(Parser *p)
+_loop0_99_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33433,7 +33428,7 @@ _loop0_100_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_100[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' del_target"));
+ D(fprintf(stderr, "%*c> _loop0_99[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' del_target"));
Token * _literal;
expr_ty elem;
while (
@@ -33465,7 +33460,7 @@ _loop0_100_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_100[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_99[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' del_target"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -33482,9 +33477,9 @@ _loop0_100_rule(Parser *p)
return _seq;
}
-// _gather_101: del_target _loop0_100
+// _gather_100: del_target _loop0_99
static asdl_seq *
-_gather_101_rule(Parser *p)
+_gather_100_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33495,27 +33490,27 @@ _gather_101_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // del_target _loop0_100
+ { // del_target _loop0_99
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_target _loop0_100"));
+ D(fprintf(stderr, "%*c> _gather_100[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_target _loop0_99"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = del_target_rule(p)) // del_target
&&
- (seq = _loop0_100_rule(p)) // _loop0_100
+ (seq = _loop0_99_rule(p)) // _loop0_99
)
{
- D(fprintf(stderr, "%*c+ _gather_101[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_target _loop0_100"));
+ D(fprintf(stderr, "%*c+ _gather_100[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_target _loop0_99"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_101[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "del_target _loop0_100"));
+ D(fprintf(stderr, "%*c%s _gather_100[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "del_target _loop0_99"));
}
_res = NULL;
done:
@@ -33523,9 +33518,9 @@ _gather_101_rule(Parser *p)
return _res;
}
-// _loop0_102: ',' expression
+// _loop0_101: ',' expression
static asdl_seq *
-_loop0_102_rule(Parser *p)
+_loop0_101_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33550,7 +33545,7 @@ _loop0_102_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_102[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
+ D(fprintf(stderr, "%*c> _loop0_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression"));
Token * _literal;
expr_ty elem;
while (
@@ -33582,7 +33577,7 @@ _loop0_102_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_102[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_101[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -33599,9 +33594,9 @@ _loop0_102_rule(Parser *p)
return _seq;
}
-// _gather_103: expression _loop0_102
+// _gather_102: expression _loop0_101
static asdl_seq *
-_gather_103_rule(Parser *p)
+_gather_102_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33612,27 +33607,27 @@ _gather_103_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // expression _loop0_102
+ { // expression _loop0_101
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_103[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_102"));
+ D(fprintf(stderr, "%*c> _gather_102[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_101"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = expression_rule(p)) // expression
&&
- (seq = _loop0_102_rule(p)) // _loop0_102
+ (seq = _loop0_101_rule(p)) // _loop0_101
)
{
- D(fprintf(stderr, "%*c+ _gather_103[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_102"));
+ D(fprintf(stderr, "%*c+ _gather_102[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_101"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_103[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_102"));
+ D(fprintf(stderr, "%*c%s _gather_102[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression _loop0_101"));
}
_res = NULL;
done:
@@ -33640,9 +33635,9 @@ _gather_103_rule(Parser *p)
return _res;
}
-// _tmp_104: NEWLINE INDENT
+// _tmp_103: NEWLINE INDENT
static void *
-_tmp_104_rule(Parser *p)
+_tmp_103_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33658,7 +33653,7 @@ _tmp_104_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_104[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT"));
+ D(fprintf(stderr, "%*c> _tmp_103[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT"));
Token * indent_var;
Token * newline_var;
if (
@@ -33667,12 +33662,12 @@ _tmp_104_rule(Parser *p)
(indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT'
)
{
- D(fprintf(stderr, "%*c+ _tmp_104[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT"));
+ D(fprintf(stderr, "%*c+ _tmp_103[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT"));
_res = _PyPegen_dummy_name(p, newline_var, indent_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_104[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_103[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE INDENT"));
}
_res = NULL;
@@ -33681,11 +33676,11 @@ _tmp_104_rule(Parser *p)
return _res;
}
-// _tmp_105:
+// _tmp_104:
// | (','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs)
// | kwargs
static void *
-_tmp_105_rule(Parser *p)
+_tmp_104_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33701,18 +33696,18 @@ _tmp_105_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_105[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs)"));
- void *_tmp_166_var;
+ D(fprintf(stderr, "%*c> _tmp_104[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs)"));
+ void *_tmp_165_var;
if (
- (_tmp_166_var = _tmp_166_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs
+ (_tmp_165_var = _tmp_165_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs
)
{
- D(fprintf(stderr, "%*c+ _tmp_105[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs)"));
- _res = _tmp_166_var;
+ D(fprintf(stderr, "%*c+ _tmp_104[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs)"));
+ _res = _tmp_165_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_105[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_104[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs)"));
}
{ // kwargs
@@ -33720,18 +33715,18 @@ _tmp_105_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_105[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwargs"));
+ D(fprintf(stderr, "%*c> _tmp_104[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwargs"));
asdl_seq* kwargs_var;
if (
(kwargs_var = kwargs_rule(p)) // kwargs
)
{
- D(fprintf(stderr, "%*c+ _tmp_105[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwargs"));
+ D(fprintf(stderr, "%*c+ _tmp_104[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwargs"));
_res = kwargs_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_105[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_104[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwargs"));
}
_res = NULL;
@@ -33740,9 +33735,9 @@ _tmp_105_rule(Parser *p)
return _res;
}
-// _loop0_106: ',' (starred_expression !'=')
+// _loop0_105: ',' (starred_expression !'=')
static asdl_seq *
-_loop0_106_rule(Parser *p)
+_loop0_105_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33767,13 +33762,13 @@ _loop0_106_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_106[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (starred_expression !'=')"));
+ D(fprintf(stderr, "%*c> _loop0_105[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (starred_expression !'=')"));
Token * _literal;
void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_167_rule(p)) // starred_expression !'='
+ (elem = _tmp_166_rule(p)) // starred_expression !'='
)
{
_res = elem;
@@ -33799,7 +33794,7 @@ _loop0_106_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_106[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_105[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (starred_expression !'=')"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -33816,9 +33811,9 @@ _loop0_106_rule(Parser *p)
return _seq;
}
-// _gather_107: (starred_expression !'=') _loop0_106
+// _gather_106: (starred_expression !'=') _loop0_105
static asdl_seq *
-_gather_107_rule(Parser *p)
+_gather_106_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33829,27 +33824,27 @@ _gather_107_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (starred_expression !'=') _loop0_106
+ { // (starred_expression !'=') _loop0_105
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_107[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(starred_expression !'=') _loop0_106"));
+ D(fprintf(stderr, "%*c> _gather_106[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(starred_expression !'=') _loop0_105"));
void *elem;
asdl_seq * seq;
if (
- (elem = _tmp_167_rule(p)) // starred_expression !'='
+ (elem = _tmp_166_rule(p)) // starred_expression !'='
&&
- (seq = _loop0_106_rule(p)) // _loop0_106
+ (seq = _loop0_105_rule(p)) // _loop0_105
)
{
- D(fprintf(stderr, "%*c+ _gather_107[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(starred_expression !'=') _loop0_106"));
+ D(fprintf(stderr, "%*c+ _gather_106[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(starred_expression !'=') _loop0_105"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_107[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(starred_expression !'=') _loop0_106"));
+ D(fprintf(stderr, "%*c%s _gather_106[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(starred_expression !'=') _loop0_105"));
}
_res = NULL;
done:
@@ -33857,9 +33852,9 @@ _gather_107_rule(Parser *p)
return _res;
}
-// _tmp_108: args | expression for_if_clauses
+// _tmp_107: args | expression for_if_clauses
static void *
-_tmp_108_rule(Parser *p)
+_tmp_107_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33875,18 +33870,18 @@ _tmp_108_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_108[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args"));
+ D(fprintf(stderr, "%*c> _tmp_107[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args"));
expr_ty args_var;
if (
(args_var = args_rule(p)) // args
)
{
- D(fprintf(stderr, "%*c+ _tmp_108[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args"));
+ D(fprintf(stderr, "%*c+ _tmp_107[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args"));
_res = args_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_108[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_107[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args"));
}
{ // expression for_if_clauses
@@ -33894,7 +33889,7 @@ _tmp_108_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_108[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses"));
+ D(fprintf(stderr, "%*c> _tmp_107[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses"));
expr_ty expression_var;
asdl_comprehension_seq* for_if_clauses_var;
if (
@@ -33903,12 +33898,12 @@ _tmp_108_rule(Parser *p)
(for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses
)
{
- D(fprintf(stderr, "%*c+ _tmp_108[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses"));
+ D(fprintf(stderr, "%*c+ _tmp_107[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses"));
_res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_108[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_107[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression for_if_clauses"));
}
_res = NULL;
@@ -33917,9 +33912,9 @@ _tmp_108_rule(Parser *p)
return _res;
}
-// _tmp_109: args ','
+// _tmp_108: args ','
static void *
-_tmp_109_rule(Parser *p)
+_tmp_108_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33935,7 +33930,7 @@ _tmp_109_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ','"));
+ D(fprintf(stderr, "%*c> _tmp_108[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ','"));
Token * _literal;
expr_ty args_var;
if (
@@ -33944,12 +33939,12 @@ _tmp_109_rule(Parser *p)
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_109[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ','"));
+ D(fprintf(stderr, "%*c+ _tmp_108[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ','"));
_res = _PyPegen_dummy_name(p, args_var, _literal);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_109[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_108[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args ','"));
}
_res = NULL;
@@ -33958,9 +33953,9 @@ _tmp_109_rule(Parser *p)
return _res;
}
-// _tmp_110: ',' | ')'
+// _tmp_109: ',' | ')'
static void *
-_tmp_110_rule(Parser *p)
+_tmp_109_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -33976,18 +33971,18 @@ _tmp_110_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_110[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_110[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_109[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_110[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_109[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
{ // ')'
@@ -33995,18 +33990,18 @@ _tmp_110_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_110[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_110[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_109[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_110[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_109[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
_res = NULL;
@@ -34015,9 +34010,9 @@ _tmp_110_rule(Parser *p)
return _res;
}
-// _tmp_111: 'True' | 'False' | 'None'
+// _tmp_110: 'True' | 'False' | 'None'
static void *
-_tmp_111_rule(Parser *p)
+_tmp_110_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -34033,18 +34028,18 @@ _tmp_111_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
+ D(fprintf(stderr, "%*c> _tmp_110[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 622)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 623)) // token='True'
)
{
- D(fprintf(stderr, "%*c+ _tmp_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
+ D(fprintf(stderr, "%*c+ _tmp_110[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_111[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_110[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'True'"));
}
{ // 'False'
@@ -34052,18 +34047,18 @@ _tmp_111_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
+ D(fprintf(stderr, "%*c> _tmp_110[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 624)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 625)) // token='False'
)
{
- D(fprintf(stderr, "%*c+ _tmp_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
+ D(fprintf(stderr, "%*c+ _tmp_110[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_111[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_110[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'False'"));
}
{ // 'None'
@@ -34071,18 +34066,18 @@ _tmp_111_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
+ D(fprintf(stderr, "%*c> _tmp_110[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='None'
)
{
- D(fprintf(stderr, "%*c+ _tmp_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
+ D(fprintf(stderr, "%*c+ _tmp_110[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_111[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_110[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'"));
}
_res = NULL;
@@ -34091,9 +34086,9 @@ _tmp_111_rule(Parser *p)
return _res;
}
-// _tmp_112: NAME '='
+// _tmp_111: NAME '='
static void *
-_tmp_112_rule(Parser *p)
+_tmp_111_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -34109,7 +34104,7 @@ _tmp_112_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_112[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '='"));
+ D(fprintf(stderr, "%*c> _tmp_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '='"));
Token * _literal;
expr_ty name_var;
if (
@@ -34118,12 +34113,12 @@ _tmp_112_rule(Parser *p)
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_112[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '='"));
+ D(fprintf(stderr, "%*c+ _tmp_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '='"));
_res = _PyPegen_dummy_name(p, name_var, _literal);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_112[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_111[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME '='"));
}
_res = NULL;
@@ -34132,9 +34127,9 @@ _tmp_112_rule(Parser *p)
return _res;
}
-// _loop1_113: (!STRING expression_without_invalid)
+// _loop1_112: (!STRING expression_without_invalid)
static asdl_seq *
-_loop1_113_rule(Parser *p)
+_loop1_112_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -34159,13 +34154,13 @@ _loop1_113_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop1_113[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(!STRING expression_without_invalid)"));
- void *_tmp_168_var;
+ D(fprintf(stderr, "%*c> _loop1_112[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(!STRING expression_without_invalid)"));
+ void *_tmp_167_var;
while (
- (_tmp_168_var = _tmp_168_rule(p)) // !STRING expression_without_invalid
+ (_tmp_167_var = _tmp_167_rule(p)) // !STRING expression_without_invalid
)
{
- _res = _tmp_168_var;
+ _res = _tmp_167_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -34182,7 +34177,7 @@ _loop1_113_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop1_113[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop1_112[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(!STRING expression_without_invalid)"));
}
if (_n == 0 || p->error_indicator) {
@@ -34204,9 +34199,9 @@ _loop1_113_rule(Parser *p)
return _seq;
}
-// _tmp_114: NAME STRING | SOFT_KEYWORD
+// _tmp_113: NAME STRING | SOFT_KEYWORD
static void *
-_tmp_114_rule(Parser *p)
+_tmp_113_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -34222,7 +34217,7 @@ _tmp_114_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME STRING"));
+ D(fprintf(stderr, "%*c> _tmp_113[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME STRING"));
expr_ty name_var;
expr_ty string_var;
if (
@@ -34231,12 +34226,12 @@ _tmp_114_rule(Parser *p)
(string_var = _PyPegen_string_token(p)) // STRING
)
{
- D(fprintf(stderr, "%*c+ _tmp_114[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME STRING"));
+ D(fprintf(stderr, "%*c+ _tmp_113[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME STRING"));
_res = _PyPegen_dummy_name(p, name_var, string_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_114[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_113[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME STRING"));
}
{ // SOFT_KEYWORD
@@ -34244,18 +34239,18 @@ _tmp_114_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD"));
+ D(fprintf(stderr, "%*c> _tmp_113[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD"));
expr_ty soft_keyword_var;
if (
(soft_keyword_var = _PyPegen_soft_keyword_token(p)) // SOFT_KEYWORD
)
{
- D(fprintf(stderr, "%*c+ _tmp_114[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD"));
+ D(fprintf(stderr, "%*c+ _tmp_113[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "SOFT_KEYWORD"));
_res = soft_keyword_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_114[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_113[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "SOFT_KEYWORD"));
}
_res = NULL;
@@ -34264,9 +34259,9 @@ _tmp_114_rule(Parser *p)
return _res;
}
-// _tmp_115: 'else' | ':'
+// _tmp_114: 'else' | ':'
static void *
-_tmp_115_rule(Parser *p)
+_tmp_114_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -34282,18 +34277,18 @@ _tmp_115_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'"));
+ D(fprintf(stderr, "%*c> _tmp_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 690)) // token='else'
+ (_keyword = _PyPegen_expect_token(p, 691)) // token='else'
)
{
- D(fprintf(stderr, "%*c+ _tmp_115[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'"));
+ D(fprintf(stderr, "%*c+ _tmp_114[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_115[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_114[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'else'"));
}
{ // ':'
@@ -34301,18 +34296,18 @@ _tmp_115_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_115[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_114[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_115[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_114[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
_res = NULL;
@@ -34321,9 +34316,9 @@ _tmp_115_rule(Parser *p)
return _res;
}
-// _tmp_116: pass_stmt | break_stmt | continue_stmt
+// _tmp_115: pass_stmt | break_stmt | continue_stmt
static void *
-_tmp_116_rule(Parser *p)
+_tmp_115_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -34339,18 +34334,18 @@ _tmp_116_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "pass_stmt"));
+ D(fprintf(stderr, "%*c> _tmp_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "pass_stmt"));
stmt_ty pass_stmt_var;
if (
(pass_stmt_var = pass_stmt_rule(p)) // pass_stmt
)
{
- D(fprintf(stderr, "%*c+ _tmp_116[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "pass_stmt"));
+ D(fprintf(stderr, "%*c+ _tmp_115[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "pass_stmt"));
_res = pass_stmt_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_116[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_115[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "pass_stmt"));
}
{ // break_stmt
@@ -34358,18 +34353,18 @@ _tmp_116_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "break_stmt"));
+ D(fprintf(stderr, "%*c> _tmp_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "break_stmt"));
stmt_ty break_stmt_var;
if (
(break_stmt_var = break_stmt_rule(p)) // break_stmt
)
{
- D(fprintf(stderr, "%*c+ _tmp_116[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "break_stmt"));
+ D(fprintf(stderr, "%*c+ _tmp_115[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "break_stmt"));
_res = break_stmt_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_116[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_115[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "break_stmt"));
}
{ // continue_stmt
@@ -34377,18 +34372,18 @@ _tmp_116_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "continue_stmt"));
+ D(fprintf(stderr, "%*c> _tmp_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "continue_stmt"));
stmt_ty continue_stmt_var;
if (
(continue_stmt_var = continue_stmt_rule(p)) // continue_stmt
)
{
- D(fprintf(stderr, "%*c+ _tmp_116[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "continue_stmt"));
+ D(fprintf(stderr, "%*c+ _tmp_115[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "continue_stmt"));
_res = continue_stmt_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_116[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_115[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "continue_stmt"));
}
_res = NULL;
@@ -34397,9 +34392,9 @@ _tmp_116_rule(Parser *p)
return _res;
}
-// _tmp_117: '=' | ':='
+// _tmp_116: '=' | ':='
static void *
-_tmp_117_rule(Parser *p)
+_tmp_116_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -34415,18 +34410,18 @@ _tmp_117_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c> _tmp_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_117[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c+ _tmp_116[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_117[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_116[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='"));
}
{ // ':='
@@ -34434,18 +34429,18 @@ _tmp_117_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='"));
+ D(fprintf(stderr, "%*c> _tmp_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':='"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 53)) // token=':='
)
{
- D(fprintf(stderr, "%*c+ _tmp_117[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='"));
+ D(fprintf(stderr, "%*c+ _tmp_116[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':='"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_117[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_116[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':='"));
}
_res = NULL;
@@ -34454,9 +34449,9 @@ _tmp_117_rule(Parser *p)
return _res;
}
-// _tmp_118: list | tuple | genexp | 'True' | 'None' | 'False'
+// _tmp_117: list | tuple | genexp | 'True' | 'None' | 'False'
static void *
-_tmp_118_rule(Parser *p)
+_tmp_117_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -34472,18 +34467,18 @@ _tmp_118_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list"));
+ D(fprintf(stderr, "%*c> _tmp_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list"));
expr_ty list_var;
if (
(list_var = list_rule(p)) // list
)
{
- D(fprintf(stderr, "%*c+ _tmp_118[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list"));
+ D(fprintf(stderr, "%*c+ _tmp_117[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list"));
_res = list_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_118[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_117[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list"));
}
{ // tuple
@@ -34491,18 +34486,18 @@ _tmp_118_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple"));
+ D(fprintf(stderr, "%*c> _tmp_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple"));
expr_ty tuple_var;
if (
(tuple_var = tuple_rule(p)) // tuple
)
{
- D(fprintf(stderr, "%*c+ _tmp_118[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple"));
+ D(fprintf(stderr, "%*c+ _tmp_117[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple"));
_res = tuple_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_118[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_117[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple"));
}
{ // genexp
@@ -34510,18 +34505,18 @@ _tmp_118_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp"));
+ D(fprintf(stderr, "%*c> _tmp_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp"));
expr_ty genexp_var;
if (
(genexp_var = genexp_rule(p)) // genexp
)
{
- D(fprintf(stderr, "%*c+ _tmp_118[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp"));
+ D(fprintf(stderr, "%*c+ _tmp_117[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp"));
_res = genexp_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_118[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_117[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "genexp"));
}
{ // 'True'
@@ -34529,18 +34524,18 @@ _tmp_118_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
+ D(fprintf(stderr, "%*c> _tmp_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 622)) // token='True'
+ (_keyword = _PyPegen_expect_token(p, 623)) // token='True'
)
{
- D(fprintf(stderr, "%*c+ _tmp_118[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
+ D(fprintf(stderr, "%*c+ _tmp_117[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_118[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_117[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'True'"));
}
{ // 'None'
@@ -34548,18 +34543,18 @@ _tmp_118_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
+ D(fprintf(stderr, "%*c> _tmp_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 623)) // token='None'
+ (_keyword = _PyPegen_expect_token(p, 624)) // token='None'
)
{
- D(fprintf(stderr, "%*c+ _tmp_118[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
+ D(fprintf(stderr, "%*c+ _tmp_117[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_118[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_117[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'"));
}
{ // 'False'
@@ -34567,18 +34562,18 @@ _tmp_118_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
+ D(fprintf(stderr, "%*c> _tmp_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 624)) // token='False'
+ (_keyword = _PyPegen_expect_token(p, 625)) // token='False'
)
{
- D(fprintf(stderr, "%*c+ _tmp_118[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
+ D(fprintf(stderr, "%*c+ _tmp_117[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_118[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_117[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'False'"));
}
_res = NULL;
@@ -34587,9 +34582,9 @@ _tmp_118_rule(Parser *p)
return _res;
}
-// _loop0_119: star_named_expressions
+// _loop0_118: star_named_expressions
static asdl_seq *
-_loop0_119_rule(Parser *p)
+_loop0_118_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -34614,7 +34609,7 @@ _loop0_119_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_119[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions"));
+ D(fprintf(stderr, "%*c> _loop0_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions"));
asdl_expr_seq* star_named_expressions_var;
while (
(star_named_expressions_var = star_named_expressions_rule(p)) // star_named_expressions
@@ -34637,7 +34632,7 @@ _loop0_119_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_119[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_118[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expressions"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -34654,9 +34649,9 @@ _loop0_119_rule(Parser *p)
return _seq;
}
-// _loop0_120: (star_targets '=')
+// _loop0_119: (star_targets '=')
static asdl_seq *
-_loop0_120_rule(Parser *p)
+_loop0_119_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -34681,13 +34676,13 @@ _loop0_120_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')"));
- void *_tmp_155_var;
+ D(fprintf(stderr, "%*c> _loop0_119[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')"));
+ void *_tmp_154_var;
while (
- (_tmp_155_var = _tmp_155_rule(p)) // star_targets '='
+ (_tmp_154_var = _tmp_154_rule(p)) // star_targets '='
)
{
- _res = _tmp_155_var;
+ _res = _tmp_154_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -34704,7 +34699,7 @@ _loop0_120_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_120[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_119[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -34721,9 +34716,9 @@ _loop0_120_rule(Parser *p)
return _seq;
}
-// _tmp_121: '[' | '(' | '{'
+// _tmp_120: '[' | '(' | '{'
static void *
-_tmp_121_rule(Parser *p)
+_tmp_120_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -34739,18 +34734,18 @@ _tmp_121_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['"));
+ D(fprintf(stderr, "%*c> _tmp_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 9)) // token='['
)
{
- D(fprintf(stderr, "%*c+ _tmp_121[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['"));
+ D(fprintf(stderr, "%*c+ _tmp_120[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_121[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_120[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['"));
}
{ // '('
@@ -34758,18 +34753,18 @@ _tmp_121_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('"));
+ D(fprintf(stderr, "%*c> _tmp_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 7)) // token='('
)
{
- D(fprintf(stderr, "%*c+ _tmp_121[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('"));
+ D(fprintf(stderr, "%*c+ _tmp_120[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_121[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_120[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('"));
}
{ // '{'
@@ -34777,18 +34772,18 @@ _tmp_121_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'"));
+ D(fprintf(stderr, "%*c> _tmp_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 25)) // token='{'
)
{
- D(fprintf(stderr, "%*c+ _tmp_121[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'"));
+ D(fprintf(stderr, "%*c+ _tmp_120[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_121[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_120[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'"));
}
_res = NULL;
@@ -34797,9 +34792,9 @@ _tmp_121_rule(Parser *p)
return _res;
}
-// _tmp_122: '[' | '{'
+// _tmp_121: '[' | '{'
static void *
-_tmp_122_rule(Parser *p)
+_tmp_121_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -34815,18 +34810,18 @@ _tmp_122_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['"));
+ D(fprintf(stderr, "%*c> _tmp_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 9)) // token='['
)
{
- D(fprintf(stderr, "%*c+ _tmp_122[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['"));
+ D(fprintf(stderr, "%*c+ _tmp_121[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_122[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_121[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['"));
}
{ // '{'
@@ -34834,18 +34829,18 @@ _tmp_122_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'"));
+ D(fprintf(stderr, "%*c> _tmp_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 25)) // token='{'
)
{
- D(fprintf(stderr, "%*c+ _tmp_122[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'"));
+ D(fprintf(stderr, "%*c+ _tmp_121[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_122[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_121[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'"));
}
_res = NULL;
@@ -34854,9 +34849,9 @@ _tmp_122_rule(Parser *p)
return _res;
}
-// _tmp_123: slash_no_default | slash_with_default
+// _tmp_122: slash_no_default | slash_with_default
static void *
-_tmp_123_rule(Parser *p)
+_tmp_122_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -34872,18 +34867,18 @@ _tmp_123_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
asdl_arg_seq* slash_no_default_var;
if (
(slash_no_default_var = slash_no_default_rule(p)) // slash_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_123[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_122[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default"));
_res = slash_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_123[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_122[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_no_default"));
}
{ // slash_with_default
@@ -34891,18 +34886,18 @@ _tmp_123_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
+ D(fprintf(stderr, "%*c> _tmp_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
SlashWithDefault* slash_with_default_var;
if (
(slash_with_default_var = slash_with_default_rule(p)) // slash_with_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_123[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
+ D(fprintf(stderr, "%*c+ _tmp_122[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default"));
_res = slash_with_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_123[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_122[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default"));
}
_res = NULL;
@@ -34911,9 +34906,9 @@ _tmp_123_rule(Parser *p)
return _res;
}
-// _tmp_124: ',' | param_no_default
+// _tmp_123: ',' | param_no_default
static void *
-_tmp_124_rule(Parser *p)
+_tmp_123_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -34929,18 +34924,18 @@ _tmp_124_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_124[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_124[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_123[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_124[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_123[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
{ // param_no_default
@@ -34948,18 +34943,18 @@ _tmp_124_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_124[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
if (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_124[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_123[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default"));
_res = param_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_124[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_123[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
_res = NULL;
@@ -34968,9 +34963,9 @@ _tmp_124_rule(Parser *p)
return _res;
}
-// _tmp_125: ')' | ','
+// _tmp_124: ')' | ','
static void *
-_tmp_125_rule(Parser *p)
+_tmp_124_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -34986,18 +34981,18 @@ _tmp_125_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_125[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_124[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_125[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_124[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_125[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_124[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
{ // ','
@@ -35005,18 +35000,18 @@ _tmp_125_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_125[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_124[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_125[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_124[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_125[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_124[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -35025,9 +35020,9 @@ _tmp_125_rule(Parser *p)
return _res;
}
-// _tmp_126: ')' | ',' (')' | '**')
+// _tmp_125: ')' | ',' (')' | '**')
static void *
-_tmp_126_rule(Parser *p)
+_tmp_125_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35043,18 +35038,18 @@ _tmp_126_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_126[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_125[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_126[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_125[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_126[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_125[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
{ // ',' (')' | '**')
@@ -35062,21 +35057,21 @@ _tmp_126_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_126[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')"));
+ D(fprintf(stderr, "%*c> _tmp_125[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')"));
Token * _literal;
- void *_tmp_169_var;
+ void *_tmp_168_var;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (_tmp_169_var = _tmp_169_rule(p)) // ')' | '**'
+ (_tmp_168_var = _tmp_168_rule(p)) // ')' | '**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_126[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')"));
- _res = _PyPegen_dummy_name(p, _literal, _tmp_169_var);
+ D(fprintf(stderr, "%*c+ _tmp_125[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')"));
+ _res = _PyPegen_dummy_name(p, _literal, _tmp_168_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_126[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_125[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (')' | '**')"));
}
_res = NULL;
@@ -35085,9 +35080,9 @@ _tmp_126_rule(Parser *p)
return _res;
}
-// _tmp_127: param_no_default | ','
+// _tmp_126: param_no_default | ','
static void *
-_tmp_127_rule(Parser *p)
+_tmp_126_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35103,18 +35098,18 @@ _tmp_127_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_127[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_126[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default"));
arg_ty param_no_default_var;
if (
(param_no_default_var = param_no_default_rule(p)) // param_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_127[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_126[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default"));
_res = param_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_127[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_126[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default"));
}
{ // ','
@@ -35122,18 +35117,18 @@ _tmp_127_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_127[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_126[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_127[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_126[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_127[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_126[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -35142,9 +35137,9 @@ _tmp_127_rule(Parser *p)
return _res;
}
-// _tmp_128: '*' | '**' | '/'
+// _tmp_127: '*' | '**' | '/'
static void *
-_tmp_128_rule(Parser *p)
+_tmp_127_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35160,18 +35155,18 @@ _tmp_128_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'"));
+ D(fprintf(stderr, "%*c> _tmp_127[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
)
{
- D(fprintf(stderr, "%*c+ _tmp_128[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'"));
+ D(fprintf(stderr, "%*c+ _tmp_127[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_128[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_127[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*'"));
}
{ // '**'
@@ -35179,18 +35174,18 @@ _tmp_128_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c> _tmp_127[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 35)) // token='**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_128[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c+ _tmp_127[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_128[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_127[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'"));
}
{ // '/'
@@ -35198,18 +35193,18 @@ _tmp_128_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'"));
+ D(fprintf(stderr, "%*c> _tmp_127[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
)
{
- D(fprintf(stderr, "%*c+ _tmp_128[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'"));
+ D(fprintf(stderr, "%*c+ _tmp_127[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_128[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_127[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'/'"));
}
_res = NULL;
@@ -35218,9 +35213,9 @@ _tmp_128_rule(Parser *p)
return _res;
}
-// _tmp_129: lambda_slash_no_default | lambda_slash_with_default
+// _tmp_128: lambda_slash_no_default | lambda_slash_with_default
static void *
-_tmp_129_rule(Parser *p)
+_tmp_128_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35236,18 +35231,18 @@ _tmp_129_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
asdl_arg_seq* lambda_slash_no_default_var;
if (
(lambda_slash_no_default_var = lambda_slash_no_default_rule(p)) // lambda_slash_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_129[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_128[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default"));
_res = lambda_slash_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_129[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_128[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_no_default"));
}
{ // lambda_slash_with_default
@@ -35255,18 +35250,18 @@ _tmp_129_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
+ D(fprintf(stderr, "%*c> _tmp_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
SlashWithDefault* lambda_slash_with_default_var;
if (
(lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_129[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
+ D(fprintf(stderr, "%*c+ _tmp_128[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default"));
_res = lambda_slash_with_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_129[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_128[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default"));
}
_res = NULL;
@@ -35275,9 +35270,9 @@ _tmp_129_rule(Parser *p)
return _res;
}
-// _loop0_130: ',' lambda_param
+// _loop0_129: ',' lambda_param
static asdl_seq *
-_loop0_130_rule(Parser *p)
+_loop0_129_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35302,7 +35297,7 @@ _loop0_130_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' lambda_param"));
+ D(fprintf(stderr, "%*c> _loop0_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' lambda_param"));
Token * _literal;
arg_ty elem;
while (
@@ -35334,7 +35329,7 @@ _loop0_130_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_130[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_129[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' lambda_param"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35351,9 +35346,9 @@ _loop0_130_rule(Parser *p)
return _seq;
}
-// _gather_131: lambda_param _loop0_130
+// _gather_130: lambda_param _loop0_129
static asdl_seq *
-_gather_131_rule(Parser *p)
+_gather_130_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35364,27 +35359,27 @@ _gather_131_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // lambda_param _loop0_130
+ { // lambda_param _loop0_129
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_130"));
+ D(fprintf(stderr, "%*c> _gather_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_129"));
arg_ty elem;
asdl_seq * seq;
if (
(elem = lambda_param_rule(p)) // lambda_param
&&
- (seq = _loop0_130_rule(p)) // _loop0_130
+ (seq = _loop0_129_rule(p)) // _loop0_129
)
{
- D(fprintf(stderr, "%*c+ _gather_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_130"));
+ D(fprintf(stderr, "%*c+ _gather_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param _loop0_129"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_131[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param _loop0_130"));
+ D(fprintf(stderr, "%*c%s _gather_130[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param _loop0_129"));
}
_res = NULL;
done:
@@ -35392,9 +35387,9 @@ _gather_131_rule(Parser *p)
return _res;
}
-// _tmp_132: ',' | lambda_param_no_default
+// _tmp_131: ',' | lambda_param_no_default
static void *
-_tmp_132_rule(Parser *p)
+_tmp_131_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35410,18 +35405,18 @@ _tmp_132_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
{ // lambda_param_no_default
@@ -35429,18 +35424,18 @@ _tmp_132_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
arg_ty lambda_param_no_default_var;
if (
(lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
_res = lambda_param_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
_res = NULL;
@@ -35449,9 +35444,9 @@ _tmp_132_rule(Parser *p)
return _res;
}
-// _tmp_133: ':' | ',' (':' | '**')
+// _tmp_132: ':' | ',' (':' | '**')
static void *
-_tmp_133_rule(Parser *p)
+_tmp_132_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35467,18 +35462,18 @@ _tmp_133_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
{ // ',' (':' | '**')
@@ -35486,21 +35481,21 @@ _tmp_133_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')"));
+ D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')"));
Token * _literal;
- void *_tmp_170_var;
+ void *_tmp_169_var;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (_tmp_170_var = _tmp_170_rule(p)) // ':' | '**'
+ (_tmp_169_var = _tmp_169_rule(p)) // ':' | '**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')"));
- _res = _PyPegen_dummy_name(p, _literal, _tmp_170_var);
+ D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')"));
+ _res = _PyPegen_dummy_name(p, _literal, _tmp_169_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (':' | '**')"));
}
_res = NULL;
@@ -35509,9 +35504,9 @@ _tmp_133_rule(Parser *p)
return _res;
}
-// _tmp_134: lambda_param_no_default | ','
+// _tmp_133: lambda_param_no_default | ','
static void *
-_tmp_134_rule(Parser *p)
+_tmp_133_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35527,18 +35522,18 @@ _tmp_134_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
arg_ty lambda_param_no_default_var;
if (
(lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default
)
{
- D(fprintf(stderr, "%*c+ _tmp_134[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
+ D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default"));
_res = lambda_param_no_default_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_134[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default"));
}
{ // ','
@@ -35546,18 +35541,18 @@ _tmp_134_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_134[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_134[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -35566,9 +35561,9 @@ _tmp_134_rule(Parser *p)
return _res;
}
-// _tmp_135: bitwise_or ((',' bitwise_or))* ','?
+// _tmp_134: bitwise_or ((',' bitwise_or))* ','?
static void *
-_tmp_135_rule(Parser *p)
+_tmp_134_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35584,25 +35579,25 @@ _tmp_135_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_or ((',' bitwise_or))* ','?"));
- asdl_seq * _loop0_171_var;
+ D(fprintf(stderr, "%*c> _tmp_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_or ((',' bitwise_or))* ','?"));
+ asdl_seq * _loop0_170_var;
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
expr_ty bitwise_or_var;
if (
(bitwise_or_var = bitwise_or_rule(p)) // bitwise_or
&&
- (_loop0_171_var = _loop0_171_rule(p)) // ((',' bitwise_or))*
+ (_loop0_170_var = _loop0_170_rule(p)) // ((',' bitwise_or))*
&&
(_opt_var = _PyPegen_expect_token(p, 12), !p->error_indicator) // ','?
)
{
- D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or ((',' bitwise_or))* ','?"));
- _res = _PyPegen_dummy_name(p, bitwise_or_var, _loop0_171_var, _opt_var);
+ D(fprintf(stderr, "%*c+ _tmp_134[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or ((',' bitwise_or))* ','?"));
+ _res = _PyPegen_dummy_name(p, bitwise_or_var, _loop0_170_var, _opt_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_134[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "bitwise_or ((',' bitwise_or))* ','?"));
}
_res = NULL;
@@ -35611,9 +35606,9 @@ _tmp_135_rule(Parser *p)
return _res;
}
-// _loop0_136: ',' dotted_name
+// _loop0_135: ',' dotted_name
static asdl_seq *
-_loop0_136_rule(Parser *p)
+_loop0_135_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35638,7 +35633,7 @@ _loop0_136_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_name"));
+ D(fprintf(stderr, "%*c> _loop0_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_name"));
Token * _literal;
expr_ty elem;
while (
@@ -35670,7 +35665,7 @@ _loop0_136_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_136[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_135[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' dotted_name"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35687,9 +35682,9 @@ _loop0_136_rule(Parser *p)
return _seq;
}
-// _gather_137: dotted_name _loop0_136
+// _gather_136: dotted_name _loop0_135
static asdl_seq *
-_gather_137_rule(Parser *p)
+_gather_136_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35700,27 +35695,27 @@ _gather_137_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // dotted_name _loop0_136
+ { // dotted_name _loop0_135
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_name _loop0_136"));
+ D(fprintf(stderr, "%*c> _gather_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_name _loop0_135"));
expr_ty elem;
asdl_seq * seq;
if (
(elem = dotted_name_rule(p)) // dotted_name
&&
- (seq = _loop0_136_rule(p)) // _loop0_136
+ (seq = _loop0_135_rule(p)) // _loop0_135
)
{
- D(fprintf(stderr, "%*c+ _gather_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_name _loop0_136"));
+ D(fprintf(stderr, "%*c+ _gather_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_name _loop0_135"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_137[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_name _loop0_136"));
+ D(fprintf(stderr, "%*c%s _gather_136[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_name _loop0_135"));
}
_res = NULL;
done:
@@ -35728,9 +35723,9 @@ _gather_137_rule(Parser *p)
return _res;
}
-// _tmp_138: NAME (',' | ')' | NEWLINE)
+// _tmp_137: NAME (',' | ')' | NEWLINE)
static void *
-_tmp_138_rule(Parser *p)
+_tmp_137_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35746,21 +35741,21 @@ _tmp_138_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME (',' | ')' | NEWLINE)"));
- void *_tmp_172_var;
+ D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME (',' | ')' | NEWLINE)"));
+ void *_tmp_171_var;
expr_ty name_var;
if (
(name_var = _PyPegen_name_token(p)) // NAME
&&
- (_tmp_172_var = _tmp_172_rule(p)) // ',' | ')' | NEWLINE
+ (_tmp_171_var = _tmp_171_rule(p)) // ',' | ')' | NEWLINE
)
{
- D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME (',' | ')' | NEWLINE)"));
- _res = _PyPegen_dummy_name(p, name_var, _tmp_172_var);
+ D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME (',' | ')' | NEWLINE)"));
+ _res = _PyPegen_dummy_name(p, name_var, _tmp_171_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME (',' | ')' | NEWLINE)"));
}
_res = NULL;
@@ -35769,9 +35764,9 @@ _tmp_138_rule(Parser *p)
return _res;
}
-// _loop0_139: ',' (expression ['as' star_target])
+// _loop0_138: ',' (expression ['as' star_target])
static asdl_seq *
-_loop0_139_rule(Parser *p)
+_loop0_138_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35796,13 +35791,13 @@ _loop0_139_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])"));
+ D(fprintf(stderr, "%*c> _loop0_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expression ['as' star_target])"));
Token * _literal;
void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_173_rule(p)) // expression ['as' star_target]
+ (elem = _tmp_172_rule(p)) // expression ['as' star_target]
)
{
_res = elem;
@@ -35828,7 +35823,7 @@ _loop0_139_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_139[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_138[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expression ['as' star_target])"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35845,9 +35840,9 @@ _loop0_139_rule(Parser *p)
return _seq;
}
-// _gather_140: (expression ['as' star_target]) _loop0_139
+// _gather_139: (expression ['as' star_target]) _loop0_138
static asdl_seq *
-_gather_140_rule(Parser *p)
+_gather_139_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35858,27 +35853,27 @@ _gather_140_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (expression ['as' star_target]) _loop0_139
+ { // (expression ['as' star_target]) _loop0_138
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_139"));
+ D(fprintf(stderr, "%*c> _gather_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_138"));
void *elem;
asdl_seq * seq;
if (
- (elem = _tmp_173_rule(p)) // expression ['as' star_target]
+ (elem = _tmp_172_rule(p)) // expression ['as' star_target]
&&
- (seq = _loop0_139_rule(p)) // _loop0_139
+ (seq = _loop0_138_rule(p)) // _loop0_138
)
{
- D(fprintf(stderr, "%*c+ _gather_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_139"));
+ D(fprintf(stderr, "%*c+ _gather_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expression ['as' star_target]) _loop0_138"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_140[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_139"));
+ D(fprintf(stderr, "%*c%s _gather_139[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expression ['as' star_target]) _loop0_138"));
}
_res = NULL;
done:
@@ -35886,9 +35881,9 @@ _gather_140_rule(Parser *p)
return _res;
}
-// _loop0_141: ',' (expressions ['as' star_target])
+// _loop0_140: ',' (expressions ['as' star_target])
static asdl_seq *
-_loop0_141_rule(Parser *p)
+_loop0_140_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35913,13 +35908,13 @@ _loop0_141_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])"));
+ D(fprintf(stderr, "%*c> _loop0_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (expressions ['as' star_target])"));
Token * _literal;
void *elem;
while (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
- (elem = _tmp_174_rule(p)) // expressions ['as' star_target]
+ (elem = _tmp_173_rule(p)) // expressions ['as' star_target]
)
{
_res = elem;
@@ -35945,7 +35940,7 @@ _loop0_141_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_141[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_140[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (expressions ['as' star_target])"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -35962,9 +35957,9 @@ _loop0_141_rule(Parser *p)
return _seq;
}
-// _gather_142: (expressions ['as' star_target]) _loop0_141
+// _gather_141: (expressions ['as' star_target]) _loop0_140
static asdl_seq *
-_gather_142_rule(Parser *p)
+_gather_141_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -35975,27 +35970,27 @@ _gather_142_rule(Parser *p)
}
asdl_seq * _res = NULL;
int _mark = p->mark;
- { // (expressions ['as' star_target]) _loop0_141
+ { // (expressions ['as' star_target]) _loop0_140
if (p->error_indicator) {
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _gather_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_141"));
+ D(fprintf(stderr, "%*c> _gather_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_140"));
void *elem;
asdl_seq * seq;
if (
- (elem = _tmp_174_rule(p)) // expressions ['as' star_target]
+ (elem = _tmp_173_rule(p)) // expressions ['as' star_target]
&&
- (seq = _loop0_141_rule(p)) // _loop0_141
+ (seq = _loop0_140_rule(p)) // _loop0_140
)
{
- D(fprintf(stderr, "%*c+ _gather_142[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_141"));
+ D(fprintf(stderr, "%*c+ _gather_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(expressions ['as' star_target]) _loop0_140"));
_res = _PyPegen_seq_insert_in_front(p, elem, seq);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _gather_142[%d-%d]: %s failed!\n", p->level, ' ',
- p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_141"));
+ D(fprintf(stderr, "%*c%s _gather_141[%d-%d]: %s failed!\n", p->level, ' ',
+ p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(expressions ['as' star_target]) _loop0_140"));
}
_res = NULL;
done:
@@ -36003,9 +35998,9 @@ _gather_142_rule(Parser *p)
return _res;
}
-// _tmp_143: 'except' | 'finally'
+// _tmp_142: 'except' | 'finally'
static void *
-_tmp_143_rule(Parser *p)
+_tmp_142_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -36021,18 +36016,18 @@ _tmp_143_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'"));
+ D(fprintf(stderr, "%*c> _tmp_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 681)) // token='except'
+ (_keyword = _PyPegen_expect_token(p, 682)) // token='except'
)
{
- D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'"));
+ D(fprintf(stderr, "%*c+ _tmp_142[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_143[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_142[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except'"));
}
{ // 'finally'
@@ -36040,18 +36035,18 @@ _tmp_143_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'"));
+ D(fprintf(stderr, "%*c> _tmp_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally'"));
Token * _keyword;
if (
- (_keyword = _PyPegen_expect_token(p, 677)) // token='finally'
+ (_keyword = _PyPegen_expect_token(p, 678)) // token='finally'
)
{
- D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'"));
+ D(fprintf(stderr, "%*c+ _tmp_142[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally'"));
_res = _keyword;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_143[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_142[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'finally'"));
}
_res = NULL;
@@ -36060,9 +36055,9 @@ _tmp_143_rule(Parser *p)
return _res;
}
-// _loop0_144: block
+// _loop0_143: block
static asdl_seq *
-_loop0_144_rule(Parser *p)
+_loop0_143_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -36087,7 +36082,7 @@ _loop0_144_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block"));
+ D(fprintf(stderr, "%*c> _loop0_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "block"));
asdl_stmt_seq* block_var;
while (
(block_var = block_rule(p)) // block
@@ -36110,7 +36105,7 @@ _loop0_144_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_144[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_143[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "block"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -36127,9 +36122,9 @@ _loop0_144_rule(Parser *p)
return _seq;
}
-// _tmp_145: expression ['as' NAME]
+// _tmp_144: expression ['as' NAME]
static void *
-_tmp_145_rule(Parser *p)
+_tmp_144_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -36145,22 +36140,22 @@ _tmp_145_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]"));
+ D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]"));
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
expr_ty expression_var;
if (
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_22_rule(p), !p->error_indicator) // ['as' NAME]
+ (_opt_var = _tmp_21_rule(p), !p->error_indicator) // ['as' NAME]
)
{
- D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]"));
+ D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' NAME]"));
_res = _PyPegen_dummy_name(p, expression_var, _opt_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ['as' NAME]"));
}
_res = NULL;
@@ -36169,9 +36164,9 @@ _tmp_145_rule(Parser *p)
return _res;
}
-// _tmp_146: NEWLINE | ':'
+// _tmp_145: NEWLINE | ':'
static void *
-_tmp_146_rule(Parser *p)
+_tmp_145_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -36187,18 +36182,18 @@ _tmp_146_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
+ D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
Token * newline_var;
if (
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
{
- D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
+ D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
_res = newline_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE"));
}
{ // ':'
@@ -36206,18 +36201,18 @@ _tmp_146_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
_res = NULL;
@@ -36226,9 +36221,9 @@ _tmp_146_rule(Parser *p)
return _res;
}
-// _tmp_147: positional_patterns ','
+// _tmp_146: positional_patterns ','
static void *
-_tmp_147_rule(Parser *p)
+_tmp_146_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -36244,7 +36239,7 @@ _tmp_147_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "positional_patterns ','"));
+ D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "positional_patterns ','"));
Token * _literal;
asdl_pattern_seq* positional_patterns_var;
if (
@@ -36253,12 +36248,12 @@ _tmp_147_rule(Parser *p)
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "positional_patterns ','"));
+ D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "positional_patterns ','"));
_res = _PyPegen_dummy_name(p, positional_patterns_var, _literal);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "positional_patterns ','"));
}
_res = NULL;
@@ -36267,9 +36262,9 @@ _tmp_147_rule(Parser *p)
return _res;
}
-// _tmp_148: '}' | ','
+// _tmp_147: '}' | ','
static void *
-_tmp_148_rule(Parser *p)
+_tmp_147_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -36285,18 +36280,18 @@ _tmp_148_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
+ D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 26)) // token='}'
)
{
- D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
+ D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'"));
}
{ // ','
@@ -36304,18 +36299,18 @@ _tmp_148_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
_res = NULL;
@@ -36324,9 +36319,9 @@ _tmp_148_rule(Parser *p)
return _res;
}
-// _tmp_149: '=' | '!' | ':' | '}'
+// _tmp_148: '=' | '!' | ':' | '}'
static void *
-_tmp_149_rule(Parser *p)
+_tmp_148_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -36342,18 +36337,18 @@ _tmp_149_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'='"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
+ D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'='"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'='"));
}
{ // '!'
@@ -36361,18 +36356,18 @@ _tmp_149_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!'"));
+ D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 54)) // token='!'
)
{
- D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!'"));
+ D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!'"));
}
{ // ':'
@@ -36380,18 +36375,18 @@ _tmp_149_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
{ // '}'
@@ -36399,18 +36394,18 @@ _tmp_149_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
+ D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 26)) // token='}'
)
{
- D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
+ D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'"));
}
_res = NULL;
@@ -36419,9 +36414,9 @@ _tmp_149_rule(Parser *p)
return _res;
}
-// _tmp_150: '!' | ':' | '}'
+// _tmp_149: '!' | ':' | '}'
static void *
-_tmp_150_rule(Parser *p)
+_tmp_149_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -36437,18 +36432,18 @@ _tmp_150_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!'"));
+ D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 54)) // token='!'
)
{
- D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!'"));
+ D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!'"));
}
{ // ':'
@@ -36456,18 +36451,18 @@ _tmp_150_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
{ // '}'
@@ -36475,18 +36470,18 @@ _tmp_150_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
+ D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 26)) // token='}'
)
{
- D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
+ D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'"));
}
_res = NULL;
@@ -36495,9 +36490,9 @@ _tmp_150_rule(Parser *p)
return _res;
}
-// _tmp_151: '!' NAME
+// _tmp_150: '!' NAME
static void *
-_tmp_151_rule(Parser *p)
+_tmp_150_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -36513,7 +36508,7 @@ _tmp_151_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' NAME"));
+ D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!' NAME"));
Token * _literal;
expr_ty name_var;
if (
@@ -36522,12 +36517,12 @@ _tmp_151_rule(Parser *p)
(name_var = _PyPegen_name_token(p)) // NAME
)
{
- D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME"));
+ D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!' NAME"));
_res = _PyPegen_dummy_name(p, _literal, name_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'!' NAME"));
}
_res = NULL;
@@ -36536,9 +36531,9 @@ _tmp_151_rule(Parser *p)
return _res;
}
-// _tmp_152: ':' | '}'
+// _tmp_151: ':' | '}'
static void *
-_tmp_152_rule(Parser *p)
+_tmp_151_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -36554,18 +36549,18 @@ _tmp_152_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
{ // '}'
@@ -36573,18 +36568,18 @@ _tmp_152_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
+ D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'}'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 26)) // token='}'
)
{
- D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
+ D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'}'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'}'"));
}
_res = NULL;
@@ -36593,9 +36588,9 @@ _tmp_152_rule(Parser *p)
return _res;
}
-// _tmp_153: '+' | '-' | '*' | '/' | '%' | '//' | '@'
+// _tmp_152: '+' | '-' | '*' | '/' | '%' | '//' | '@'
static void *
-_tmp_153_rule(Parser *p)
+_tmp_152_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -36611,18 +36606,18 @@ _tmp_153_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+'"));
+ D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 14)) // token='+'
)
{
- D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+'"));
+ D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'+'"));
}
{ // '-'
@@ -36630,18 +36625,18 @@ _tmp_153_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-'"));
+ D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 15)) // token='-'
)
{
- D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-'"));
+ D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'-'"));
}
{ // '*'
@@ -36649,18 +36644,18 @@ _tmp_153_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'"));
+ D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 16)) // token='*'
)
{
- D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'"));
+ D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*'"));
}
{ // '/'
@@ -36668,18 +36663,18 @@ _tmp_153_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'"));
+ D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 17)) // token='/'
)
{
- D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'"));
+ D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'/'"));
}
{ // '%'
@@ -36687,18 +36682,18 @@ _tmp_153_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'%'"));
+ D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'%'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 24)) // token='%'
)
{
- D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'%'"));
+ D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'%'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'%'"));
}
{ // '//'
@@ -36706,18 +36701,18 @@ _tmp_153_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'//'"));
+ D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'//'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 47)) // token='//'
)
{
- D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'//'"));
+ D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'//'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'//'"));
}
{ // '@'
@@ -36725,18 +36720,18 @@ _tmp_153_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'"));
+ D(fprintf(stderr, "%*c> _tmp_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 49)) // token='@'
)
{
- D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'"));
+ D(fprintf(stderr, "%*c+ _tmp_152[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_152[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@'"));
}
_res = NULL;
@@ -36745,9 +36740,9 @@ _tmp_153_rule(Parser *p)
return _res;
}
-// _tmp_154: '+' | '-' | '~'
+// _tmp_153: '+' | '-' | '~'
static void *
-_tmp_154_rule(Parser *p)
+_tmp_153_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -36763,18 +36758,18 @@ _tmp_154_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+'"));
+ D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 14)) // token='+'
)
{
- D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+'"));
+ D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'+'"));
}
{ // '-'
@@ -36782,18 +36777,18 @@ _tmp_154_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-'"));
+ D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 15)) // token='-'
)
{
- D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-'"));
+ D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'-'"));
}
{ // '~'
@@ -36801,18 +36796,18 @@ _tmp_154_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'~'"));
+ D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'~'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 31)) // token='~'
)
{
- D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'~'"));
+ D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'~'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'~'"));
}
_res = NULL;
@@ -36821,9 +36816,9 @@ _tmp_154_rule(Parser *p)
return _res;
}
-// _tmp_155: star_targets '='
+// _tmp_154: star_targets '='
static void *
-_tmp_155_rule(Parser *p)
+_tmp_154_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -36839,7 +36834,7 @@ _tmp_155_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
+ D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
Token * _literal;
expr_ty z;
if (
@@ -36848,7 +36843,7 @@ _tmp_155_rule(Parser *p)
(_literal = _PyPegen_expect_token(p, 22)) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
+ D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -36858,7 +36853,7 @@ _tmp_155_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='"));
}
_res = NULL;
@@ -36867,9 +36862,9 @@ _tmp_155_rule(Parser *p)
return _res;
}
-// _tmp_156: '.' | '...'
+// _tmp_155: '.' | '...'
static void *
-_tmp_156_rule(Parser *p)
+_tmp_155_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -36885,18 +36880,18 @@ _tmp_156_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'"));
+ D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 23)) // token='.'
)
{
- D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'"));
+ D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'"));
}
{ // '...'
@@ -36904,18 +36899,18 @@ _tmp_156_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'"));
+ D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 52)) // token='...'
)
{
- D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'"));
+ D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'"));
}
_res = NULL;
@@ -36924,9 +36919,9 @@ _tmp_156_rule(Parser *p)
return _res;
}
-// _tmp_157: '@' named_expression NEWLINE
+// _tmp_156: '@' named_expression NEWLINE
static void *
-_tmp_157_rule(Parser *p)
+_tmp_156_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -36942,7 +36937,7 @@ _tmp_157_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE"));
+ D(fprintf(stderr, "%*c> _tmp_156[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE"));
Token * _literal;
expr_ty f;
Token * newline_var;
@@ -36954,7 +36949,7 @@ _tmp_157_rule(Parser *p)
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
{
- D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE"));
+ D(fprintf(stderr, "%*c+ _tmp_156[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE"));
_res = f;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -36964,7 +36959,7 @@ _tmp_157_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_156[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@' named_expression NEWLINE"));
}
_res = NULL;
@@ -36973,9 +36968,9 @@ _tmp_157_rule(Parser *p)
return _res;
}
-// _tmp_158: ',' star_expression
+// _tmp_157: ',' star_expression
static void *
-_tmp_158_rule(Parser *p)
+_tmp_157_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -36991,7 +36986,7 @@ _tmp_158_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression"));
+ D(fprintf(stderr, "%*c> _tmp_157[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression"));
Token * _literal;
expr_ty c;
if (
@@ -37000,7 +36995,7 @@ _tmp_158_rule(Parser *p)
(c = star_expression_rule(p)) // star_expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression"));
+ D(fprintf(stderr, "%*c+ _tmp_157[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression"));
_res = c;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -37010,7 +37005,7 @@ _tmp_158_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_157[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_expression"));
}
_res = NULL;
@@ -37019,9 +37014,9 @@ _tmp_158_rule(Parser *p)
return _res;
}
-// _tmp_159: 'or' conjunction
+// _tmp_158: 'or' conjunction
static void *
-_tmp_159_rule(Parser *p)
+_tmp_158_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37037,16 +37032,16 @@ _tmp_159_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction"));
+ D(fprintf(stderr, "%*c> _tmp_158[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction"));
Token * _keyword;
expr_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 588)) // token='or'
+ (_keyword = _PyPegen_expect_token(p, 589)) // token='or'
&&
(c = conjunction_rule(p)) // conjunction
)
{
- D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction"));
+ D(fprintf(stderr, "%*c+ _tmp_158[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction"));
_res = c;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -37056,7 +37051,7 @@ _tmp_159_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_158[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'or' conjunction"));
}
_res = NULL;
@@ -37065,9 +37060,9 @@ _tmp_159_rule(Parser *p)
return _res;
}
-// _tmp_160: 'and' inversion
+// _tmp_159: 'and' inversion
static void *
-_tmp_160_rule(Parser *p)
+_tmp_159_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37083,16 +37078,16 @@ _tmp_160_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion"));
+ D(fprintf(stderr, "%*c> _tmp_159[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion"));
Token * _keyword;
expr_ty c;
if (
- (_keyword = _PyPegen_expect_token(p, 589)) // token='and'
+ (_keyword = _PyPegen_expect_token(p, 590)) // token='and'
&&
(c = inversion_rule(p)) // inversion
)
{
- D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion"));
+ D(fprintf(stderr, "%*c+ _tmp_159[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion"));
_res = c;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -37102,7 +37097,7 @@ _tmp_160_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_159[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'and' inversion"));
}
_res = NULL;
@@ -37111,9 +37106,9 @@ _tmp_160_rule(Parser *p)
return _res;
}
-// _tmp_161: slice | starred_expression
+// _tmp_160: slice | starred_expression
static void *
-_tmp_161_rule(Parser *p)
+_tmp_160_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37129,18 +37124,18 @@ _tmp_161_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice"));
+ D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice"));
expr_ty slice_var;
if (
(slice_var = slice_rule(p)) // slice
)
{
- D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice"));
+ D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice"));
_res = slice_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slice"));
}
{ // starred_expression
@@ -37148,18 +37143,18 @@ _tmp_161_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression"));
+ D(fprintf(stderr, "%*c> _tmp_160[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression"));
expr_ty starred_expression_var;
if (
(starred_expression_var = starred_expression_rule(p)) // starred_expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression"));
+ D(fprintf(stderr, "%*c+ _tmp_160[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression"));
_res = starred_expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_160[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression"));
}
_res = NULL;
@@ -37168,9 +37163,9 @@ _tmp_161_rule(Parser *p)
return _res;
}
-// _tmp_162: fstring | string | tstring
+// _tmp_161: fstring | string | tstring
static void *
-_tmp_162_rule(Parser *p)
+_tmp_161_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37186,18 +37181,18 @@ _tmp_162_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring"));
+ D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "fstring"));
expr_ty fstring_var;
if (
(fstring_var = fstring_rule(p)) // fstring
)
{
- D(fprintf(stderr, "%*c+ _tmp_162[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring"));
+ D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "fstring"));
_res = fstring_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_162[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "fstring"));
}
{ // string
@@ -37205,18 +37200,18 @@ _tmp_162_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "string"));
+ D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "string"));
expr_ty string_var;
if (
(string_var = string_rule(p)) // string
)
{
- D(fprintf(stderr, "%*c+ _tmp_162[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "string"));
+ D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "string"));
_res = string_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_162[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "string"));
}
{ // tstring
@@ -37224,18 +37219,18 @@ _tmp_162_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tstring"));
+ D(fprintf(stderr, "%*c> _tmp_161[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tstring"));
expr_ty tstring_var;
if (
(tstring_var = tstring_rule(p)) // tstring
)
{
- D(fprintf(stderr, "%*c+ _tmp_162[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tstring"));
+ D(fprintf(stderr, "%*c+ _tmp_161[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tstring"));
_res = tstring_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_162[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_161[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tstring"));
}
_res = NULL;
@@ -37244,9 +37239,9 @@ _tmp_162_rule(Parser *p)
return _res;
}
-// _tmp_163: 'if' disjunction
+// _tmp_162: 'if' disjunction
static void *
-_tmp_163_rule(Parser *p)
+_tmp_162_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37262,16 +37257,16 @@ _tmp_163_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
+ D(fprintf(stderr, "%*c> _tmp_162[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
Token * _keyword;
expr_ty z;
if (
- (_keyword = _PyPegen_expect_token(p, 686)) // token='if'
+ (_keyword = _PyPegen_expect_token(p, 687)) // token='if'
&&
(z = disjunction_rule(p)) // disjunction
)
{
- D(fprintf(stderr, "%*c+ _tmp_163[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
+ D(fprintf(stderr, "%*c+ _tmp_162[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction"));
_res = z;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -37281,7 +37276,7 @@ _tmp_163_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_163[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_162[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction"));
}
_res = NULL;
@@ -37290,9 +37285,9 @@ _tmp_163_rule(Parser *p)
return _res;
}
-// _tmp_164: starred_expression | (assignment_expression | expression !':=') !'='
+// _tmp_163: starred_expression | (assignment_expression | expression !':=') !'='
static void *
-_tmp_164_rule(Parser *p)
+_tmp_163_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37308,18 +37303,18 @@ _tmp_164_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression"));
+ D(fprintf(stderr, "%*c> _tmp_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression"));
expr_ty starred_expression_var;
if (
(starred_expression_var = starred_expression_rule(p)) // starred_expression
)
{
- D(fprintf(stderr, "%*c+ _tmp_164[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression"));
+ D(fprintf(stderr, "%*c+ _tmp_163[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression"));
_res = starred_expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_164[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_163[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression"));
}
{ // (assignment_expression | expression !':=') !'='
@@ -37327,20 +37322,20 @@ _tmp_164_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='"));
- void *_tmp_87_var;
+ D(fprintf(stderr, "%*c> _tmp_163[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='"));
+ void *_tmp_86_var;
if (
- (_tmp_87_var = _tmp_87_rule(p)) // assignment_expression | expression !':='
+ (_tmp_86_var = _tmp_86_rule(p)) // assignment_expression | expression !':='
&&
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_164[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='"));
- _res = _tmp_87_var;
+ D(fprintf(stderr, "%*c+ _tmp_163[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(assignment_expression | expression !':=') !'='"));
+ _res = _tmp_86_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_164[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_163[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(assignment_expression | expression !':=') !'='"));
}
_res = NULL;
@@ -37349,9 +37344,9 @@ _tmp_164_rule(Parser *p)
return _res;
}
-// _tmp_165: ',' star_target
+// _tmp_164: ',' star_target
static void *
-_tmp_165_rule(Parser *p)
+_tmp_164_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37367,7 +37362,7 @@ _tmp_165_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_164[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target"));
Token * _literal;
expr_ty c;
if (
@@ -37376,7 +37371,7 @@ _tmp_165_rule(Parser *p)
(c = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_165[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_164[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target"));
_res = c;
if (_res == NULL && PyErr_Occurred()) {
p->error_indicator = 1;
@@ -37386,7 +37381,7 @@ _tmp_165_rule(Parser *p)
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_165[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_164[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target"));
}
_res = NULL;
@@ -37395,10 +37390,10 @@ _tmp_165_rule(Parser *p)
return _res;
}
-// _tmp_166:
+// _tmp_165:
// | ','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs
static void *
-_tmp_166_rule(Parser *p)
+_tmp_165_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37414,24 +37409,24 @@ _tmp_166_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs"));
- asdl_seq * _gather_89_var;
+ D(fprintf(stderr, "%*c> _tmp_165[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs"));
+ asdl_seq * _gather_88_var;
Token * _literal;
asdl_seq* kwargs_var;
if (
- (_gather_89_var = _gather_89_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+
+ (_gather_88_var = _gather_88_rule(p)) // ','.(starred_expression | (assignment_expression | expression !':=') !'=')+
&&
(_literal = _PyPegen_expect_token(p, 12)) // token=','
&&
(kwargs_var = kwargs_rule(p)) // kwargs
)
{
- D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs"));
- _res = _PyPegen_dummy_name(p, _gather_89_var, _literal, kwargs_var);
+ D(fprintf(stderr, "%*c+ _tmp_165[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs"));
+ _res = _PyPegen_dummy_name(p, _gather_88_var, _literal, kwargs_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_165[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.(starred_expression | (assignment_expression | expression !':=') !'=')+ ',' kwargs"));
}
_res = NULL;
@@ -37440,9 +37435,9 @@ _tmp_166_rule(Parser *p)
return _res;
}
-// _tmp_167: starred_expression !'='
+// _tmp_166: starred_expression !'='
static void *
-_tmp_167_rule(Parser *p)
+_tmp_166_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37458,7 +37453,7 @@ _tmp_167_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_167[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression !'='"));
+ D(fprintf(stderr, "%*c> _tmp_166[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression !'='"));
expr_ty starred_expression_var;
if (
(starred_expression_var = starred_expression_rule(p)) // starred_expression
@@ -37466,12 +37461,12 @@ _tmp_167_rule(Parser *p)
_PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='='
)
{
- D(fprintf(stderr, "%*c+ _tmp_167[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression !'='"));
+ D(fprintf(stderr, "%*c+ _tmp_166[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression !'='"));
_res = starred_expression_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_167[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_166[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression !'='"));
}
_res = NULL;
@@ -37480,9 +37475,9 @@ _tmp_167_rule(Parser *p)
return _res;
}
-// _tmp_168: !STRING expression_without_invalid
+// _tmp_167: !STRING expression_without_invalid
static void *
-_tmp_168_rule(Parser *p)
+_tmp_167_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37498,7 +37493,7 @@ _tmp_168_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!STRING expression_without_invalid"));
+ D(fprintf(stderr, "%*c> _tmp_167[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!STRING expression_without_invalid"));
expr_ty expression_without_invalid_var;
if (
_PyPegen_lookahead(0, _PyPegen_string_token, p)
@@ -37506,12 +37501,12 @@ _tmp_168_rule(Parser *p)
(expression_without_invalid_var = expression_without_invalid_rule(p)) // expression_without_invalid
)
{
- D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!STRING expression_without_invalid"));
+ D(fprintf(stderr, "%*c+ _tmp_167[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!STRING expression_without_invalid"));
_res = expression_without_invalid_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_167[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "!STRING expression_without_invalid"));
}
_res = NULL;
@@ -37520,9 +37515,9 @@ _tmp_168_rule(Parser *p)
return _res;
}
-// _tmp_169: ')' | '**'
+// _tmp_168: ')' | '**'
static void *
-_tmp_169_rule(Parser *p)
+_tmp_168_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37538,18 +37533,18 @@ _tmp_169_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_169[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_169[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
{ // '**'
@@ -37557,18 +37552,18 @@ _tmp_169_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c> _tmp_168[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 35)) // token='**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_169[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c+ _tmp_168[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_169[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_168[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'"));
}
_res = NULL;
@@ -37577,9 +37572,9 @@ _tmp_169_rule(Parser *p)
return _res;
}
-// _tmp_170: ':' | '**'
+// _tmp_169: ':' | '**'
static void *
-_tmp_170_rule(Parser *p)
+_tmp_169_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37595,18 +37590,18 @@ _tmp_170_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c> _tmp_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 11)) // token=':'
)
{
- D(fprintf(stderr, "%*c+ _tmp_170[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
+ D(fprintf(stderr, "%*c+ _tmp_169[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_170[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_169[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'"));
}
{ // '**'
@@ -37614,18 +37609,18 @@ _tmp_170_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c> _tmp_169[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 35)) // token='**'
)
{
- D(fprintf(stderr, "%*c+ _tmp_170[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
+ D(fprintf(stderr, "%*c+ _tmp_169[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_170[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_169[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'"));
}
_res = NULL;
@@ -37634,9 +37629,9 @@ _tmp_170_rule(Parser *p)
return _res;
}
-// _loop0_171: (',' bitwise_or)
+// _loop0_170: (',' bitwise_or)
static asdl_seq *
-_loop0_171_rule(Parser *p)
+_loop0_170_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37661,13 +37656,13 @@ _loop0_171_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _loop0_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' bitwise_or)"));
- void *_tmp_175_var;
+ D(fprintf(stderr, "%*c> _loop0_170[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' bitwise_or)"));
+ void *_tmp_174_var;
while (
- (_tmp_175_var = _tmp_175_rule(p)) // ',' bitwise_or
+ (_tmp_174_var = _tmp_174_rule(p)) // ',' bitwise_or
)
{
- _res = _tmp_175_var;
+ _res = _tmp_174_var;
if (_n == _children_capacity) {
_children_capacity *= 2;
void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *));
@@ -37684,7 +37679,7 @@ _loop0_171_rule(Parser *p)
_mark = p->mark;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _loop0_171[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _loop0_170[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' bitwise_or)"));
}
asdl_seq *_seq = (asdl_seq*)_Py_asdl_generic_seq_new(_n, p->arena);
@@ -37701,9 +37696,9 @@ _loop0_171_rule(Parser *p)
return _seq;
}
-// _tmp_172: ',' | ')' | NEWLINE
+// _tmp_171: ',' | ')' | NEWLINE
static void *
-_tmp_172_rule(Parser *p)
+_tmp_171_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37719,18 +37714,18 @@ _tmp_172_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 12)) // token=','
)
{
- D(fprintf(stderr, "%*c+ _tmp_172[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
+ D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_172[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','"));
}
{ // ')'
@@ -37738,18 +37733,18 @@ _tmp_172_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'"));
Token * _literal;
if (
(_literal = _PyPegen_expect_token(p, 8)) // token=')'
)
{
- D(fprintf(stderr, "%*c+ _tmp_172[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
+ D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'"));
_res = _literal;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_172[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'"));
}
{ // NEWLINE
@@ -37757,18 +37752,18 @@ _tmp_172_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
+ D(fprintf(stderr, "%*c> _tmp_171[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
Token * newline_var;
if (
(newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE'
)
{
- D(fprintf(stderr, "%*c+ _tmp_172[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
+ D(fprintf(stderr, "%*c+ _tmp_171[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE"));
_res = newline_var;
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_172[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_171[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE"));
}
_res = NULL;
@@ -37777,9 +37772,9 @@ _tmp_172_rule(Parser *p)
return _res;
}
-// _tmp_173: expression ['as' star_target]
+// _tmp_172: expression ['as' star_target]
static void *
-_tmp_173_rule(Parser *p)
+_tmp_172_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37795,22 +37790,22 @@ _tmp_173_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
+ D(fprintf(stderr, "%*c> _tmp_172[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
expr_ty expression_var;
if (
(expression_var = expression_rule(p)) // expression
&&
- (_opt_var = _tmp_176_rule(p), !p->error_indicator) // ['as' star_target]
+ (_opt_var = _tmp_175_rule(p), !p->error_indicator) // ['as' star_target]
)
{
- D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
+ D(fprintf(stderr, "%*c+ _tmp_172[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' star_target]"));
_res = _PyPegen_dummy_name(p, expression_var, _opt_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_172[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ['as' star_target]"));
}
_res = NULL;
@@ -37819,9 +37814,9 @@ _tmp_173_rule(Parser *p)
return _res;
}
-// _tmp_174: expressions ['as' star_target]
+// _tmp_173: expressions ['as' star_target]
static void *
-_tmp_174_rule(Parser *p)
+_tmp_173_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37837,22 +37832,22 @@ _tmp_174_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
+ D(fprintf(stderr, "%*c> _tmp_173[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
void *_opt_var;
UNUSED(_opt_var); // Silence compiler warnings
expr_ty expressions_var;
if (
(expressions_var = expressions_rule(p)) // expressions
&&
- (_opt_var = _tmp_176_rule(p), !p->error_indicator) // ['as' star_target]
+ (_opt_var = _tmp_175_rule(p), !p->error_indicator) // ['as' star_target]
)
{
- D(fprintf(stderr, "%*c+ _tmp_174[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
+ D(fprintf(stderr, "%*c+ _tmp_173[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions ['as' star_target]"));
_res = _PyPegen_dummy_name(p, expressions_var, _opt_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_174[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_173[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expressions ['as' star_target]"));
}
_res = NULL;
@@ -37861,9 +37856,9 @@ _tmp_174_rule(Parser *p)
return _res;
}
-// _tmp_175: ',' bitwise_or
+// _tmp_174: ',' bitwise_or
static void *
-_tmp_175_rule(Parser *p)
+_tmp_174_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37879,7 +37874,7 @@ _tmp_175_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' bitwise_or"));
+ D(fprintf(stderr, "%*c> _tmp_174[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' bitwise_or"));
Token * _literal;
expr_ty bitwise_or_var;
if (
@@ -37888,12 +37883,12 @@ _tmp_175_rule(Parser *p)
(bitwise_or_var = bitwise_or_rule(p)) // bitwise_or
)
{
- D(fprintf(stderr, "%*c+ _tmp_175[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' bitwise_or"));
+ D(fprintf(stderr, "%*c+ _tmp_174[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' bitwise_or"));
_res = _PyPegen_dummy_name(p, _literal, bitwise_or_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_175[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_174[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' bitwise_or"));
}
_res = NULL;
@@ -37902,9 +37897,9 @@ _tmp_175_rule(Parser *p)
return _res;
}
-// _tmp_176: 'as' star_target
+// _tmp_175: 'as' star_target
static void *
-_tmp_176_rule(Parser *p)
+_tmp_175_rule(Parser *p)
{
if (p->level++ == MAXSTACK || _Py_ReachedRecursionLimitWithMargin(PyThreadState_Get(), 1)) {
_Pypegen_stack_overflow(p);
@@ -37920,21 +37915,21 @@ _tmp_176_rule(Parser *p)
p->level--;
return NULL;
}
- D(fprintf(stderr, "%*c> _tmp_176[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c> _tmp_175[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
Token * _keyword;
expr_ty star_target_var;
if (
- (_keyword = _PyPegen_expect_token(p, 684)) // token='as'
+ (_keyword = _PyPegen_expect_token(p, 685)) // token='as'
&&
(star_target_var = star_target_rule(p)) // star_target
)
{
- D(fprintf(stderr, "%*c+ _tmp_176[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
+ D(fprintf(stderr, "%*c+ _tmp_175[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' star_target"));
_res = _PyPegen_dummy_name(p, _keyword, star_target_var);
goto done;
}
p->mark = _mark;
- D(fprintf(stderr, "%*c%s _tmp_176[%d-%d]: %s failed!\n", p->level, ' ',
+ D(fprintf(stderr, "%*c%s _tmp_175[%d-%d]: %s failed!\n", p->level, ' ',
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' star_target"));
}
_res = NULL;
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 535e552e047..d9abc4c53d1 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -850,7 +850,7 @@ dummy_func(
DEOPT_IF(!res);
}
- pure op(_BINARY_OP_EXTEND, (descr/4, left, right -- res)) {
+ op(_BINARY_OP_EXTEND, (descr/4, left, right -- res)) {
PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
assert(INLINE_CACHE_ENTRIES_BINARY_OP == 5);
@@ -2327,19 +2327,18 @@ dummy_func(
#endif /* ENABLE_SPECIALIZATION_FT */
}
- op(_LOAD_ATTR, (owner -- attr, self_or_null[oparg&1])) {
+ op(_LOAD_ATTR, (owner -- attr[1], self_or_null[oparg&1])) {
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1);
- PyObject *attr_o;
if (oparg & 1) {
/* Designed to work in tandem with CALL, pushes two values. */
- attr_o = NULL;
- int is_meth = _PyObject_GetMethod(PyStackRef_AsPyObjectBorrow(owner), name, &attr_o);
+ *attr = PyStackRef_NULL;
+ int is_meth = _PyObject_GetMethodStackRef(tstate, PyStackRef_AsPyObjectBorrow(owner), name, attr);
if (is_meth) {
/* We can bypass temporary bound method object.
meth is unbound method and obj is self.
meth | self | arg1 | ... | argN
*/
- assert(attr_o != NULL); // No errors on this branch
+ assert(!PyStackRef_IsNull(*attr)); // No errors on this branch
self_or_null[0] = owner; // Transfer ownership
DEAD(owner);
}
@@ -2351,17 +2350,17 @@ dummy_func(
meth | NULL | arg1 | ... | argN
*/
PyStackRef_CLOSE(owner);
- ERROR_IF(attr_o == NULL);
+ ERROR_IF(PyStackRef_IsNull(*attr));
self_or_null[0] = PyStackRef_NULL;
}
}
else {
/* Classic, pushes one value. */
- attr_o = PyObject_GetAttr(PyStackRef_AsPyObjectBorrow(owner), name);
+ PyObject *attr_o = PyObject_GetAttr(PyStackRef_AsPyObjectBorrow(owner), name);
PyStackRef_CLOSE(owner);
ERROR_IF(attr_o == NULL);
+ *attr = PyStackRef_FromPyObjectSteal(attr_o);
}
- attr = PyStackRef_FromPyObjectSteal(attr_o);
}
macro(LOAD_ATTR) =
@@ -2642,12 +2641,6 @@ dummy_func(
PyDictObject *dict = _PyObject_GetManagedDict(owner_o);
DEOPT_IF(dict == NULL);
DEOPT_IF(!LOCK_OBJECT(dict));
- #ifdef Py_GIL_DISABLED
- if (dict != _PyObject_GetManagedDict(owner_o)) {
- UNLOCK_OBJECT(dict);
- DEOPT_IF(true);
- }
- #endif
assert(PyDict_CheckExact((PyObject *)dict));
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
if (hint >= (size_t)dict->ma_keys->dk_nentries ||
diff --git a/Python/ceval.c b/Python/ceval.c
index d1de4875656..50665defd38 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -346,13 +346,13 @@ _Py_ReachedRecursionLimitWithMargin(PyThreadState *tstate, int margin_count)
{
uintptr_t here_addr = _Py_get_machine_stack_pointer();
_PyThreadStateImpl *_tstate = (_PyThreadStateImpl *)tstate;
- if (here_addr > _tstate->c_stack_soft_limit + margin_count * PYOS_STACK_MARGIN_BYTES) {
+ if (here_addr > _tstate->c_stack_soft_limit + margin_count * _PyOS_STACK_MARGIN_BYTES) {
return 0;
}
if (_tstate->c_stack_hard_limit == 0) {
_Py_InitializeRecursionLimits(tstate);
}
- return here_addr <= _tstate->c_stack_soft_limit + margin_count * PYOS_STACK_MARGIN_BYTES;
+ return here_addr <= _tstate->c_stack_soft_limit + margin_count * _PyOS_STACK_MARGIN_BYTES;
}
void
@@ -448,8 +448,8 @@ _Py_InitializeRecursionLimits(PyThreadState *tstate)
_tstate->c_stack_top = (uintptr_t)high;
ULONG guarantee = 0;
SetThreadStackGuarantee(&guarantee);
- _tstate->c_stack_hard_limit = ((uintptr_t)low) + guarantee + PYOS_STACK_MARGIN_BYTES;
- _tstate->c_stack_soft_limit = _tstate->c_stack_hard_limit + PYOS_STACK_MARGIN_BYTES;
+ _tstate->c_stack_hard_limit = ((uintptr_t)low) + guarantee + _PyOS_STACK_MARGIN_BYTES;
+ _tstate->c_stack_soft_limit = _tstate->c_stack_hard_limit + _PyOS_STACK_MARGIN_BYTES;
#else
uintptr_t here_addr = _Py_get_machine_stack_pointer();
# if defined(HAVE_PTHREAD_GETATTR_NP) && !defined(_AIX) && !defined(__NetBSD__)
@@ -469,9 +469,9 @@ _Py_InitializeRecursionLimits(PyThreadState *tstate)
// Thread sanitizer crashes if we use a bit more than half the stack.
_tstate->c_stack_soft_limit = base + (stack_size / 2);
#else
- _tstate->c_stack_soft_limit = base + PYOS_STACK_MARGIN_BYTES * 2;
+ _tstate->c_stack_soft_limit = base + _PyOS_STACK_MARGIN_BYTES * 2;
#endif
- _tstate->c_stack_hard_limit = base + PYOS_STACK_MARGIN_BYTES;
+ _tstate->c_stack_hard_limit = base + _PyOS_STACK_MARGIN_BYTES;
assert(_tstate->c_stack_soft_limit < here_addr);
assert(here_addr < _tstate->c_stack_top);
return;
@@ -479,7 +479,7 @@ _Py_InitializeRecursionLimits(PyThreadState *tstate)
# endif
_tstate->c_stack_top = _Py_SIZE_ROUND_UP(here_addr, 4096);
_tstate->c_stack_soft_limit = _tstate->c_stack_top - Py_C_STACK_SIZE;
- _tstate->c_stack_hard_limit = _tstate->c_stack_top - (Py_C_STACK_SIZE + PYOS_STACK_MARGIN_BYTES);
+ _tstate->c_stack_hard_limit = _tstate->c_stack_top - (Py_C_STACK_SIZE + _PyOS_STACK_MARGIN_BYTES);
#endif
}
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index 46fc164a5b3..e152865e4ec 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -3301,20 +3301,20 @@
case _LOAD_ATTR: {
_PyStackRef owner;
- _PyStackRef attr;
+ _PyStackRef *attr;
_PyStackRef *self_or_null;
oparg = CURRENT_OPARG();
owner = stack_pointer[-1];
+ attr = &stack_pointer[-1];
self_or_null = &stack_pointer[0];
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1);
- PyObject *attr_o;
if (oparg & 1) {
- attr_o = NULL;
+ *attr = PyStackRef_NULL;
_PyFrame_SetStackPointer(frame, stack_pointer);
- int is_meth = _PyObject_GetMethod(PyStackRef_AsPyObjectBorrow(owner), name, &attr_o);
+ int is_meth = _PyObject_GetMethodStackRef(tstate, PyStackRef_AsPyObjectBorrow(owner), name, attr);
stack_pointer = _PyFrame_GetStackPointer(frame);
if (is_meth) {
- assert(attr_o != NULL);
+ assert(!PyStackRef_IsNull(*attr));
self_or_null[0] = owner;
}
else {
@@ -3323,7 +3323,7 @@
_PyFrame_SetStackPointer(frame, stack_pointer);
PyStackRef_CLOSE(owner);
stack_pointer = _PyFrame_GetStackPointer(frame);
- if (attr_o == NULL) {
+ if (PyStackRef_IsNull(*attr)) {
JUMP_TO_ERROR();
}
self_or_null[0] = PyStackRef_NULL;
@@ -3332,7 +3332,7 @@
}
else {
_PyFrame_SetStackPointer(frame, stack_pointer);
- attr_o = PyObject_GetAttr(PyStackRef_AsPyObjectBorrow(owner), name);
+ PyObject *attr_o = PyObject_GetAttr(PyStackRef_AsPyObjectBorrow(owner), name);
stack_pointer = _PyFrame_GetStackPointer(frame);
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
@@ -3342,10 +3342,9 @@
if (attr_o == NULL) {
JUMP_TO_ERROR();
}
+ *attr = PyStackRef_FromPyObjectSteal(attr_o);
stack_pointer += 1;
}
- attr = PyStackRef_FromPyObjectSteal(attr_o);
- stack_pointer[-1] = attr;
stack_pointer += (oparg&1);
assert(WITHIN_STACK_BOUNDS());
break;
@@ -3705,15 +3704,6 @@
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
- #ifdef Py_GIL_DISABLED
- if (dict != _PyObject_GetManagedDict(owner_o)) {
- UNLOCK_OBJECT(dict);
- if (true) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
- }
- }
- #endif
assert(PyDict_CheckExact((PyObject *)dict));
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
if (hint >= (size_t)dict->ma_keys->dk_nentries ||
diff --git a/Python/flowgraph.c b/Python/flowgraph.c
index 2adc8c84d83..1cb6f03169e 100644
--- a/Python/flowgraph.c
+++ b/Python/flowgraph.c
@@ -1892,6 +1892,10 @@ eval_const_unaryop(PyObject *operand, int opcode, int oparg)
result = PyNumber_Negative(operand);
break;
case UNARY_INVERT:
+ // XXX: This should be removed once the ~bool deprecation expires.
+ if (PyBool_Check(operand)) {
+ return NULL;
+ }
result = PyNumber_Invert(operand);
break;
case UNARY_NOT: {
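
For context on the UNARY_INVERT guard above (illustration only, not part of the patch):
bitwise inversion of a bool is deprecated at runtime, so constant-folding ~True at
compile time would silently drop the warning. A minimal Python sketch of the behaviour
the guard preserves:

    import warnings

    # Inverting a bool still evaluates during the deprecation period
    # (True is 1, so ~True == -2), but it records a DeprecationWarning
    # that compile-time folding would otherwise hide.
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        result = ~True
    print(result, [w.category.__name__ for w in caught])  # -2 ['DeprecationWarning']
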
diff --git a/Python/gc.c b/Python/gc.c
index 7b0e6d6e803..02135a3fb44 100644
--- a/Python/gc.c
+++ b/Python/gc.c
@@ -1,6 +1,6 @@
// This implements the reference cycle garbage collector.
// The Python module interface to the collector is in gcmodule.c.
-// See https://devguide.python.org/internals/garbage-collector/
+// See InternalDocs/garbage_collector.md for more information.
#include "Python.h"
#include "pycore_ceval.h" // _Py_set_eval_breaker_bit()
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 8f7932f0033..aa1eb373b7b 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -7941,7 +7941,7 @@
_Py_CODEUNIT* const this_instr = next_instr - 10;
(void)this_instr;
_PyStackRef owner;
- _PyStackRef attr;
+ _PyStackRef *attr;
_PyStackRef *self_or_null;
// _SPECIALIZE_LOAD_ATTR
{
@@ -7964,16 +7964,16 @@
/* Skip 8 cache entries */
// _LOAD_ATTR
{
+ attr = &stack_pointer[-1];
self_or_null = &stack_pointer[0];
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg >> 1);
- PyObject *attr_o;
if (oparg & 1) {
- attr_o = NULL;
+ *attr = PyStackRef_NULL;
_PyFrame_SetStackPointer(frame, stack_pointer);
- int is_meth = _PyObject_GetMethod(PyStackRef_AsPyObjectBorrow(owner), name, &attr_o);
+ int is_meth = _PyObject_GetMethodStackRef(tstate, PyStackRef_AsPyObjectBorrow(owner), name, attr);
stack_pointer = _PyFrame_GetStackPointer(frame);
if (is_meth) {
- assert(attr_o != NULL);
+ assert(!PyStackRef_IsNull(*attr));
self_or_null[0] = owner;
}
else {
@@ -7982,7 +7982,7 @@
_PyFrame_SetStackPointer(frame, stack_pointer);
PyStackRef_CLOSE(owner);
stack_pointer = _PyFrame_GetStackPointer(frame);
- if (attr_o == NULL) {
+ if (PyStackRef_IsNull(*attr)) {
JUMP_TO_LABEL(error);
}
self_or_null[0] = PyStackRef_NULL;
@@ -7991,7 +7991,7 @@
}
else {
_PyFrame_SetStackPointer(frame, stack_pointer);
- attr_o = PyObject_GetAttr(PyStackRef_AsPyObjectBorrow(owner), name);
+ PyObject *attr_o = PyObject_GetAttr(PyStackRef_AsPyObjectBorrow(owner), name);
stack_pointer = _PyFrame_GetStackPointer(frame);
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
@@ -8001,11 +8001,10 @@
if (attr_o == NULL) {
JUMP_TO_LABEL(error);
}
+ *attr = PyStackRef_FromPyObjectSteal(attr_o);
stack_pointer += 1;
}
- attr = PyStackRef_FromPyObjectSteal(attr_o);
}
- stack_pointer[-1] = attr;
stack_pointer += (oparg&1);
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
@@ -11065,16 +11064,6 @@
assert(_PyOpcode_Deopt[opcode] == (STORE_ATTR));
JUMP_TO_PREDICTED(STORE_ATTR);
}
- #ifdef Py_GIL_DISABLED
- if (dict != _PyObject_GetManagedDict(owner_o)) {
- UNLOCK_OBJECT(dict);
- if (true) {
- UPDATE_MISS_STATS(STORE_ATTR);
- assert(_PyOpcode_Deopt[opcode] == (STORE_ATTR));
- JUMP_TO_PREDICTED(STORE_ATTR);
- }
- }
- #endif
assert(PyDict_CheckExact((PyObject *)dict));
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
if (hint >= (size_t)dict->ma_keys->dk_nentries ||
diff --git a/Python/lock.c b/Python/lock.c
index ea6ac00bfec..a49d587a168 100644
--- a/Python/lock.c
+++ b/Python/lock.c
@@ -95,6 +95,18 @@ _PyMutex_LockTimed(PyMutex *m, PyTime_t timeout, _PyLockFlags flags)
if (timeout == 0) {
return PY_LOCK_FAILURE;
}
+ if ((flags & _PY_LOCK_PYTHONLOCK) && Py_IsFinalizing()) {
+ // At this phase of runtime shutdown, only the finalization thread
+ // can have attached thread state; others hang if they try
+ // attaching. And since operations on this lock requires attached
+ // thread state (_PY_LOCK_PYTHONLOCK), the finalization thread is
+ // running this code, and no other thread can unlock.
+ // Raise rather than hang. (_PY_LOCK_PYTHONLOCK allows raising
+ // exceptons.)
+ PyErr_SetString(PyExc_PythonFinalizationError,
+ "cannot acquire lock at interpreter finalization");
+ return PY_LOCK_FAILURE;
+ }
uint8_t newv = v;
if (!(v & _Py_HAS_PARKED)) {
@@ -622,3 +634,11 @@ PyMutex_Unlock(PyMutex *m)
Py_FatalError("unlocking mutex that is not locked");
}
}
+
+
+#undef PyMutex_IsLocked
+int
+PyMutex_IsLocked(PyMutex *m)
+{
+ return _PyMutex_IsLocked(m);
+}
diff --git a/Python/marshal.c b/Python/marshal.c
index afbef6ee679..15dd25d6268 100644
--- a/Python/marshal.c
+++ b/Python/marshal.c
@@ -1656,6 +1656,9 @@ r_object(RFILE *p)
case TYPE_SLICE:
{
Py_ssize_t idx = r_ref_reserve(flag, p);
+ if (idx < 0) {
+ break;
+ }
PyObject *stop = NULL;
PyObject *step = NULL;
PyObject *start = r_object(p);
diff --git a/Python/optimizer_analysis.c b/Python/optimizer_analysis.c
index 145a8c118d3..fab6fef5ccd 100644
--- a/Python/optimizer_analysis.c
+++ b/Python/optimizer_analysis.c
@@ -26,6 +26,8 @@
#include "pycore_function.h"
#include "pycore_uop_ids.h"
#include "pycore_range.h"
+#include "pycore_unicodeobject.h"
+#include "pycore_ceval.h"
#include <stdarg.h>
#include <stdbool.h>
@@ -321,7 +323,10 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer,
/* Shortened forms for convenience, used in optimizer_bytecodes.c */
#define sym_is_not_null _Py_uop_sym_is_not_null
#define sym_is_const _Py_uop_sym_is_const
+#define sym_is_safe_const _Py_uop_sym_is_safe_const
#define sym_get_const _Py_uop_sym_get_const
+#define sym_new_const_steal _Py_uop_sym_new_const_steal
+#define sym_get_const_as_stackref _Py_uop_sym_get_const_as_stackref
#define sym_new_unknown _Py_uop_sym_new_unknown
#define sym_new_not_null _Py_uop_sym_new_not_null
#define sym_new_type _Py_uop_sym_new_type
@@ -350,6 +355,8 @@ remove_globals(_PyInterpreterFrame *frame, _PyUOpInstruction *buffer,
#define sym_new_compact_int _Py_uop_sym_new_compact_int
#define sym_new_truthiness _Py_uop_sym_new_truthiness
+#define JUMP_TO_LABEL(label) goto label;
+
static int
optimize_to_bool(
_PyUOpInstruction *this_instr,
diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c
index f8a0484bdc2..aeff76affd8 100644
--- a/Python/optimizer_bytecodes.c
+++ b/Python/optimizer_bytecodes.c
@@ -181,6 +181,7 @@ dummy_func(void) {
}
op(_BINARY_OP, (lhs, rhs -- res)) {
+ REPLACE_OPCODE_IF_EVALUATES_PURE(lhs, rhs);
bool lhs_int = sym_matches_type(lhs, &PyLong_Type);
bool rhs_int = sym_matches_type(rhs, &PyLong_Type);
bool lhs_float = sym_matches_type(lhs, &PyFloat_Type);
@@ -235,35 +236,23 @@ dummy_func(void) {
}
op(_BINARY_OP_ADD_INT, (left, right -- res)) {
+ REPLACE_OPCODE_IF_EVALUATES_PURE(left, right);
res = sym_new_compact_int(ctx);
}
op(_BINARY_OP_SUBTRACT_INT, (left, right -- res)) {
+ REPLACE_OPCODE_IF_EVALUATES_PURE(left, right);
res = sym_new_compact_int(ctx);
}
op(_BINARY_OP_MULTIPLY_INT, (left, right -- res)) {
+ REPLACE_OPCODE_IF_EVALUATES_PURE(left, right);
res = sym_new_compact_int(ctx);
}
op(_BINARY_OP_ADD_FLOAT, (left, right -- res)) {
- if (sym_is_const(ctx, left) && sym_is_const(ctx, right)) {
- assert(PyFloat_CheckExact(sym_get_const(ctx, left)));
- assert(PyFloat_CheckExact(sym_get_const(ctx, right)));
- PyObject *temp = PyFloat_FromDouble(
- PyFloat_AS_DOUBLE(sym_get_const(ctx, left)) +
- PyFloat_AS_DOUBLE(sym_get_const(ctx, right)));
- if (temp == NULL) {
- goto error;
- }
- res = sym_new_const(ctx, temp);
- Py_DECREF(temp);
- // TODO gh-115506:
- // replace opcode with constant propagated one and update tests!
- }
- else {
- res = sym_new_type(ctx, &PyFloat_Type);
- }
+ REPLACE_OPCODE_IF_EVALUATES_PURE(left, right);
+ res = sym_new_type(ctx, &PyFloat_Type);
// TODO (gh-134584): Refactor this to use another uop
if (PyJitRef_IsBorrowed(left) && PyJitRef_IsBorrowed(right)) {
REPLACE_OP(this_instr, op_without_decref_inputs[opcode], oparg, 0);
@@ -271,23 +260,8 @@ dummy_func(void) {
}
op(_BINARY_OP_SUBTRACT_FLOAT, (left, right -- res)) {
- if (sym_is_const(ctx, left) && sym_is_const(ctx, right)) {
- assert(PyFloat_CheckExact(sym_get_const(ctx, left)));
- assert(PyFloat_CheckExact(sym_get_const(ctx, right)));
- PyObject *temp = PyFloat_FromDouble(
- PyFloat_AS_DOUBLE(sym_get_const(ctx, left)) -
- PyFloat_AS_DOUBLE(sym_get_const(ctx, right)));
- if (temp == NULL) {
- goto error;
- }
- res = sym_new_const(ctx, temp);
- Py_DECREF(temp);
- // TODO gh-115506:
- // replace opcode with constant propagated one and update tests!
- }
- else {
- res = sym_new_type(ctx, &PyFloat_Type);
- }
+ REPLACE_OPCODE_IF_EVALUATES_PURE(left, right);
+ res = sym_new_type(ctx, &PyFloat_Type);
// TODO (gh-134584): Refactor this to use another uop
if (PyJitRef_IsBorrowed(left) && PyJitRef_IsBorrowed(right)) {
REPLACE_OP(this_instr, op_without_decref_inputs[opcode], oparg, 0);
@@ -295,23 +269,8 @@ dummy_func(void) {
}
op(_BINARY_OP_MULTIPLY_FLOAT, (left, right -- res)) {
- if (sym_is_const(ctx, left) && sym_is_const(ctx, right)) {
- assert(PyFloat_CheckExact(sym_get_const(ctx, left)));
- assert(PyFloat_CheckExact(sym_get_const(ctx, right)));
- PyObject *temp = PyFloat_FromDouble(
- PyFloat_AS_DOUBLE(sym_get_const(ctx, left)) *
- PyFloat_AS_DOUBLE(sym_get_const(ctx, right)));
- if (temp == NULL) {
- goto error;
- }
- res = sym_new_const(ctx, temp);
- Py_DECREF(temp);
- // TODO gh-115506:
- // replace opcode with constant propagated one and update tests!
- }
- else {
- res = sym_new_type(ctx, &PyFloat_Type);
- }
+ REPLACE_OPCODE_IF_EVALUATES_PURE(left, right);
+ res = sym_new_type(ctx, &PyFloat_Type);
// TODO (gh-134584): Refactor this to use another uop
if (PyJitRef_IsBorrowed(left) && PyJitRef_IsBorrowed(right)) {
REPLACE_OP(this_instr, op_without_decref_inputs[opcode], oparg, 0);
@@ -319,19 +278,8 @@ dummy_func(void) {
}
op(_BINARY_OP_ADD_UNICODE, (left, right -- res)) {
- if (sym_is_const(ctx, left) && sym_is_const(ctx, right)) {
- assert(PyUnicode_CheckExact(sym_get_const(ctx, left)));
- assert(PyUnicode_CheckExact(sym_get_const(ctx, right)));
- PyObject *temp = PyUnicode_Concat(sym_get_const(ctx, left), sym_get_const(ctx, right));
- if (temp == NULL) {
- goto error;
- }
- res = sym_new_const(ctx, temp);
- Py_DECREF(temp);
- }
- else {
- res = sym_new_type(ctx, &PyUnicode_Type);
- }
+ REPLACE_OPCODE_IF_EVALUATES_PURE(left, right);
+ res = sym_new_type(ctx, &PyUnicode_Type);
}
op(_BINARY_OP_INPLACE_ADD_UNICODE, (left, right -- )) {
@@ -443,6 +391,7 @@ dummy_func(void) {
}
op(_UNARY_NOT, (value -- res)) {
+ REPLACE_OPCODE_IF_EVALUATES_PURE(value);
sym_set_type(value, &PyBool_Type);
res = sym_new_truthiness(ctx, value, false);
}
@@ -641,9 +590,9 @@ dummy_func(void) {
}
}
- op(_LOAD_ATTR, (owner -- attr, self_or_null[oparg&1])) {
+ op(_LOAD_ATTR, (owner -- attr[1], self_or_null[oparg&1])) {
(void)owner;
- attr = sym_new_not_null(ctx);
+ *attr = sym_new_not_null(ctx);
if (oparg & 1) {
self_or_null[0] = sym_new_unknown(ctx);
}
diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h
index 10767ccdbd5..41402200c16 100644
--- a/Python/optimizer_cases.c.h
+++ b/Python/optimizer_cases.c.h
@@ -206,6 +206,21 @@
JitOptRef value;
JitOptRef res;
value = stack_pointer[-1];
+ if (
+ sym_is_safe_const(ctx, value)
+ ) {
+ JitOptRef value_sym = value;
+ _PyStackRef value = sym_get_const_as_stackref(ctx, value_sym);
+ _PyStackRef res_stackref;
+ /* Start of uop copied from bytecodes for constant evaluation */
+ assert(PyStackRef_BoolCheck(value));
+ res_stackref = PyStackRef_IsFalse(value)
+ ? PyStackRef_True : PyStackRef_False;
+ /* End of uop copied from bytecodes for constant evaluation */
+ res = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(res_stackref));
+ stack_pointer[-1] = res;
+ break;
+ }
sym_set_type(value, &PyBool_Type);
res = sym_new_truthiness(ctx, value, false);
stack_pointer[-1] = res;
@@ -391,7 +406,41 @@
}
case _BINARY_OP_MULTIPLY_INT: {
+ JitOptRef right;
+ JitOptRef left;
JitOptRef res;
+ right = stack_pointer[-1];
+ left = stack_pointer[-2];
+ if (
+ sym_is_safe_const(ctx, left) &&
+ sym_is_safe_const(ctx, right)
+ ) {
+ JitOptRef left_sym = left;
+ JitOptRef right_sym = right;
+ _PyStackRef left = sym_get_const_as_stackref(ctx, left_sym);
+ _PyStackRef right = sym_get_const_as_stackref(ctx, right_sym);
+ _PyStackRef res_stackref;
+ /* Start of uop copied from bytecodes for constant evaluation */
+ PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
+ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
+ assert(PyLong_CheckExact(left_o));
+ assert(PyLong_CheckExact(right_o));
+ assert(_PyLong_BothAreCompact((PyLongObject *)left_o, (PyLongObject *)right_o));
+ STAT_INC(BINARY_OP, hit);
+ res_stackref = _PyCompactLong_Multiply((PyLongObject *)left_o, (PyLongObject *)right_o);
+ if (PyStackRef_IsNull(res_stackref )) {
+ ctx->done = true;
+ break;
+ }
+ PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc);
+ PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc);
+ /* End of uop copied from bytecodes for constant evaluation */
+ res = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(res_stackref));
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
res = sym_new_compact_int(ctx);
stack_pointer[-2] = res;
stack_pointer += -1;
@@ -400,7 +449,41 @@
}
case _BINARY_OP_ADD_INT: {
+ JitOptRef right;
+ JitOptRef left;
JitOptRef res;
+ right = stack_pointer[-1];
+ left = stack_pointer[-2];
+ if (
+ sym_is_safe_const(ctx, left) &&
+ sym_is_safe_const(ctx, right)
+ ) {
+ JitOptRef left_sym = left;
+ JitOptRef right_sym = right;
+ _PyStackRef left = sym_get_const_as_stackref(ctx, left_sym);
+ _PyStackRef right = sym_get_const_as_stackref(ctx, right_sym);
+ _PyStackRef res_stackref;
+ /* Start of uop copied from bytecodes for constant evaluation */
+ PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
+ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
+ assert(PyLong_CheckExact(left_o));
+ assert(PyLong_CheckExact(right_o));
+ assert(_PyLong_BothAreCompact((PyLongObject *)left_o, (PyLongObject *)right_o));
+ STAT_INC(BINARY_OP, hit);
+ res_stackref = _PyCompactLong_Add((PyLongObject *)left_o, (PyLongObject *)right_o);
+ if (PyStackRef_IsNull(res_stackref )) {
+ ctx->done = true;
+ break;
+ }
+ PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc);
+ PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc);
+ /* End of uop copied from bytecodes for constant evaluation */
+ res = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(res_stackref));
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
res = sym_new_compact_int(ctx);
stack_pointer[-2] = res;
stack_pointer += -1;
@@ -409,7 +492,41 @@
}
case _BINARY_OP_SUBTRACT_INT: {
+ JitOptRef right;
+ JitOptRef left;
JitOptRef res;
+ right = stack_pointer[-1];
+ left = stack_pointer[-2];
+ if (
+ sym_is_safe_const(ctx, left) &&
+ sym_is_safe_const(ctx, right)
+ ) {
+ JitOptRef left_sym = left;
+ JitOptRef right_sym = right;
+ _PyStackRef left = sym_get_const_as_stackref(ctx, left_sym);
+ _PyStackRef right = sym_get_const_as_stackref(ctx, right_sym);
+ _PyStackRef res_stackref;
+ /* Start of uop copied from bytecodes for constant evaluation */
+ PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
+ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
+ assert(PyLong_CheckExact(left_o));
+ assert(PyLong_CheckExact(right_o));
+ assert(_PyLong_BothAreCompact((PyLongObject *)left_o, (PyLongObject *)right_o));
+ STAT_INC(BINARY_OP, hit);
+ res_stackref = _PyCompactLong_Subtract((PyLongObject *)left_o, (PyLongObject *)right_o);
+ if (PyStackRef_IsNull(res_stackref )) {
+ ctx->done = true;
+ break;
+ }
+ PyStackRef_CLOSE_SPECIALIZED(right, _PyLong_ExactDealloc);
+ PyStackRef_CLOSE_SPECIALIZED(left, _PyLong_ExactDealloc);
+ /* End of uop copied from bytecodes for constant evaluation */
+ res = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(res_stackref));
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
res = sym_new_compact_int(ctx);
stack_pointer[-2] = res;
stack_pointer += -1;
@@ -443,29 +560,42 @@
JitOptRef res;
right = stack_pointer[-1];
left = stack_pointer[-2];
- if (sym_is_const(ctx, left) && sym_is_const(ctx, right)) {
- assert(PyFloat_CheckExact(sym_get_const(ctx, left)));
- assert(PyFloat_CheckExact(sym_get_const(ctx, right)));
- PyObject *temp = PyFloat_FromDouble(
- PyFloat_AS_DOUBLE(sym_get_const(ctx, left)) *
- PyFloat_AS_DOUBLE(sym_get_const(ctx, right)));
- if (temp == NULL) {
+ if (
+ sym_is_safe_const(ctx, left) &&
+ sym_is_safe_const(ctx, right)
+ ) {
+ JitOptRef left_sym = left;
+ JitOptRef right_sym = right;
+ _PyStackRef left = sym_get_const_as_stackref(ctx, left_sym);
+ _PyStackRef right = sym_get_const_as_stackref(ctx, right_sym);
+ _PyStackRef res_stackref;
+ /* Start of uop copied from bytecodes for constant evaluation */
+ PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
+ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
+ assert(PyFloat_CheckExact(left_o));
+ assert(PyFloat_CheckExact(right_o));
+ STAT_INC(BINARY_OP, hit);
+ double dres =
+ ((PyFloatObject *)left_o)->ob_fval *
+ ((PyFloatObject *)right_o)->ob_fval;
+ res_stackref = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
+ if (PyStackRef_IsNull(res_stackref )) {
goto error;
}
- res = sym_new_const(ctx, temp);
+ /* End of uop copied from bytecodes for constant evaluation */
+ res = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(res_stackref));
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
- Py_DECREF(temp);
- }
- else {
- res = sym_new_type(ctx, &PyFloat_Type);
- stack_pointer += -1;
+ break;
}
+ res = sym_new_type(ctx, &PyFloat_Type);
if (PyJitRef_IsBorrowed(left) && PyJitRef_IsBorrowed(right)) {
REPLACE_OP(this_instr, op_without_decref_inputs[opcode], oparg, 0);
}
- stack_pointer[-1] = res;
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
break;
}
@@ -475,29 +605,42 @@
JitOptRef res;
right = stack_pointer[-1];
left = stack_pointer[-2];
- if (sym_is_const(ctx, left) && sym_is_const(ctx, right)) {
- assert(PyFloat_CheckExact(sym_get_const(ctx, left)));
- assert(PyFloat_CheckExact(sym_get_const(ctx, right)));
- PyObject *temp = PyFloat_FromDouble(
- PyFloat_AS_DOUBLE(sym_get_const(ctx, left)) +
- PyFloat_AS_DOUBLE(sym_get_const(ctx, right)));
- if (temp == NULL) {
+ if (
+ sym_is_safe_const(ctx, left) &&
+ sym_is_safe_const(ctx, right)
+ ) {
+ JitOptRef left_sym = left;
+ JitOptRef right_sym = right;
+ _PyStackRef left = sym_get_const_as_stackref(ctx, left_sym);
+ _PyStackRef right = sym_get_const_as_stackref(ctx, right_sym);
+ _PyStackRef res_stackref;
+ /* Start of uop copied from bytecodes for constant evaluation */
+ PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
+ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
+ assert(PyFloat_CheckExact(left_o));
+ assert(PyFloat_CheckExact(right_o));
+ STAT_INC(BINARY_OP, hit);
+ double dres =
+ ((PyFloatObject *)left_o)->ob_fval +
+ ((PyFloatObject *)right_o)->ob_fval;
+ res_stackref = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
+ if (PyStackRef_IsNull(res_stackref )) {
goto error;
}
- res = sym_new_const(ctx, temp);
+ /* End of uop copied from bytecodes for constant evaluation */
+ res = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(res_stackref));
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
- Py_DECREF(temp);
- }
- else {
- res = sym_new_type(ctx, &PyFloat_Type);
- stack_pointer += -1;
+ break;
}
+ res = sym_new_type(ctx, &PyFloat_Type);
if (PyJitRef_IsBorrowed(left) && PyJitRef_IsBorrowed(right)) {
REPLACE_OP(this_instr, op_without_decref_inputs[opcode], oparg, 0);
}
- stack_pointer[-1] = res;
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
break;
}
@@ -507,29 +650,42 @@
JitOptRef res;
right = stack_pointer[-1];
left = stack_pointer[-2];
- if (sym_is_const(ctx, left) && sym_is_const(ctx, right)) {
- assert(PyFloat_CheckExact(sym_get_const(ctx, left)));
- assert(PyFloat_CheckExact(sym_get_const(ctx, right)));
- PyObject *temp = PyFloat_FromDouble(
- PyFloat_AS_DOUBLE(sym_get_const(ctx, left)) -
- PyFloat_AS_DOUBLE(sym_get_const(ctx, right)));
- if (temp == NULL) {
+ if (
+ sym_is_safe_const(ctx, left) &&
+ sym_is_safe_const(ctx, right)
+ ) {
+ JitOptRef left_sym = left;
+ JitOptRef right_sym = right;
+ _PyStackRef left = sym_get_const_as_stackref(ctx, left_sym);
+ _PyStackRef right = sym_get_const_as_stackref(ctx, right_sym);
+ _PyStackRef res_stackref;
+ /* Start of uop copied from bytecodes for constant evaluation */
+ PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
+ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
+ assert(PyFloat_CheckExact(left_o));
+ assert(PyFloat_CheckExact(right_o));
+ STAT_INC(BINARY_OP, hit);
+ double dres =
+ ((PyFloatObject *)left_o)->ob_fval -
+ ((PyFloatObject *)right_o)->ob_fval;
+ res_stackref = _PyFloat_FromDouble_ConsumeInputs(left, right, dres);
+ if (PyStackRef_IsNull(res_stackref )) {
goto error;
}
- res = sym_new_const(ctx, temp);
+ /* End of uop copied from bytecodes for constant evaluation */
+ res = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(res_stackref));
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
- Py_DECREF(temp);
- }
- else {
- res = sym_new_type(ctx, &PyFloat_Type);
- stack_pointer += -1;
+ break;
}
+ res = sym_new_type(ctx, &PyFloat_Type);
if (PyJitRef_IsBorrowed(left) && PyJitRef_IsBorrowed(right)) {
REPLACE_OP(this_instr, op_without_decref_inputs[opcode], oparg, 0);
}
- stack_pointer[-1] = res;
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
break;
}
@@ -566,24 +722,39 @@
JitOptRef res;
right = stack_pointer[-1];
left = stack_pointer[-2];
- if (sym_is_const(ctx, left) && sym_is_const(ctx, right)) {
- assert(PyUnicode_CheckExact(sym_get_const(ctx, left)));
- assert(PyUnicode_CheckExact(sym_get_const(ctx, right)));
- PyObject *temp = PyUnicode_Concat(sym_get_const(ctx, left), sym_get_const(ctx, right));
- if (temp == NULL) {
+ if (
+ sym_is_safe_const(ctx, left) &&
+ sym_is_safe_const(ctx, right)
+ ) {
+ JitOptRef left_sym = left;
+ JitOptRef right_sym = right;
+ _PyStackRef left = sym_get_const_as_stackref(ctx, left_sym);
+ _PyStackRef right = sym_get_const_as_stackref(ctx, right_sym);
+ _PyStackRef res_stackref;
+ /* Start of uop copied from bytecodes for constant evaluation */
+ PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
+ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
+ assert(PyUnicode_CheckExact(left_o));
+ assert(PyUnicode_CheckExact(right_o));
+ STAT_INC(BINARY_OP, hit);
+ PyObject *res_o = PyUnicode_Concat(left_o, right_o);
+ PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc);
+ PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc);
+ if (res_o == NULL) {
goto error;
}
- res = sym_new_const(ctx, temp);
+ res_stackref = PyStackRef_FromPyObjectSteal(res_o);
+ /* End of uop copied from bytecodes for constant evaluation */
+ res = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(res_stackref));
stack_pointer[-2] = res;
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
- Py_DECREF(temp);
- }
- else {
- res = sym_new_type(ctx, &PyUnicode_Type);
- stack_pointer += -1;
+ break;
}
- stack_pointer[-1] = res;
+ res = sym_new_type(ctx, &PyUnicode_Type);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
break;
}
@@ -1243,16 +1414,16 @@
case _LOAD_ATTR: {
JitOptRef owner;
- JitOptRef attr;
+ JitOptRef *attr;
JitOptRef *self_or_null;
owner = stack_pointer[-1];
+ attr = &stack_pointer[-1];
self_or_null = &stack_pointer[0];
(void)owner;
- attr = sym_new_not_null(ctx);
+ *attr = sym_new_not_null(ctx);
if (oparg & 1) {
self_or_null[0] = sym_new_unknown(ctx);
}
- stack_pointer[-1] = attr;
stack_pointer += (oparg&1);
assert(WITHIN_STACK_BOUNDS());
break;
@@ -2539,6 +2710,31 @@
JitOptRef res;
rhs = stack_pointer[-1];
lhs = stack_pointer[-2];
+ if (
+ sym_is_safe_const(ctx, lhs) &&
+ sym_is_safe_const(ctx, rhs)
+ ) {
+ JitOptRef lhs_sym = lhs;
+ JitOptRef rhs_sym = rhs;
+ _PyStackRef lhs = sym_get_const_as_stackref(ctx, lhs_sym);
+ _PyStackRef rhs = sym_get_const_as_stackref(ctx, rhs_sym);
+ _PyStackRef res_stackref;
+ /* Start of uop copied from bytecodes for constant evaluation */
+ PyObject *lhs_o = PyStackRef_AsPyObjectBorrow(lhs);
+ PyObject *rhs_o = PyStackRef_AsPyObjectBorrow(rhs);
+ assert(_PyEval_BinaryOps[oparg]);
+ PyObject *res_o = _PyEval_BinaryOps[oparg](lhs_o, rhs_o);
+ if (res_o == NULL) {
+ JUMP_TO_LABEL(error);
+ }
+ res_stackref = PyStackRef_FromPyObjectSteal(res_o);
+ /* End of uop copied from bytecodes for constant evaluation */
+ res = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(res_stackref));
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
bool lhs_int = sym_matches_type(lhs, &PyLong_Type);
bool rhs_int = sym_matches_type(rhs, &PyLong_Type);
bool lhs_float = sym_matches_type(lhs, &PyFloat_Type);
diff --git a/Python/optimizer_symbols.c b/Python/optimizer_symbols.c
index c3d9e0e778b..8a3df236c80 100644
--- a/Python/optimizer_symbols.c
+++ b/Python/optimizer_symbols.c
@@ -185,6 +185,37 @@ _Py_uop_sym_get_const(JitOptContext *ctx, JitOptRef ref)
return NULL;
}
+_PyStackRef
+_Py_uop_sym_get_const_as_stackref(JitOptContext *ctx, JitOptRef sym)
+{
+ PyObject *const_val = _Py_uop_sym_get_const(ctx, sym);
+ if (const_val == NULL) {
+ return PyStackRef_NULL;
+ }
+ return PyStackRef_FromPyObjectBorrow(const_val);
+}
+
+/*
+ Indicates whether the constant can safely be constant-folded
+ (evaluating it has no side effects).
+ */
+bool
+_Py_uop_sym_is_safe_const(JitOptContext *ctx, JitOptRef sym)
+{
+ PyObject *const_val = _Py_uop_sym_get_const(ctx, sym);
+ if (const_val == NULL) {
+ return false;
+ }
+ if (_PyLong_CheckExactAndCompact(const_val)) {
+ return true;
+ }
+ PyTypeObject *typ = Py_TYPE(const_val);
+ return (typ == &PyUnicode_Type) ||
+ (typ == &PyFloat_Type) ||
+ (typ == &_PyNone_Type) ||
+ (typ == &PyBool_Type);
+}
+
void
_Py_uop_sym_set_type(JitOptContext *ctx, JitOptRef ref, PyTypeObject *typ)
{
@@ -468,6 +499,16 @@ _Py_uop_sym_new_const(JitOptContext *ctx, PyObject *const_val)
}
JitOptRef
+_Py_uop_sym_new_const_steal(JitOptContext *ctx, PyObject *const_val)
+{
+ assert(const_val != NULL);
+ JitOptRef res = _Py_uop_sym_new_const(ctx, const_val);
+ // Decref once because sym_new_const increfs it.
+ Py_DECREF(const_val);
+ return res;
+}
+
+JitOptRef
_Py_uop_sym_new_null(JitOptContext *ctx)
{
JitOptSymbol *null_sym = sym_new(ctx);
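
A hedged Python-level paraphrase of the new _Py_uop_sym_is_safe_const() predicate above
(illustration only; the 2**30 bound is an assumption standing in for a "compact" int on
default 30-bit-digit builds):

    def is_safe_const(value):
        # bool is a subclass of int in Python, but the C code compares
        # Py_TYPE() exactly, so bools are handled by the tuple check below.
        if type(value) is int:
            # assumption: "compact" roughly means the value fits in one 30-bit digit
            return -2**30 < value < 2**30
        return type(value) in (str, float, bool, type(None))
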
diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c
index 724fda63511..00e8d030765 100644
--- a/Python/pylifecycle.c
+++ b/Python/pylifecycle.c
@@ -1702,8 +1702,10 @@ finalize_modules(PyThreadState *tstate)
#endif
// Stop watching __builtin__ modifications
- PyDict_Unwatch(0, interp->builtins);
-
+ if (PyDict_Unwatch(0, interp->builtins) < 0) {
+ // This can fail if the interpreter was cleared before the builtins dict was watched.
+ PyErr_Clear();
+ }
PyObject *modules = _PyImport_GetModules(interp);
if (modules == NULL) {
// Already done
@@ -2377,15 +2379,13 @@ new_interpreter(PyThreadState **tstate_p,
error:
*tstate_p = NULL;
if (tstate != NULL) {
- PyThreadState_Clear(tstate);
- _PyThreadState_Detach(tstate);
- PyThreadState_Delete(tstate);
+ Py_EndInterpreter(tstate);
+ } else {
+ PyInterpreterState_Delete(interp);
}
if (save_tstate != NULL) {
_PyThreadState_Attach(save_tstate);
}
- PyInterpreterState_Delete(interp);
-
return status;
}
diff --git a/Python/remote_debug.h b/Python/remote_debug.h
index 8f9b6cd4c49..d1fcb478d2b 100644
--- a/Python/remote_debug.h
+++ b/Python/remote_debug.h
@@ -110,14 +110,6 @@ get_page_size(void) {
return page_size;
}
-typedef struct page_cache_entry {
- uintptr_t page_addr; // page-aligned base address
- char *data;
- int valid;
- struct page_cache_entry *next;
-} page_cache_entry_t;
-
-#define MAX_PAGES 1024
// Define a platform-independent process handle structure
typedef struct {
@@ -129,27 +121,9 @@ typedef struct {
#elif defined(__linux__)
int memfd;
#endif
- page_cache_entry_t pages[MAX_PAGES];
Py_ssize_t page_size;
} proc_handle_t;
-static void
-_Py_RemoteDebug_FreePageCache(proc_handle_t *handle)
-{
- for (int i = 0; i < MAX_PAGES; i++) {
- PyMem_RawFree(handle->pages[i].data);
- handle->pages[i].data = NULL;
- handle->pages[i].valid = 0;
- }
-}
-
-UNUSED static void
-_Py_RemoteDebug_ClearCache(proc_handle_t *handle)
-{
- for (int i = 0; i < MAX_PAGES; i++) {
- handle->pages[i].valid = 0;
- }
-}
#if defined(__APPLE__) && defined(TARGET_OS_OSX) && TARGET_OS_OSX
static mach_port_t pid_to_task(pid_t pid);
@@ -178,10 +152,6 @@ _Py_RemoteDebug_InitProcHandle(proc_handle_t *handle, pid_t pid) {
handle->memfd = -1;
#endif
handle->page_size = get_page_size();
- for (int i = 0; i < MAX_PAGES; i++) {
- handle->pages[i].data = NULL;
- handle->pages[i].valid = 0;
- }
return 0;
}
@@ -200,7 +170,6 @@ _Py_RemoteDebug_CleanupProcHandle(proc_handle_t *handle) {
}
#endif
handle->pid = 0;
- _Py_RemoteDebug_FreePageCache(handle);
}
#if defined(__APPLE__) && defined(TARGET_OS_OSX) && TARGET_OS_OSX
@@ -1066,53 +1035,6 @@ _Py_RemoteDebug_PagedReadRemoteMemory(proc_handle_t *handle,
size_t size,
void *out)
{
- size_t page_size = handle->page_size;
- uintptr_t page_base = addr & ~(page_size - 1);
- size_t offset_in_page = addr - page_base;
-
- if (offset_in_page + size > page_size) {
- return _Py_RemoteDebug_ReadRemoteMemory(handle, addr, size, out);
- }
-
- // Search for valid cached page
- for (int i = 0; i < MAX_PAGES; i++) {
- page_cache_entry_t *entry = &handle->pages[i];
- if (entry->valid && entry->page_addr == page_base) {
- memcpy(out, entry->data + offset_in_page, size);
- return 0;
- }
- }
-
- // Find reusable slot
- for (int i = 0; i < MAX_PAGES; i++) {
- page_cache_entry_t *entry = &handle->pages[i];
- if (!entry->valid) {
- if (entry->data == NULL) {
- entry->data = PyMem_RawMalloc(page_size);
- if (entry->data == NULL) {
- _set_debug_exception_cause(PyExc_MemoryError,
- "Cannot allocate %zu bytes for page cache entry "
- "during read from PID %d at address 0x%lx",
- page_size, handle->pid, addr);
- return -1;
- }
- }
-
- if (_Py_RemoteDebug_ReadRemoteMemory(handle, page_base, page_size, entry->data) < 0) {
- // Try to just copy the exact ammount as a fallback
- PyErr_Clear();
- goto fallback;
- }
-
- entry->page_addr = page_base;
- entry->valid = 1;
- memcpy(out, entry->data + offset_in_page, size);
- return 0;
- }
- }
-
-fallback:
- // Cache full — fallback to uncached read
return _Py_RemoteDebug_ReadRemoteMemory(handle, addr, size, out);
}
diff --git a/Python/stdlib_module_names.h b/Python/stdlib_module_names.h
index 56e349a544c..63e4599c31e 100644
--- a/Python/stdlib_module_names.h
+++ b/Python/stdlib_module_names.h
@@ -245,9 +245,6 @@ static const char* _Py_stdlib_module_names[] = {
"socket",
"socketserver",
"sqlite3",
-"sre_compile",
-"sre_constants",
-"sre_parse",
"ssl",
"stat",
"statistics",
diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py
index 47de205c0e9..4c210fbf8d2 100644
--- a/Tools/cases_generator/generators_common.py
+++ b/Tools/cases_generator/generators_common.py
@@ -106,8 +106,9 @@ class Emitter:
out: CWriter
labels: dict[str, Label]
_replacers: dict[str, ReplacementFunctionType]
+ cannot_escape: bool
- def __init__(self, out: CWriter, labels: dict[str, Label]):
+ def __init__(self, out: CWriter, labels: dict[str, Label], cannot_escape: bool = False):
self._replacers = {
"EXIT_IF": self.exit_if,
"DEOPT_IF": self.deopt_if,
@@ -127,6 +128,7 @@ class Emitter:
}
self.out = out
self.labels = labels
+ self.cannot_escape = cannot_escape
def dispatch(
self,
@@ -238,7 +240,8 @@ class Emitter:
next(tkn_iter)
self._print_storage("DECREF_INPUTS", storage)
try:
- storage.close_inputs(self.out)
+ if not self.cannot_escape:
+ storage.close_inputs(self.out)
except StackError as ex:
raise analysis_error(ex.args[0], tkn)
except Exception as ex:
@@ -476,7 +479,7 @@ class Emitter:
reachable = True
tkn = stmt.contents[-1]
try:
- if stmt in uop.properties.escaping_calls:
+ if stmt in uop.properties.escaping_calls and not self.cannot_escape:
escape = uop.properties.escaping_calls[stmt]
if escape.kills is not None:
self.stackref_kill(escape.kills, storage, True)
@@ -513,7 +516,7 @@ class Emitter:
self.out.emit(tkn)
else:
self.out.emit(tkn)
- if stmt in uop.properties.escaping_calls:
+ if stmt in uop.properties.escaping_calls and not self.cannot_escape:
self.emit_reload(storage)
return reachable, None, storage
except StackError as ex:
diff --git a/Tools/cases_generator/optimizer_generator.py b/Tools/cases_generator/optimizer_generator.py
index 3b4fe64b02a..81ae534bdda 100644
--- a/Tools/cases_generator/optimizer_generator.py
+++ b/Tools/cases_generator/optimizer_generator.py
@@ -12,6 +12,8 @@ from analyzer import (
analyze_files,
StackItem,
analysis_error,
+ CodeSection,
+ Label,
)
from generators_common import (
DEFAULT_INPUT,
@@ -19,6 +21,7 @@ from generators_common import (
write_header,
Emitter,
TokenIterator,
+ always_true,
)
from cwriter import CWriter
from typing import TextIO
@@ -75,6 +78,9 @@ def type_name(var: StackItem) -> str:
return "JitOptRef *"
return "JitOptRef "
+def stackref_type_name(var: StackItem) -> str:
+ assert not var.is_array(), "Unsafe to convert a symbol to an array-like StackRef."
+ return "_PyStackRef "
def declare_variables(uop: Uop, out: CWriter, skip_inputs: bool) -> None:
variables = {"unused"}
@@ -135,6 +141,12 @@ def emit_default(out: CWriter, uop: Uop, stack: Stack) -> None:
class OptimizerEmitter(Emitter):
+ def __init__(self, out: CWriter, labels: dict[str, Label], original_uop: Uop, stack: Stack):
+ super().__init__(out, labels)
+ self._replacers["REPLACE_OPCODE_IF_EVALUATES_PURE"] = self.replace_opcode_if_evaluates_pure
+ self.original_uop = original_uop
+ self.stack = stack
+
def emit_save(self, storage: Storage) -> None:
storage.flush(self.out)
@@ -145,6 +157,186 @@ class OptimizerEmitter(Emitter):
self.out.emit(goto)
self.out.emit(label)
+ def replace_opcode_if_evaluates_pure(
+ self,
+ tkn: Token,
+ tkn_iter: TokenIterator,
+ uop: CodeSection,
+ storage: Storage,
+ inst: Instruction | None,
+ ) -> bool:
+ assert isinstance(uop, Uop)
+ input_identifiers = []
+ for token in tkn_iter:
+ if token.kind == "IDENTIFIER":
+ input_identifiers.append(token)
+ if token.kind == "SEMI":
+ break
+
+ if len(input_identifiers) == 0:
+ raise analysis_error(
+ "To evaluate an operation as pure, it must have at least 1 input",
+ tkn
+ )
+ # Check that the input identifiers belong to the uop's
+ # input stack effect
+ uop_stack_effect_input_identifiers = {inp.name for inp in uop.stack.inputs}
+ for input_tkn in input_identifiers:
+ if input_tkn.text not in uop_stack_effect_input_identifiers:
+ raise analysis_error(f"{input_tkn.text} referenced in "
+ f"REPLACE_OPCODE_IF_EVALUATES_PURE but does not "
+ f"exist in the base uop's input stack effects",
+ input_tkn)
+ input_identifiers_as_str = {tkn.text for tkn in input_identifiers}
+ used_stack_inputs = [inp for inp in uop.stack.inputs if inp.name in input_identifiers_as_str]
+ assert len(used_stack_inputs) > 0
+ emitter = OptimizerConstantEmitter(self.out, {}, self.original_uop, self.stack.copy())
+ emitter.emit("if (\n")
+ for inp in used_stack_inputs[:-1]:
+ emitter.emit(f"sym_is_safe_const(ctx, {inp.name}) &&\n")
+ emitter.emit(f"sym_is_safe_const(ctx, {used_stack_inputs[-1].name})\n")
+ emitter.emit(') {\n')
+ # Declare variables, before they are shadowed.
+ for inp in used_stack_inputs:
+ if inp.used:
+ emitter.emit(f"{type_name(inp)}{inp.name}_sym = {inp.name};\n")
+ # Shadow the symbolic variables with stackrefs.
+ for inp in used_stack_inputs:
+ if inp.is_array():
+ raise analysis_error("Pure evaluation cannot take array-like inputs.", tkn)
+ if inp.used:
+ emitter.emit(f"{stackref_type_name(inp)}{inp.name} = sym_get_const_as_stackref(ctx, {inp.name}_sym);\n")
+ # Rename all output variables to stackref variant.
+ for outp in self.original_uop.stack.outputs:
+ if outp.is_array():
+ raise analysis_error(
+ "Array output StackRefs not supported for evaluating pure ops.",
+ self.original_uop.body.open
+ )
+ emitter.emit(f"_PyStackRef {outp.name}_stackref;\n")
+
+
+ storage = Storage.for_uop(self.stack, self.original_uop, CWriter.null(), check_liveness=False)
+ # No reference management of outputs needed.
+ for var in storage.outputs:
+ var.in_local = True
+ emitter.emit("/* Start of uop copied from bytecodes for constant evaluation */\n")
+ emitter.emit_tokens(self.original_uop, storage, inst=None, emit_braces=False)
+ self.out.start_line()
+ emitter.emit("/* End of uop copied from bytecodes for constant evaluation */\n")
+ # Finally, assign back the output stackrefs to symbolics.
+ for outp in self.original_uop.stack.outputs:
+ # All new stackrefs are created from new references.
+ # That's how the stackref contract works.
+ if not outp.peek:
+ emitter.emit(f"{outp.name} = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal({outp.name}_stackref));\n")
+ else:
+ emitter.emit(f"{outp.name} = sym_new_const(ctx, PyStackRef_AsPyObjectBorrow({outp.name}_stackref));\n")
+ storage.flush(self.out)
+ emitter.emit("break;\n")
+ emitter.emit("}\n")
+ return True
+
+class OptimizerConstantEmitter(OptimizerEmitter):
+ def __init__(self, out: CWriter, labels: dict[str, Label], original_uop: Uop, stack: Stack):
+ super().__init__(out, labels, original_uop, stack)
+ # Replace all outputs to point to their stackref versions.
+ overrides = {
+ outp.name: self.emit_stackref_override for outp in self.original_uop.stack.outputs
+ }
+ self._replacers = {**self._replacers, **overrides}
+ self.cannot_escape = True
+
+ def emit_to_with_replacement(
+ self,
+ out: CWriter,
+ tkn_iter: TokenIterator,
+ end: str,
+ uop: CodeSection,
+ storage: Storage,
+ inst: Instruction | None
+ ) -> Token:
+ parens = 0
+ for tkn in tkn_iter:
+ if tkn.kind == end and parens == 0:
+ return tkn
+ if tkn.kind == "LPAREN":
+ parens += 1
+ if tkn.kind == "RPAREN":
+ parens -= 1
+ if tkn.text in self._replacers:
+ self._replacers[tkn.text](tkn, tkn_iter, uop, storage, inst)
+ else:
+ out.emit(tkn)
+ raise analysis_error(f"Expecting {end}. Reached end of file", tkn)
+
+ def emit_stackref_override(
+ self,
+ tkn: Token,
+ tkn_iter: TokenIterator,
+ uop: CodeSection,
+ storage: Storage,
+ inst: Instruction | None,
+ ) -> bool:
+ self.out.emit(tkn)
+ self.out.emit("_stackref ")
+ return True
+
+ def deopt_if(
+ self,
+ tkn: Token,
+ tkn_iter: TokenIterator,
+ uop: CodeSection,
+ storage: Storage,
+ inst: Instruction | None,
+ ) -> bool:
+ self.out.start_line()
+ self.out.emit("if (")
+ lparen = next(tkn_iter)
+ assert lparen.kind == "LPAREN"
+ first_tkn = tkn_iter.peek()
+ self.emit_to_with_replacement(self.out, tkn_iter, "RPAREN", uop, storage, inst)
+ self.emit(") {\n")
+ next(tkn_iter) # Semicolon
+ # We guarantee this will deopt in real-world code
+ # via constants analysis. So just bail.
+ self.emit("ctx->done = true;\n")
+ self.emit("break;\n")
+ self.emit("}\n")
+ return not always_true(first_tkn)
+
+ exit_if = deopt_if
+
+ def error_if(
+ self,
+ tkn: Token,
+ tkn_iter: TokenIterator,
+ uop: CodeSection,
+ storage: Storage,
+ inst: Instruction | None,
+ ) -> bool:
+ lparen = next(tkn_iter)
+ assert lparen.kind == "LPAREN"
+ first_tkn = tkn_iter.peek()
+ unconditional = always_true(first_tkn)
+ if unconditional:
+ next(tkn_iter)
+ next(tkn_iter) # RPAREN
+ self.out.start_line()
+ else:
+ self.out.emit_at("if ", tkn)
+ self.emit(lparen)
+ self.emit_to_with_replacement(self.out, tkn_iter, "RPAREN", uop, storage, inst)
+ self.out.emit(") {\n")
+ next(tkn_iter) # Semicolon
+ storage.clear_inputs("at ERROR_IF")
+
+ self.out.emit("goto error;\n")
+ if not unconditional:
+ self.out.emit("}\n")
+ return not unconditional
+
+
def write_uop(
override: Uop | None,
uop: Uop,
@@ -175,13 +367,14 @@ def write_uop(
cast = f"uint{cache.size*16}_t"
out.emit(f"{type}{cache.name} = ({cast})this_instr->operand0;\n")
if override:
- emitter = OptimizerEmitter(out, {})
+ emitter = OptimizerEmitter(out, {}, uop, stack.copy())
# No reference management of inputs needed.
for var in storage.inputs: # type: ignore[possibly-undefined]
var.in_local = False
_, storage = emitter.emit_tokens(override, storage, None, False)
out.start_line()
storage.flush(out)
+ out.start_line()
else:
emit_default(out, uop, stack)
out.start_line()
diff --git a/Tools/inspection/benchmark_external_inspection.py b/Tools/inspection/benchmark_external_inspection.py
index 62182194c1a..0ac7ac4d385 100644
--- a/Tools/inspection/benchmark_external_inspection.py
+++ b/Tools/inspection/benchmark_external_inspection.py
@@ -174,6 +174,7 @@ def benchmark(unwinder, duration_seconds=10):
total_work_time = 0.0
start_time = time.perf_counter()
end_time = start_time + duration_seconds
+ total_attempts = 0
colors = get_colors(can_colorize())
@@ -183,6 +184,7 @@ def benchmark(unwinder, duration_seconds=10):
try:
while time.perf_counter() < end_time:
+ total_attempts += 1
work_start = time.perf_counter()
try:
stack_trace = unwinder.get_stack_trace()
@@ -194,7 +196,6 @@ def benchmark(unwinder, duration_seconds=10):
work_end = time.perf_counter()
total_work_time += work_end - work_start
- total_attempts = sample_count + fail_count
if total_attempts % 10000 == 0:
avg_work_time_us = (total_work_time / total_attempts) * 1e6
work_rate = (
@@ -221,7 +222,6 @@ def benchmark(unwinder, duration_seconds=10):
actual_end_time = time.perf_counter()
wall_time = actual_end_time - start_time
- total_attempts = sample_count + fail_count
# Return final statistics
return {
@@ -346,6 +346,13 @@ Available code examples:
help="Code example to benchmark (default: basic)",
)
+ parser.add_argument(
+ "--threads",
+ choices=["all", "main", "only_active"],
+ default="all",
+ help="Which threads to include in the benchmark (default: all)",
+ )
+
return parser.parse_args()
@@ -419,8 +426,15 @@ def main():
# Create unwinder and run benchmark
print(f"{colors.BLUE}Initializing unwinder...{colors.RESET}")
try:
+ kwargs = {}
+ if args.threads == "all":
+ kwargs["all_threads"] = True
+ elif args.threads == "main":
+ kwargs["all_threads"] = False
+ elif args.threads == "only_active":
+ kwargs["only_active_thread"] = True
unwinder = _remote_debugging.RemoteUnwinder(
- process.pid, all_threads=True
+ process.pid, **kwargs
)
results = benchmark(unwinder, duration_seconds=args.duration)
finally:
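
A hedged standalone sketch of the --threads mapping added above, reusable outside the
benchmark script (it assumes the private _remote_debugging module and a live target PID):

    import _remote_debugging

    def make_unwinder(pid, threads="all"):
        # Mirrors the argparse choices above: "all", "main", or "only_active".
        kwargs = {}
        if threads == "all":
            kwargs["all_threads"] = True
        elif threads == "main":
            kwargs["all_threads"] = False
        elif threads == "only_active":
            kwargs["only_active_thread"] = True
        return _remote_debugging.RemoteUnwinder(pid, **kwargs)
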
diff --git a/Tools/jit/_optimizers.py b/Tools/jit/_optimizers.py
new file mode 100644
index 00000000000..1077e4106fd
--- /dev/null
+++ b/Tools/jit/_optimizers.py
@@ -0,0 +1,319 @@
+"""Low-level optimization of textual assembly."""
+
+import dataclasses
+import pathlib
+import re
+import typing
+
+# Same as saying "not string.startswith('')":
+_RE_NEVER_MATCH = re.compile(r"(?!)")
+# Dictionary mapping branch instructions to their inverted branch instructions.
+# If a branch cannot be inverted, the value is None:
+_X86_BRANCHES = {
+ # https://www.felixcloutier.com/x86/jcc
+ "ja": "jna",
+ "jae": "jnae",
+ "jb": "jnb",
+ "jbe": "jnbe",
+ "jc": "jnc",
+ "jcxz": None,
+ "je": "jne",
+ "jecxz": None,
+ "jg": "jng",
+ "jge": "jnge",
+ "jl": "jnl",
+ "jle": "jnle",
+ "jo": "jno",
+ "jp": "jnp",
+ "jpe": "jpo",
+ "jrcxz": None,
+ "js": "jns",
+ "jz": "jnz",
+ # https://www.felixcloutier.com/x86/loop:loopcc
+ "loop": None,
+ "loope": None,
+ "loopne": None,
+ "loopnz": None,
+ "loopz": None,
+}
+# Update with all of the inverted branches, too:
+_X86_BRANCHES |= {v: k for k, v in _X86_BRANCHES.items() if v}
+
+
+@dataclasses.dataclass
+class _Block:
+ label: str | None = None
+ # Non-instruction lines like labels, directives, and comments:
+ noninstructions: list[str] = dataclasses.field(default_factory=list)
+ # Instruction lines:
+ instructions: list[str] = dataclasses.field(default_factory=list)
+ # If this block ends in a jump, where to?
+ target: typing.Self | None = None
+ # The next block in the linked list:
+ link: typing.Self | None = None
+ # Whether control flow can fall through to the linked block above:
+ fallthrough: bool = True
+ # Whether this block can eventually reach the next uop (_JIT_CONTINUE):
+ hot: bool = False
+
+ def resolve(self) -> typing.Self:
+ """Find the first non-empty block reachable from this one."""
+ block = self
+ while block.link and not block.instructions:
+ block = block.link
+ return block
+
+
+@dataclasses.dataclass
+class Optimizer:
+ """Several passes of analysis and optimization for textual assembly."""
+
+ path: pathlib.Path
+ _: dataclasses.KW_ONLY
+ # prefix used to mangle symbols on some platforms:
+ prefix: str = ""
+ # The first block in the linked list:
+ _root: _Block = dataclasses.field(init=False, default_factory=_Block)
+ _labels: dict[str, _Block] = dataclasses.field(init=False, default_factory=dict)
+ # No groups:
+ _re_noninstructions: typing.ClassVar[re.Pattern[str]] = re.compile(
+ r"\s*(?:\.|#|//|$)"
+ )
+ # One group (label):
+ _re_label: typing.ClassVar[re.Pattern[str]] = re.compile(
+ r'\s*(?P<label>[\w."$?@]+):'
+ )
+ # Override everything that follows in subclasses:
+ _alignment: typing.ClassVar[int] = 1
+ _branches: typing.ClassVar[dict[str, str | None]] = {}
+ # Two groups (instruction and target):
+ _re_branch: typing.ClassVar[re.Pattern[str]] = _RE_NEVER_MATCH
+ # One group (target):
+ _re_jump: typing.ClassVar[re.Pattern[str]] = _RE_NEVER_MATCH
+ # No groups:
+ _re_return: typing.ClassVar[re.Pattern[str]] = _RE_NEVER_MATCH
+
+ def __post_init__(self) -> None:
+ # Split the code into a linked list of basic blocks. A basic block is an
+ # optional label, followed by zero or more non-instruction lines,
+ # followed by zero or more instruction lines (only the last of which may
+ # be a branch, jump, or return):
+ text = self._preprocess(self.path.read_text())
+ block = self._root
+ for line in text.splitlines():
+ # See if we need to start a new block:
+ if match := self._re_label.match(line):
+ # Label. New block:
+ block.link = block = self._lookup_label(match["label"])
+ block.noninstructions.append(line)
+ continue
+ if self._re_noninstructions.match(line):
+ if block.instructions:
+ # Non-instruction lines. New block:
+ block.link = block = _Block()
+ block.noninstructions.append(line)
+ continue
+ if block.target or not block.fallthrough:
+ # Current block ends with a branch, jump, or return. New block:
+ block.link = block = _Block()
+ block.instructions.append(line)
+ if match := self._re_branch.match(line):
+ # A block ending in a branch has a target and fallthrough:
+ block.target = self._lookup_label(match["target"])
+ assert block.fallthrough
+ elif match := self._re_jump.match(line):
+ # A block ending in a jump has a target and no fallthrough:
+ block.target = self._lookup_label(match["target"])
+ block.fallthrough = False
+ elif self._re_return.match(line):
+ # A block ending in a return has no target and no fallthrough:
+ assert not block.target
+ block.fallthrough = False
+
+ def _preprocess(self, text: str) -> str:
+ # Override this method to do preprocessing of the textual assembly:
+ return text
+
+ @classmethod
+ def _invert_branch(cls, line: str, target: str) -> str | None:
+ match = cls._re_branch.match(line)
+ assert match
+ inverted = cls._branches.get(match["instruction"])
+ if not inverted:
+ return None
+ (a, b), (c, d) = match.span("instruction"), match.span("target")
+ # Before:
+ # je FOO
+ # After:
+ # jne BAR
+ return "".join([line[:a], inverted, line[b:c], target, line[d:]])
+
+ @classmethod
+ def _update_jump(cls, line: str, target: str) -> str:
+ match = cls._re_jump.match(line)
+ assert match
+ a, b = match.span("target")
+ # Before:
+ # jmp FOO
+ # After:
+ # jmp BAR
+ return "".join([line[:a], target, line[b:]])
+
+ def _lookup_label(self, label: str) -> _Block:
+ if label not in self._labels:
+ self._labels[label] = _Block(label)
+ return self._labels[label]
+
+ def _blocks(self) -> typing.Generator[_Block, None, None]:
+ block: _Block | None = self._root
+ while block:
+ yield block
+ block = block.link
+
+ def _body(self) -> str:
+ lines = []
+ hot = True
+ for block in self._blocks():
+ if hot != block.hot:
+ hot = block.hot
+ # Make it easy to tell at a glance where cold code is:
+ lines.append(f"# JIT: {'HOT' if hot else 'COLD'} ".ljust(80, "#"))
+ lines.extend(block.noninstructions)
+ lines.extend(block.instructions)
+ return "\n".join(lines)
+
+ def _predecessors(self, block: _Block) -> typing.Generator[_Block, None, None]:
+ # This is inefficient, but it's never wrong:
+ for pre in self._blocks():
+ if pre.target is block or pre.fallthrough and pre.link is block:
+ yield pre
+
+ def _insert_continue_label(self) -> None:
+ # Find the block with the last instruction:
+ for end in reversed(list(self._blocks())):
+ if end.instructions:
+ break
+ # Before:
+ # jmp FOO
+ # After:
+ # jmp FOO
+ # .balign 8
+ # _JIT_CONTINUE:
+ # This lets the assembler encode _JIT_CONTINUE jumps at build time!
+ align = _Block()
+ align.noninstructions.append(f"\t.balign\t{self._alignment}")
+ continuation = self._lookup_label(f"{self.prefix}_JIT_CONTINUE")
+ assert continuation.label
+ continuation.noninstructions.append(f"{continuation.label}:")
+ end.link, align.link, continuation.link = align, continuation, end.link
+
+ def _mark_hot_blocks(self) -> None:
+ # Start with the last block, and perform a DFS to find all blocks that
+ # can eventually reach it:
+ todo = list(self._blocks())[-1:]
+ while todo:
+ block = todo.pop()
+ block.hot = True
+ todo.extend(pre for pre in self._predecessors(block) if not pre.hot)
+
+ def _invert_hot_branches(self) -> None:
+ for branch in self._blocks():
+ link = branch.link
+ if link is None:
+ continue
+ jump = link.resolve()
+ # Before:
+ # je HOT
+ # jmp COLD
+ # After:
+ # jne COLD
+ # jmp HOT
+ if (
+ # block ends with a branch to hot code...
+ branch.target
+ and branch.fallthrough
+ and branch.target.hot
+ # ...followed by a jump to cold code with no other predecessors:
+ and jump.target
+ and not jump.fallthrough
+ and not jump.target.hot
+ and len(jump.instructions) == 1
+ and list(self._predecessors(jump)) == [branch]
+ ):
+ assert jump.target.label
+ assert branch.target.label
+ inverted = self._invert_branch(
+ branch.instructions[-1], jump.target.label
+ )
+ # Check to see if the branch can even be inverted:
+ if inverted is None:
+ continue
+ branch.instructions[-1] = inverted
+ jump.instructions[-1] = self._update_jump(
+ jump.instructions[-1], branch.target.label
+ )
+ branch.target, jump.target = jump.target, branch.target
+ jump.hot = True
+
+ def _remove_redundant_jumps(self) -> None:
+ # Zero-length jumps can be introduced by _insert_continue_label and
+ # _invert_hot_branches:
+ for block in self._blocks():
+ # Before:
+ # jmp FOO
+ # FOO:
+ # After:
+ # FOO:
+ if (
+ block.target
+ and block.link
+ and block.target.resolve() is block.link.resolve()
+ ):
+ block.target = None
+ block.fallthrough = True
+ block.instructions.pop()
+
+ def run(self) -> None:
+ """Run this optimizer."""
+ self._insert_continue_label()
+ self._mark_hot_blocks()
+ self._invert_hot_branches()
+ self._remove_redundant_jumps()
+ self.path.write_text(self._body())
+
+
+class OptimizerAArch64(Optimizer): # pylint: disable = too-few-public-methods
+ """aarch64-apple-darwin/aarch64-pc-windows-msvc/aarch64-unknown-linux-gnu"""
+
+ # TODO: @diegorusso
+ _alignment = 8
+ # https://developer.arm.com/documentation/ddi0602/2025-03/Base-Instructions/B--Branch-
+ _re_jump = re.compile(r"\s*b\s+(?P<target>[\w.]+)")
+
+
+class OptimizerX86(Optimizer): # pylint: disable = too-few-public-methods
+ """i686-pc-windows-msvc/x86_64-apple-darwin/x86_64-unknown-linux-gnu"""
+
+ _branches = _X86_BRANCHES
+ _re_branch = re.compile(
+ rf"\s*(?P<instruction>{'|'.join(_X86_BRANCHES)})\s+(?P<target>[\w.]+)"
+ )
+ # https://www.felixcloutier.com/x86/jmp
+ _re_jump = re.compile(r"\s*jmp\s+(?P<target>[\w.]+)")
+ # https://www.felixcloutier.com/x86/ret
+ _re_return = re.compile(r"\s*ret\b")
+
+
+class OptimizerX8664Windows(OptimizerX86): # pylint: disable = too-few-public-methods
+ """x86_64-pc-windows-msvc"""
+
+ def _preprocess(self, text: str) -> str:
+ text = super()._preprocess(text)
+ # Before:
+ # rex64 jmpq *__imp__JIT_CONTINUE(%rip)
+ # After:
+ # jmp _JIT_CONTINUE
+ far_indirect_jump = (
+ rf"rex64\s+jmpq\s+\*__imp_(?P<target>{self.prefix}_JIT_\w+)\(%rip\)"
+ )
+ return re.sub(far_indirect_jump, r"jmp\t\g<target>", text)
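
A hedged usage sketch of the new module, mirroring how _targets.py drives it further
below; "stencil.s" is a placeholder path for clang-emitted textual assembly:

    import pathlib
    import _optimizers

    # Rewrites the assembly in place: inserts the _JIT_CONTINUE label, marks
    # hot blocks, inverts branches into cold code, and drops redundant jumps.
    asm = pathlib.Path("stencil.s")
    # prefix="_" is only needed on platforms that mangle symbols with a
    # leading underscore, as selected in _targets.get_target().
    _optimizers.OptimizerX86(asm, prefix="_").run()
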
diff --git a/Tools/jit/_stencils.py b/Tools/jit/_stencils.py
index 03b0ba647b0..1d82f5366f6 100644
--- a/Tools/jit/_stencils.py
+++ b/Tools/jit/_stencils.py
@@ -17,8 +17,6 @@ class HoleValue(enum.Enum):
# The base address of the machine code for the current uop (exposed as _JIT_ENTRY):
CODE = enum.auto()
- # The base address of the machine code for the next uop (exposed as _JIT_CONTINUE):
- CONTINUE = enum.auto()
# The base address of the read-only data for this uop:
DATA = enum.auto()
# The address of the current executor (exposed as _JIT_EXECUTOR):
@@ -97,7 +95,6 @@ _PATCH_FUNCS = {
# Translate HoleValues to C expressions:
_HOLE_EXPRS = {
HoleValue.CODE: "(uintptr_t)code",
- HoleValue.CONTINUE: "(uintptr_t)code + sizeof(code_body)",
HoleValue.DATA: "(uintptr_t)data",
HoleValue.EXECUTOR: "(uintptr_t)executor",
# These should all have been turned into DATA values by process_relocations:
@@ -209,64 +206,6 @@ class Stencil:
self.disassembly.append(f"{offset:x}: {' '.join(['00'] * padding)}")
self.body.extend([0] * padding)
- def add_nops(self, nop: bytes, alignment: int) -> None:
- """Add NOPs until there is alignment. Fail if it is not possible."""
- offset = len(self.body)
- nop_size = len(nop)
-
- # Calculate the gap to the next multiple of alignment.
- gap = -offset % alignment
- if gap:
- if gap % nop_size == 0:
- count = gap // nop_size
- self.body.extend(nop * count)
- else:
- raise ValueError(
- f"Cannot add nops of size '{nop_size}' to a body with "
- f"offset '{offset}' to align with '{alignment}'"
- )
-
- def remove_jump(self) -> None:
- """Remove a zero-length continuation jump, if it exists."""
- hole = max(self.holes, key=lambda hole: hole.offset)
- match hole:
- case Hole(
- offset=offset,
- kind="IMAGE_REL_AMD64_REL32",
- value=HoleValue.GOT,
- symbol="_JIT_CONTINUE",
- addend=-4,
- ) as hole:
- # jmp qword ptr [rip]
- jump = b"\x48\xff\x25\x00\x00\x00\x00"
- offset -= 3
- case Hole(
- offset=offset,
- kind="IMAGE_REL_I386_REL32" | "R_X86_64_PLT32" | "X86_64_RELOC_BRANCH",
- value=HoleValue.CONTINUE,
- symbol=None,
- addend=addend,
- ) as hole if (
- _signed(addend) == -4
- ):
- # jmp 5
- jump = b"\xe9\x00\x00\x00\x00"
- offset -= 1
- case Hole(
- offset=offset,
- kind="R_AARCH64_JUMP26",
- value=HoleValue.CONTINUE,
- symbol=None,
- addend=0,
- ) as hole:
- # b #4
- jump = b"\x00\x00\x00\x14"
- case _:
- return
- if self.body[offset:] == jump:
- self.body = self.body[:offset]
- self.holes.remove(hole)
-
@dataclasses.dataclass
class StencilGroup:
@@ -284,9 +223,7 @@ class StencilGroup:
_got: dict[str, int] = dataclasses.field(default_factory=dict, init=False)
_trampolines: set[int] = dataclasses.field(default_factory=set, init=False)
- def process_relocations(
- self, known_symbols: dict[str, int], *, alignment: int = 1, nop: bytes = b""
- ) -> None:
+ def process_relocations(self, known_symbols: dict[str, int]) -> None:
"""Fix up all GOT and internal relocations for this stencil group."""
for hole in self.code.holes.copy():
if (
@@ -306,8 +243,6 @@ class StencilGroup:
self._trampolines.add(ordinal)
hole.addend = ordinal
hole.symbol = None
- self.code.remove_jump()
- self.code.add_nops(nop=nop, alignment=alignment)
self.data.pad(8)
for stencil in [self.code, self.data]:
for hole in stencil.holes:
diff --git a/Tools/jit/_targets.py b/Tools/jit/_targets.py
index b383e39da19..ed10329d25d 100644
--- a/Tools/jit/_targets.py
+++ b/Tools/jit/_targets.py
@@ -13,6 +13,7 @@ import typing
import shlex
import _llvm
+import _optimizers
import _schema
import _stencils
import _writer
@@ -41,8 +42,8 @@ class _Target(typing.Generic[_S, _R]):
triple: str
condition: str
_: dataclasses.KW_ONLY
- alignment: int = 1
args: typing.Sequence[str] = ()
+ optimizer: type[_optimizers.Optimizer] = _optimizers.Optimizer
prefix: str = ""
stable: bool = False
debug: bool = False
@@ -121,8 +122,9 @@ class _Target(typing.Generic[_S, _R]):
async def _compile(
self, opname: str, c: pathlib.Path, tempdir: pathlib.Path
) -> _stencils.StencilGroup:
+ s = tempdir / f"{opname}.s"
o = tempdir / f"{opname}.o"
- args = [
+ args_s = [
f"--target={self.triple}",
"-DPy_BUILD_CORE_MODULE",
"-D_DEBUG" if self.debug else "-DNDEBUG",
@@ -136,7 +138,7 @@ class _Target(typing.Generic[_S, _R]):
f"-I{CPYTHON / 'Python'}",
f"-I{CPYTHON / 'Tools' / 'jit'}",
"-O3",
- "-c",
+ "-S",
# Shorten full absolute file paths in the generated code (like the
# __FILE__ macro and assert failure messages) for reproducibility:
f"-ffile-prefix-map={CPYTHON}=.",
@@ -155,13 +157,16 @@ class _Target(typing.Generic[_S, _R]):
"-fno-stack-protector",
"-std=c11",
"-o",
- f"{o}",
+ f"{s}",
f"{c}",
*self.args,
# Allow user-provided CFLAGS to override any defaults
*shlex.split(self.cflags),
]
- await _llvm.run("clang", args, echo=self.verbose)
+ await _llvm.run("clang", args_s, echo=self.verbose)
+ self.optimizer(s, prefix=self.prefix).run()
+ args_o = [f"--target={self.triple}", "-c", "-o", f"{o}", f"{s}"]
+ await _llvm.run("clang", args_o, echo=self.verbose)
return await self._parse(o)
async def _build_stencils(self) -> dict[str, _stencils.StencilGroup]:
@@ -190,11 +195,7 @@ class _Target(typing.Generic[_S, _R]):
tasks.append(group.create_task(coro, name=opname))
stencil_groups = {task.get_name(): task.result() for task in tasks}
for stencil_group in stencil_groups.values():
- stencil_group.process_relocations(
- known_symbols=self.known_symbols,
- alignment=self.alignment,
- nop=self._get_nop(),
- )
+ stencil_group.process_relocations(self.known_symbols)
return stencil_groups
def build(
@@ -524,42 +525,43 @@ class _MachO(
def get_target(host: str) -> _COFF | _ELF | _MachO:
"""Build a _Target for the given host "triple" and options."""
+ optimizer: type[_optimizers.Optimizer]
target: _COFF | _ELF | _MachO
if re.fullmatch(r"aarch64-apple-darwin.*", host):
condition = "defined(__aarch64__) && defined(__APPLE__)"
- target = _MachO(host, condition, alignment=8, prefix="_")
+ optimizer = _optimizers.OptimizerAArch64
+ target = _MachO(host, condition, optimizer=optimizer, prefix="_")
elif re.fullmatch(r"aarch64-pc-windows-msvc", host):
args = ["-fms-runtime-lib=dll", "-fplt"]
condition = "defined(_M_ARM64)"
- target = _COFF(host, condition, alignment=8, args=args)
+ optimizer = _optimizers.OptimizerAArch64
+ target = _COFF(host, condition, args=args, optimizer=optimizer)
elif re.fullmatch(r"aarch64-.*-linux-gnu", host):
- args = [
- "-fpic",
- # On aarch64 Linux, intrinsics were being emitted and this flag
- # was required to disable them.
- "-mno-outline-atomics",
- ]
+ # -mno-outline-atomics: Keep intrinsics from being emitted.
+ args = ["-fpic", "-mno-outline-atomics"]
condition = "defined(__aarch64__) && defined(__linux__)"
- target = _ELF(host, condition, alignment=8, args=args)
+ optimizer = _optimizers.OptimizerAArch64
+ target = _ELF(host, condition, args=args, optimizer=optimizer)
elif re.fullmatch(r"i686-pc-windows-msvc", host):
- args = [
- "-DPy_NO_ENABLE_SHARED",
- # __attribute__((preserve_none)) is not supported
- "-Wno-ignored-attributes",
- ]
+ # -Wno-ignored-attributes: __attribute__((preserve_none)) is not supported here.
+ args = ["-DPy_NO_ENABLE_SHARED", "-Wno-ignored-attributes"]
+ optimizer = _optimizers.OptimizerX86
condition = "defined(_M_IX86)"
- target = _COFF(host, condition, args=args, prefix="_")
+ target = _COFF(host, condition, args=args, optimizer=optimizer, prefix="_")
elif re.fullmatch(r"x86_64-apple-darwin.*", host):
condition = "defined(__x86_64__) && defined(__APPLE__)"
- target = _MachO(host, condition, prefix="_")
+ optimizer = _optimizers.OptimizerX86
+ target = _MachO(host, condition, optimizer=optimizer, prefix="_")
elif re.fullmatch(r"x86_64-pc-windows-msvc", host):
args = ["-fms-runtime-lib=dll"]
condition = "defined(_M_X64)"
- target = _COFF(host, condition, args=args)
+ optimizer = _optimizers.OptimizerX8664Windows
+ target = _COFF(host, condition, args=args, optimizer=optimizer)
elif re.fullmatch(r"x86_64-.*-linux-gnu", host):
args = ["-fno-pic", "-mcmodel=medium", "-mlarge-data-threshold=0"]
condition = "defined(__x86_64__) && defined(__linux__)"
- target = _ELF(host, condition, args=args)
+ optimizer = _optimizers.OptimizerX86
+ target = _ELF(host, condition, args=args, optimizer=optimizer)
else:
raise ValueError(host)
return target
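
Aside (not part of the patch): the _compile() change above splits compilation into two clang invocations with a text-level assembly rewrite in between. A rough sketch of that pipeline, assuming a hypothetical rewrite_asm() hook standing in for the per-target _optimizers classes:

    # Sketch: C -> .s with clang -S, rewrite the assembly in Python, then assemble to .o.
    import pathlib
    import subprocess

    def compile_stencil(c: pathlib.Path, triple: str, tempdir: pathlib.Path) -> pathlib.Path:
        s = tempdir / f"{c.stem}.s"
        o = tempdir / f"{c.stem}.o"
        subprocess.run(
            ["clang", f"--target={triple}", "-O3", "-S", "-o", str(s), str(c)], check=True
        )
        rewrite_asm(s)  # placeholder for the optimizer pass selected per target
        subprocess.run(
            ["clang", f"--target={triple}", "-c", "-o", str(o), str(s)], check=True
        )
        return o

    def rewrite_asm(s: pathlib.Path) -> None:
        text = s.read_text()
        # ... text-level edits on the generated assembly would go here ...
        s.write_text(text)

Working on the textual assembly is what lets the Python tool do things like dropping a trailing continuation jump or aligning code, which previously required the byte-level remove_jump()/add_nops() helpers removed from _stencils.py.
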
diff --git a/Tools/scripts/summarize_stats.py b/Tools/scripts/summarize_stats.py
index 68cfad3f92c..905af9dcfd8 100644
--- a/Tools/scripts/summarize_stats.py
+++ b/Tools/scripts/summarize_stats.py
@@ -492,7 +492,7 @@ class Stats:
): (trace_too_long, attempts),
Doc(
"Trace too short",
- "A potential trace is abandoned because it it too short.",
+ "A potential trace is abandoned because it is too short.",
): (trace_too_short, attempts),
Doc(
"Inner loop found", "A trace is truncated because it has an inner loop"
diff --git a/configure b/configure
index 43b36d9231e..4a0f8959c87 100755
--- a/configure
+++ b/configure
@@ -15750,10 +15750,18 @@ fi
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for --with-system-libmpdec" >&5
printf %s "checking for --with-system-libmpdec... " >&6; }
+
# Check whether --with-system_libmpdec was given.
if test ${with_system_libmpdec+y}
then :
- withval=$with_system_libmpdec;
+ withval=$with_system_libmpdec; if test "x$with_system_libmpdec" = xno
+then :
+ LIBMPDEC_CFLAGS="-I\$(srcdir)/Modules/_decimal/libmpdec"
+ LIBMPDEC_LIBS="-lm \$(LIBMPDEC_A)"
+ LIBMPDEC_INTERNAL="\$(LIBMPDEC_HEADERS) \$(LIBMPDEC_A)"
+ have_mpdec=yes
+ with_system_libmpdec=no
+fi
else case e in #(
e) with_system_libmpdec="yes" ;;
esac
@@ -15762,8 +15770,6 @@ fi
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: $with_system_libmpdec" >&5
printf "%s\n" "$with_system_libmpdec" >&6; }
-
-
if test "x$with_system_libmpdec" = xyes
then :
@@ -15841,13 +15847,6 @@ else
printf "%s\n" "yes" >&6; }
fi
-else case e in #(
- e) LIBMPDEC_CFLAGS="-I\$(srcdir)/Modules/_decimal/libmpdec"
- LIBMPDEC_LIBS="-lm \$(LIBMPDEC_A)"
- LIBMPDEC_INTERNAL="\$(LIBMPDEC_HEADERS) \$(LIBMPDEC_A)"
- have_mpdec=yes
- with_system_libmpdec=no ;;
-esac
fi
if test "x$with_system_libmpdec" = xyes
@@ -15894,21 +15893,6 @@ LDFLAGS=$save_LDFLAGS
LIBS=$save_LIBS
-else case e in #(
- e) { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: the bundled copy of libmpdecimal is scheduled for removal in Python 3.15; consider using a system installed mpdecimal library." >&5
-printf "%s\n" "$as_me: WARNING: the bundled copy of libmpdecimal is scheduled for removal in Python 3.15; consider using a system installed mpdecimal library." >&2;} ;;
-esac
-fi
-
-if test "$with_system_libmpdec" = "yes" && test "$have_mpdec" = "no"
-then :
- { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: no system libmpdecimal found; falling back to bundled libmpdecimal (deprecated and scheduled for removal in Python 3.15)" >&5
-printf "%s\n" "$as_me: WARNING: no system libmpdecimal found; falling back to bundled libmpdecimal (deprecated and scheduled for removal in Python 3.15)" >&2;}
- LIBMPDEC_CFLAGS="-I\$(srcdir)/Modules/_decimal/libmpdec"
- LIBMPDEC_LIBS="-lm \$(LIBMPDEC_A)"
- LIBMPDEC_INTERNAL="\$(LIBMPDEC_HEADERS) \$(LIBMPDEC_A)"
- have_mpdec=yes
- with_system_libmpdec=no
fi
# Disable forced inlining in debug builds, see GH-94847
@@ -28234,7 +28218,7 @@ printf "%s\n" "$ac_cv_normalize_century" >&6; }
if test "$ac_cv_normalize_century" = yes
then
-printf "%s\n" "#define Py_NORMALIZE_CENTURY 1" >>confdefs.h
+printf "%s\n" "#define _Py_NORMALIZE_CENTURY 1" >>confdefs.h
fi
@@ -32633,7 +32617,7 @@ then :
LIBHACL_SIMD128_FLAGS="-msse -msse2 -msse3 -msse4.1 -msse4.2"
-printf "%s\n" "#define HACL_CAN_COMPILE_SIMD128 1" >>confdefs.h
+printf "%s\n" "#define _Py_HACL_CAN_COMPILE_VEC128 1" >>confdefs.h
# macOS universal2 builds *support* the -msse etc flags because they're
@@ -32709,7 +32693,7 @@ then :
LIBHACL_SIMD256_FLAGS="-mavx2"
-printf "%s\n" "#define HACL_CAN_COMPILE_SIMD256 1" >>confdefs.h
+printf "%s\n" "#define _Py_HACL_CAN_COMPILE_VEC256 1" >>confdefs.h
# macOS universal2 builds *support* the -mavx2 compiler flag because it's
@@ -33194,6 +33178,18 @@ fi
printf "%s\n" "$py_cv_module__decimal" >&6; }
+if test "x$with_system_libmpdec" = xno
+then :
+ { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: the bundled copy of libmpdec is scheduled for removal in Python 3.16; consider using a system installed mpdecimal library." >&5
+printf "%s\n" "$as_me: WARNING: the bundled copy of libmpdec is scheduled for removal in Python 3.16; consider using a system installed mpdecimal library." >&2;}
+fi
+if test "$with_system_libmpdec" = "yes" && test "$have_mpdec" = "no"
+then :
+ { printf "%s\n" "$as_me:${as_lineno-$LINENO}: WARNING: no system libmpdec found; falling back to pure-Python version for the decimal module" >&5
+printf "%s\n" "$as_me: WARNING: no system libmpdec found; falling back to pure-Python version for the decimal module" >&2;}
+fi
+
+
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for stdlib extension module _dbm" >&5
printf %s "checking for stdlib extension module _dbm... " >&6; }
if test "$py_cv_module__dbm" != "n/a"
diff --git a/configure.ac b/configure.ac
index e77696e3a4e..10d7a0c6056 100644
--- a/configure.ac
+++ b/configure.ac
@@ -4169,31 +4169,30 @@ fi
# Check for use of the system libmpdec library
AC_MSG_CHECKING([for --with-system-libmpdec])
+AC_DEFUN([USE_BUNDLED_LIBMPDEC],
+ [LIBMPDEC_CFLAGS="-I\$(srcdir)/Modules/_decimal/libmpdec"
+ LIBMPDEC_LIBS="-lm \$(LIBMPDEC_A)"
+ LIBMPDEC_INTERNAL="\$(LIBMPDEC_HEADERS) \$(LIBMPDEC_A)"
+ have_mpdec=yes
+ with_system_libmpdec=no])
AC_ARG_WITH(
[system_libmpdec],
[AS_HELP_STRING(
[--with-system-libmpdec],
[build _decimal module using an installed mpdecimal library, see Doc/library/decimal.rst (default is yes)]
)],
- [],
+ [AS_IF([test "x$with_system_libmpdec" = xno],
+ [USE_BUNDLED_LIBMPDEC()])],
[with_system_libmpdec="yes"])
AC_MSG_RESULT([$with_system_libmpdec])
-AC_DEFUN([USE_BUNDLED_LIBMPDEC],
- [LIBMPDEC_CFLAGS="-I\$(srcdir)/Modules/_decimal/libmpdec"
- LIBMPDEC_LIBS="-lm \$(LIBMPDEC_A)"
- LIBMPDEC_INTERNAL="\$(LIBMPDEC_HEADERS) \$(LIBMPDEC_A)"
- have_mpdec=yes
- with_system_libmpdec=no])
-
AS_VAR_IF(
[with_system_libmpdec], [yes],
[PKG_CHECK_MODULES(
[LIBMPDEC], [libmpdec >= 2.5.0], [],
[LIBMPDEC_CFLAGS=${LIBMPDEC_CFLAGS-""}
LIBMPDEC_LIBS=${LIBMPDEC_LIBS-"-lmpdec -lm"}
- LIBMPDEC_INTERNAL=])],
- [USE_BUNDLED_LIBMPDEC()])
+ LIBMPDEC_INTERNAL=])])
AS_VAR_IF([with_system_libmpdec], [yes],
[WITH_SAVE_ENV([
@@ -4209,16 +4208,7 @@ AS_VAR_IF([with_system_libmpdec], [yes],
], [const char *x = mpd_version();])],
[have_mpdec=yes],
[have_mpdec=no])
- ])],
- [AC_MSG_WARN([m4_normalize([
- the bundled copy of libmpdecimal is scheduled for removal in Python 3.15;
- consider using a system installed mpdecimal library.])])])
-
-AS_IF([test "$with_system_libmpdec" = "yes" && test "$have_mpdec" = "no"],
- [AC_MSG_WARN([m4_normalize([
- no system libmpdecimal found; falling back to bundled libmpdecimal
- (deprecated and scheduled for removal in Python 3.15)])])
- USE_BUNDLED_LIBMPDEC()])
+ ])])
# Disable forced inlining in debug builds, see GH-94847
AS_VAR_IF(
@@ -6813,7 +6803,7 @@ int main(void)
[ac_cv_normalize_century=yes])])
if test "$ac_cv_normalize_century" = yes
then
- AC_DEFINE([Py_NORMALIZE_CENTURY], [1],
+ AC_DEFINE([_Py_NORMALIZE_CENTURY], [1],
[Define if year with century should be normalized for strftime.])
fi
@@ -8026,7 +8016,8 @@ then
AX_CHECK_COMPILE_FLAG([-msse -msse2 -msse3 -msse4.1 -msse4.2],[
[LIBHACL_SIMD128_FLAGS="-msse -msse2 -msse3 -msse4.1 -msse4.2"]
- AC_DEFINE([HACL_CAN_COMPILE_SIMD128], [1], [HACL* library can compile SIMD128 implementations])
+ AC_DEFINE([_Py_HACL_CAN_COMPILE_VEC128], [1], [
+ HACL* library can compile SIMD128 implementations])
# macOS universal2 builds *support* the -msse etc flags because they're
# available on x86_64. However, performance of the HACL SIMD128 implementation
@@ -8057,7 +8048,8 @@ if test "$ac_sys_system" != "Linux-android" -a "$ac_sys_system" != "WASI" || \
then
AX_CHECK_COMPILE_FLAG([-mavx2],[
[LIBHACL_SIMD256_FLAGS="-mavx2"]
- AC_DEFINE([HACL_CAN_COMPILE_SIMD256], [1], [HACL* library can compile SIMD256 implementations])
+ AC_DEFINE([_Py_HACL_CAN_COMPILE_VEC256], [1], [
+ HACL* library can compile SIMD256 implementations])
# macOS universal2 builds *support* the -mavx2 compiler flag because it's
# available on x86_64; but the HACL SIMD256 build then fails because the
@@ -8134,6 +8126,16 @@ PY_STDLIB_MOD([_curses_panel],
PY_STDLIB_MOD([_decimal],
[], [test "$have_mpdec" = "yes"],
[$LIBMPDEC_CFLAGS], [$LIBMPDEC_LIBS])
+
+AS_VAR_IF([with_system_libmpdec], [no],
+ [AC_MSG_WARN([m4_normalize([
+ the bundled copy of libmpdec is scheduled for removal in Python 3.16;
+ consider using a system installed mpdecimal library.])])])
+AS_IF([test "$with_system_libmpdec" = "yes" && test "$have_mpdec" = "no"],
+ [AC_MSG_WARN([m4_normalize([
+ no system libmpdec found; falling back to pure-Python version
+ for the decimal module])])])
+
PY_STDLIB_MOD([_dbm],
[test -n "$with_dbmliborder"], [test "$have_dbm" != "no"],
[$DBM_CFLAGS], [$DBM_LIBS])
diff --git a/iOS/README.rst b/iOS/README.rst
index 13b88514493..f0979ba152e 100644
--- a/iOS/README.rst
+++ b/iOS/README.rst
@@ -196,7 +196,7 @@ simulator build with a deployment target of 15.4.
Merge thin frameworks into fat frameworks
-----------------------------------------
-Once you've built a ``Python.framework`` for each ABI and and architecture, you
+Once you've built a ``Python.framework`` for each ABI and architecture, you
must produce a "fat" framework for each ABI that contains all the architectures
for that ABI.
diff --git a/iOS/Resources/bin/arm64-apple-ios-simulator-strip b/iOS/Resources/bin/arm64-apple-ios-simulator-strip
new file mode 100755
index 00000000000..fd59d309b73
--- /dev/null
+++ b/iOS/Resources/bin/arm64-apple-ios-simulator-strip
@@ -0,0 +1,2 @@
+#!/bin/sh
+xcrun --sdk iphonesimulator${IOS_SDK_VERSION} strip -arch arm64 "$@"
diff --git a/iOS/Resources/bin/arm64-apple-ios-strip b/iOS/Resources/bin/arm64-apple-ios-strip
new file mode 100755
index 00000000000..75e823a3d02
--- /dev/null
+++ b/iOS/Resources/bin/arm64-apple-ios-strip
@@ -0,0 +1,2 @@
+#!/bin/sh
+xcrun --sdk iphoneos${IOS_SDK_VERSION} strip -arch arm64 "$@"
diff --git a/iOS/Resources/bin/x86_64-apple-ios-simulator-strip b/iOS/Resources/bin/x86_64-apple-ios-simulator-strip
new file mode 100755
index 00000000000..c5cfb289291
--- /dev/null
+++ b/iOS/Resources/bin/x86_64-apple-ios-simulator-strip
@@ -0,0 +1,2 @@
+#!/bin/sh
+xcrun --sdk iphonesimulator${IOS_SDK_VERSION} strip -arch x86_64 "$@"
diff --git a/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m b/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m
index dd6e76f9496..b502a6eb277 100644
--- a/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m
+++ b/iOS/testbed/iOSTestbedTests/iOSTestbedTests.m
@@ -15,6 +15,11 @@
PyStatus status;
PyPreConfig preconfig;
PyConfig config;
+ PyObject *app_packages_path;
+ PyObject *method_args;
+ PyObject *result;
+ PyObject *site_module;
+ PyObject *site_addsitedir_attr;
PyObject *sys_module;
PyObject *sys_path_attr;
NSArray *test_args;
@@ -111,29 +116,55 @@
return;
}
- sys_module = PyImport_ImportModule("sys");
- if (sys_module == NULL) {
- XCTFail(@"Could not import sys module");
+ // Add app_packages as a site directory. This both adds to sys.path,
+ // and ensures that any .pth files in that directory will be executed.
+ site_module = PyImport_ImportModule("site");
+ if (site_module == NULL) {
+ XCTFail(@"Could not import site module");
return;
}
- sys_path_attr = PyObject_GetAttrString(sys_module, "path");
- if (sys_path_attr == NULL) {
- XCTFail(@"Could not access sys.path");
+ site_addsitedir_attr = PyObject_GetAttrString(site_module, "addsitedir");
+ if (site_addsitedir_attr == NULL || !PyCallable_Check(site_addsitedir_attr)) {
+ XCTFail(@"Could not access site.addsitedir");
return;
}
- // Add the app packages path
path = [NSString stringWithFormat:@"%@/app_packages", resourcePath, nil];
NSLog(@"App packages path: %@", path);
wtmp_str = Py_DecodeLocale([path UTF8String], NULL);
- failed = PyList_Insert(sys_path_attr, 0, PyUnicode_FromString([path UTF8String]));
- if (failed) {
- XCTFail(@"Unable to add app packages to sys.path");
+ app_packages_path = PyUnicode_FromWideChar(wtmp_str, wcslen(wtmp_str));
+ if (app_packages_path == NULL) {
+ XCTFail(@"Could not convert app_packages path to unicode");
return;
}
PyMem_RawFree(wtmp_str);
+ method_args = Py_BuildValue("(O)", app_packages_path);
+ if (method_args == NULL) {
+ XCTFail(@"Could not create arguments for site.addsitedir");
+ return;
+ }
+
+ result = PyObject_CallObject(site_addsitedir_attr, method_args);
+ if (result == NULL) {
+ XCTFail(@"Could not add app_packages directory using site.addsitedir");
+ return;
+ }
+
+ // Add test code to sys.path
+ sys_module = PyImport_ImportModule("sys");
+ if (sys_module == NULL) {
+ XCTFail(@"Could not import sys module");
+ return;
+ }
+
+ sys_path_attr = PyObject_GetAttrString(sys_module, "path");
+ if (sys_path_attr == NULL) {
+ XCTFail(@"Could not access sys.path");
+ return;
+ }
+
path = [NSString stringWithFormat:@"%@/app", resourcePath, nil];
NSLog(@"App path: %@", path);
wtmp_str = Py_DecodeLocale([path UTF8String], NULL);
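
Aside (not part of the patch): the Objective-C changes above are the C-API equivalent of calling site.addsitedir() so that .pth files in app_packages are honored, rather than inserting the directory into sys.path by hand. A hedged Python-level sketch of the same setup, with placeholder paths:

    # Illustrative only; paths are placeholders for the testbed's resource directories.
    import site
    import sys

    app_packages = "/path/to/Resources/app_packages"  # placeholder
    app_code = "/path/to/Resources/app"                # placeholder

    site.addsitedir(app_packages)  # adds to sys.path and processes any .pth files
    sys.path.insert(0, app_code)   # the test code itself still goes on sys.path directly
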
diff --git a/pyconfig.h.in b/pyconfig.h.in
index d4f1da7fb10..1c533b2bfb7 100644
--- a/pyconfig.h.in
+++ b/pyconfig.h.in
@@ -50,12 +50,6 @@
/* Define if getpgrp() must be called as getpgrp(0). */
#undef GETPGRP_HAVE_ARG
-/* HACL* library can compile SIMD128 implementations */
-#undef HACL_CAN_COMPILE_SIMD128
-
-/* HACL* library can compile SIMD256 implementations */
-#undef HACL_CAN_COMPILE_SIMD256
-
/* Define if you have the 'accept' function. */
#undef HAVE_ACCEPT
@@ -1743,9 +1737,6 @@
SipHash13: 3, externally defined: 0 */
#undef Py_HASH_ALGORITHM
-/* Define if year with century should be normalized for strftime. */
-#undef Py_NORMALIZE_CENTURY
-
/* Define if you want to enable remote debugging support. */
#undef Py_REMOTE_DEBUG
@@ -2026,6 +2017,15 @@
/* Defined if _Complex C type can be used with libffi. */
#undef _Py_FFI_SUPPORT_C_COMPLEX
+/* HACL* library can compile SIMD128 implementations */
+#undef _Py_HACL_CAN_COMPILE_VEC128
+
+/* HACL* library can compile SIMD256 implementations */
+#undef _Py_HACL_CAN_COMPILE_VEC256
+
+/* Define if year with century should be normalized for strftime. */
+#undef _Py_NORMALIZE_CENTURY
+
/* Define to force use of thread-safe errno, h_errno, and other functions */
#undef _REENTRANT