-rw-r--r--  .devcontainer/devcontainer.json | 2
-rw-r--r--  .github/CODEOWNERS | 5
-rw-r--r--  .github/workflows/tail-call.yml | 1
-rw-r--r--  .gitignore | 5
-rw-r--r--  .pre-commit-config.yaml | 8
-rw-r--r--  .readthedocs.yml | 1
-rw-r--r--  Doc/c-api/allocation.rst | 2
-rw-r--r--  Doc/c-api/init_config.rst | 2
-rw-r--r--  Doc/c-api/intro.rst | 42
-rw-r--r--  Doc/c-api/lifecycle.rst | 6
-rw-r--r--  Doc/c-api/module.rst | 42
-rw-r--r--  Doc/c-api/stable.rst | 1
-rw-r--r--  Doc/c-api/sys.rst | 51
-rw-r--r--  Doc/c-api/unicode.rst | 25
-rw-r--r--  Doc/data/stable_abi.dat | 4
-rw-r--r--  Doc/deprecations/pending-removal-in-3.19.rst | 16
-rw-r--r--  Doc/extending/building.rst | 6
-rw-r--r--  Doc/extending/embedding.rst | 12
-rw-r--r--  Doc/extending/extending.rst | 170
-rw-r--r--  Doc/extending/index.rst | 20
-rw-r--r--  Doc/extending/newtypes_tutorial.rst | 37
-rw-r--r--  Doc/extending/windows.rst | 2
-rw-r--r--  Doc/faq/extending.rst | 21
-rw-r--r--  Doc/howto/cporting.rst | 6
-rw-r--r--  Doc/howto/curses.rst | 2
-rw-r--r--  Doc/howto/isolating-extensions.rst | 4
-rw-r--r--  Doc/howto/urllib2.rst | 86
-rw-r--r--  Doc/includes/newtypes/custom.c | 41
-rw-r--r--  Doc/includes/newtypes/custom2.c | 42
-rw-r--r--  Doc/includes/newtypes/custom3.c | 40
-rw-r--r--  Doc/includes/newtypes/custom4.c | 40
-rw-r--r--  Doc/includes/newtypes/sublist.c | 46
-rw-r--r--  Doc/library/argparse.rst | 15
-rw-r--r--  Doc/library/ast.rst | 303
-rw-r--r--  Doc/library/asyncio-stream.rst | 9
-rw-r--r--  Doc/library/compileall.rst | 13
-rw-r--r--  Doc/library/compression.zstd.rst | 18
-rw-r--r--  Doc/library/copy.rst | 2
-rw-r--r--  Doc/library/csv.rst | 10
-rw-r--r--  Doc/library/ctypes.rst | 52
-rw-r--r--  Doc/library/dbm.rst | 43
-rw-r--r--  Doc/library/doctest.rst | 133
-rw-r--r--  Doc/library/hashlib.rst | 7
-rw-r--r--  Doc/library/logging.handlers.rst | 10
-rw-r--r--  Doc/library/math.rst | 24
-rw-r--r--  Doc/library/os.path.rst | 32
-rw-r--r--  Doc/library/shelve.rst | 16
-rw-r--r--  Doc/library/socket.rst | 6
-rw-r--r--  Doc/library/stdtypes.rst | 2
-rw-r--r--  Doc/library/string.rst | 2
-rw-r--r--  Doc/library/tarfile.rst | 20
-rw-r--r--  Doc/library/typing.rst | 32
-rw-r--r--  Doc/library/zlib.rst | 28
-rw-r--r--  Doc/reference/datamodel.rst | 14
-rw-r--r--  Doc/reference/lexical_analysis.rst | 3
-rw-r--r--  Doc/tools/extensions/audit_events.py | 16
-rw-r--r--  Doc/whatsnew/3.14.rst | 26
-rw-r--r--  Doc/whatsnew/3.15.rst | 113
-rw-r--r--  Include/abstract.h | 14
-rw-r--r--  Include/cpython/unicodeobject.h | 4
-rw-r--r--  Include/internal/mimalloc/mimalloc/internal.h | 4
-rw-r--r--  Include/internal/mimalloc/mimalloc/types.h | 29
-rw-r--r--  Include/internal/pycore_ceval.h | 2
-rw-r--r--  Include/internal/pycore_code.h | 2
-rw-r--r--  Include/internal/pycore_compile.h | 1
-rw-r--r--  Include/internal/pycore_crossinterp.h | 35
-rw-r--r--  Include/internal/pycore_global_objects_fini_generated.h | 1
-rw-r--r--  Include/internal/pycore_global_strings.h | 1
-rw-r--r--  Include/internal/pycore_interp_structs.h | 3
-rw-r--r--  Include/internal/pycore_lock.h | 3
-rw-r--r--  Include/internal/pycore_magic_number.h | 4
-rw-r--r--  Include/internal/pycore_modsupport.h | 3
-rw-r--r--  Include/internal/pycore_object.h | 7
-rw-r--r--  Include/internal/pycore_opcode_metadata.h | 40
-rw-r--r--  Include/internal/pycore_runtime_init_generated.h | 1
-rw-r--r--  Include/internal/pycore_stackref.h | 74
-rw-r--r--  Include/internal/pycore_sysmodule.h | 5
-rw-r--r--  Include/internal/pycore_typeobject.h | 1
-rw-r--r--  Include/internal/pycore_unicodeobject_generated.h | 4
-rw-r--r--  Include/internal/pycore_uop_ids.h | 1
-rw-r--r--  Include/internal/pycore_uop_metadata.h | 6
-rw-r--r--  Include/object.h | 12
-rw-r--r--  Include/pymacro.h | 5
-rw-r--r--  Include/refcount.h | 3
-rw-r--r--  Include/sysmodule.h | 6
-rw-r--r--  InternalDocs/exception_handling.md | 10
-rw-r--r--  Lib/_pydecimal.py | 4
-rw-r--r--  Lib/_pyrepl/_module_completer.py | 4
-rw-r--r--  Lib/_pyrepl/readline.py | 5
-rw-r--r--  Lib/argparse.py | 2
-rw-r--r--  Lib/ast.py | 28
-rw-r--r--  Lib/code.py | 2
-rw-r--r--  Lib/compression/zstd/_zstdfile.py | 3
-rw-r--r--  Lib/ctypes/__init__.py | 6
-rw-r--r--  Lib/dbm/dumb.py | 32
-rw-r--r--  Lib/dbm/sqlite3.py | 4
-rw-r--r--  Lib/difflib.py | 50
-rw-r--r--  Lib/doctest.py | 113
-rw-r--r--  Lib/fractions.py | 4
-rw-r--r--  Lib/genericpath.py | 11
-rw-r--r--  Lib/hashlib.py | 12
-rw-r--r--  Lib/idlelib/News3.txt | 7
-rw-r--r--  Lib/ipaddress.py | 10
-rw-r--r--  Lib/json/encoder.py | 5
-rw-r--r--  Lib/ntpath.py | 38
-rw-r--r--  Lib/posixpath.py | 57
-rw-r--r--  Lib/shelve.py | 5
-rw-r--r--  Lib/tarfile.py | 163
-rw-r--r--  Lib/test/_code_definitions.py | 20
-rw-r--r--  Lib/test/pythoninfo.py | 13
-rw-r--r--  Lib/test/subprocessdata/fd_status.py | 4
-rw-r--r--  Lib/test/support/__init__.py | 4
-rw-r--r--  Lib/test/support/interpreters/__init__.py | 31
-rw-r--r--  Lib/test/support/interpreters/channels.py | 2
-rw-r--r--  Lib/test/test__interpreters.py | 25
-rw-r--r--  Lib/test/test_ast/test_ast.py | 95
-rw-r--r--  Lib/test/test_asyncgen.py | 9
-rw-r--r--  Lib/test/test_capi/test_sys.py | 64
-rw-r--r--  Lib/test/test_capi/test_type.py | 10
-rw-r--r--  Lib/test/test_capi/test_unicode.py | 7
-rw-r--r--  Lib/test/test_class.py | 1
-rw-r--r--  Lib/test/test_code.py | 48
-rw-r--r--  Lib/test/test_codecs.py | 7
-rw-r--r--  Lib/test/test_csv.py | 48
-rw-r--r--  Lib/test/test_ctypes/test_incomplete.py | 10
-rw-r--r--  Lib/test/test_dbm.py | 61
-rw-r--r--  Lib/test/test_dbm_gnu.py | 27
-rw-r--r--  Lib/test/test_decimal.py | 2
-rw-r--r--  Lib/test/test_descr.py | 28
-rw-r--r--  Lib/test/test_difflib.py | 6
-rw-r--r--  Lib/test/test_difflib_expect.html | 48
-rw-r--r--  Lib/test/test_dis.py | 207
-rw-r--r--  Lib/test/test_doctest/sample_doctest_errors.py | 46
-rw-r--r--  Lib/test/test_doctest/test_doctest.py | 447
-rw-r--r--  Lib/test/test_doctest/test_doctest_errors.txt | 14
-rw-r--r--  Lib/test/test_doctest/test_doctest_skip.txt | 2
-rw-r--r--  Lib/test/test_doctest/test_doctest_skip2.txt | 6
-rw-r--r--  Lib/test/test_external_inspection.py | 82
-rw-r--r--  Lib/test/test_fcntl.py | 13
-rw-r--r--  Lib/test/test_fractions.py | 8
-rw-r--r--  Lib/test/test_free_threading/test_itertools.py (renamed from Lib/test/test_free_threading/test_itertools_batched.py) | 32
-rw-r--r--  Lib/test/test_grammar.py | 14
-rw-r--r--  Lib/test/test_hashlib.py | 95
-rw-r--r--  Lib/test/test_interpreters/test_api.py | 552
-rw-r--r--  Lib/test/test_io.py | 31
-rw-r--r--  Lib/test/test_ioctl.py | 13
-rw-r--r--  Lib/test/test_ipaddress.py | 19
-rw-r--r--  Lib/test/test_json/test_dump.py | 8
-rw-r--r--  Lib/test/test_list.py | 15
-rw-r--r--  Lib/test/test_math.py | 28
-rw-r--r--  Lib/test/test_monitoring.py | 15
-rw-r--r--  Lib/test/test_ntpath.py | 214
-rw-r--r--  Lib/test/test_posixpath.py | 252
-rw-r--r--  Lib/test/test_pyrepl/test_pyrepl.py | 39
-rw-r--r--  Lib/test/test_queue.py | 20
-rw-r--r--  Lib/test/test_random.py | 28
-rw-r--r--  Lib/test/test_readline.py | 8
-rw-r--r--  Lib/test/test_regrtest.py | 7
-rw-r--r--  Lib/test/test_ssl.py | 43
-rw-r--r--  Lib/test/test_stable_abi_ctypes.py | 4
-rw-r--r--  Lib/test/test_sys.py | 26
-rw-r--r--  Lib/test/test_tarfile.py | 310
-rw-r--r--  Lib/test/test_tokenize.py | 74
-rw-r--r--  Lib/test/test_traceback.py | 29
-rwxr-xr-x  Lib/test/test_uuid.py | 28
-rw-r--r--  Lib/test/test_zlib.py | 108
-rw-r--r--  Lib/test/test_zstd.py | 241
-rw-r--r--  Lib/tokenize.py | 2
-rw-r--r--  Lib/traceback.py | 13
-rw-r--r--  Lib/uuid.py | 6
-rw-r--r--  Misc/ACKS | 1
-rw-r--r--  Misc/NEWS.d/next/Build/2025-05-30-11-02-30.gh-issue-134923.gBkRg4.rst | 3
-rw-r--r--  Misc/NEWS.d/next/C_API/2023-10-18-14-36-35.gh-issue-108512.fMZLfr.rst | 2
-rw-r--r--  Misc/NEWS.d/next/C_API/2025-05-13-16-06-46.gh-issue-133968.6alWst.rst | 4
-rw-r--r--  Misc/NEWS.d/next/C_API/2025-05-30-11-33-17.gh-issue-134745.GN-zk2.rst | 3
-rw-r--r--  Misc/NEWS.d/next/C_API/2025-06-02-13-19-22.gh-issue-134989.sDDyBN.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-03-14-13-08-20.gh-issue-127266._tyfBp.rst | 6
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-04-30-14-13-01.gh-issue-132554.GqQaUp.rst | 4
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-05-18-14-33-23.gh-issue-69605.ZMO49F.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-05-26-15-55-50.gh-issue-133912.-xAguL.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-05-27-18-59-54.gh-issue-134679.FWPBu6.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-05-27-20-21-34.gh-issue-131798.b32zkl.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-05-28-23-58-50.gh-issue-117852.BO9g7z.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-05-30-15-56-19.gh-issue-134908.3a7PxM.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Library/2025-03-09-03-13-41.gh-issue-130999.tBRBVB.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-03-13-20-48-58.gh-issue-123471.cM4w4f.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Library/2025-04-07-06-41-54.gh-issue-131884.ym9BJN.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Library/2025-04-21-01-03-15.gh-issue-127081.WXRliX.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-04-25-16-06-53.gh-issue-132908.wV5rja.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-04-29-11-48-46.gh-issue-132876.lyTQGZ.rst | 4
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-01-10-56-44.gh-issue-132813.rKurvp.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-08-13-43-19.gh-issue-133489.9eGS1Z.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-11-10-01-48.gh-issue-133866.g3dHP_.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-15-00-27-09.gh-issue-134004.e8k4-R.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-23-20-01-52.gh-issue-134580.xnaJ70.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-24-13-10-35.gh-issue-134210.0IuMY2.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-25-13-46-37.gh-issue-134635.ZlPrlX.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-26-11-01-54.gh-issue-134531.my1Fzt.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-26-12-31-08.gh-issue-132710.ApU3TZ.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-26-14-04-39.gh-issue-134696.P04xUa.rst | 5
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-26-17-06-40.gh-issue-134637.9-3zRL.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-26-22-18-32.gh-issue-134771.RKXpLT.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-27-11-13-51.gh-issue-133579.KY9M6S.rst | 8
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-27-11-18-13.gh-issue-133579.ohtgdC.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-27-11-24-38.gh-issue-133579.WGPUC1.rst | 7
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-28-15-53-27.gh-issue-128840.Nur2pB.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-28-20-49-29.gh-issue-134857.dVYXVO.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-29-06-53-40.gh-issue-134885.-_L22o.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-29-17-39-13.gh-issue-108885.MegCRA.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-30-13-07-29.gh-issue-134718.9Qvhxn.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-30-18-13-48.gh-issue-134718.5FEspx.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-31-12-08-12.gh-issue-134970.lgSaxq.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-31-15-49-46.gh-issue-134978.mXXuvW.rst | 7
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-01-15-13-07.gh-issue-66234.Jw7OdC.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-02-14-28-30.gh-issue-130662.EIgIR8.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-02-14-36-28.gh-issue-130662.Gpr2GB.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Security/2025-06-02-11-32-23.gh-issue-135034.RLGjbp.rst | 6
-rw-r--r--  Misc/NEWS.d/next/Windows/2025-06-03-18-26-54.gh-issue-135099.Q9usKm.rst | 2
-rw-r--r--  Misc/stable_abi.toml | 8
-rw-r--r--  Modules/_csv.c | 73
-rw-r--r--  Modules/_ctypes/_ctypes.c | 142
-rw-r--r--  Modules/_cursesmodule.c | 621
-rw-r--r--  Modules/_gdbmmodule.c | 8
-rw-r--r--  Modules/_hashopenssl.c | 468
-rw-r--r--  Modules/_interpchannelsmodule.c | 8
-rw-r--r--  Modules/_interpqueuesmodule.c | 8
-rw-r--r--  Modules/_interpretersmodule.c | 652
-rw-r--r--  Modules/_io/textio.c | 15
-rw-r--r--  Modules/_json.c | 43
-rw-r--r--  Modules/_lsprof.c | 6
-rw-r--r--  Modules/_pickle.c | 2
-rw-r--r--  Modules/_randommodule.c | 20
-rw-r--r--  Modules/_remote_debugging_module.c | 377
-rw-r--r--  Modules/_ssl.c | 2
-rw-r--r--  Modules/_testcapi/unicode.c | 22
-rw-r--r--  Modules/_testcapimodule.c | 49
-rw-r--r--  Modules/_testinternalcapi.c | 3
-rw-r--r--  Modules/_testlimitedcapi/sys.c | 73
-rw-r--r--  Modules/_threadmodule.c | 3
-rw-r--r--  Modules/_tkinter.c | 5
-rw-r--r--  Modules/_uuidmodule.c | 40
-rw-r--r--  Modules/_winapi.c | 14
-rw-r--r--  Modules/_zstd/_zstdmodule.c | 111
-rw-r--r--  Modules/_zstd/_zstdmodule.h | 13
-rw-r--r--  Modules/_zstd/clinic/zstddict.c.h | 43
-rw-r--r--  Modules/_zstd/compressor.c | 257
-rw-r--r--  Modules/_zstd/decompressor.c | 115
-rw-r--r--  Modules/_zstd/zstddict.c | 89
-rw-r--r--  Modules/_zstd/zstddict.h | 6
-rw-r--r--  Modules/blake2module.c | 30
-rw-r--r--  Modules/clinic/_cursesmodule.c.h | 34
-rw-r--r--  Modules/clinic/_hashopenssl.c.h | 549
-rw-r--r--  Modules/clinic/_randommodule.c.h | 10
-rw-r--r--  Modules/clinic/_remote_debugging_module.c.h | 33
-rw-r--r--  Modules/clinic/_winapi.c.h | 20
-rw-r--r--  Modules/clinic/blake2module.c.h | 98
-rw-r--r--  Modules/clinic/mathmodule.c.h | 70
-rw-r--r--  Modules/clinic/md5module.c.h | 34
-rw-r--r--  Modules/clinic/sha1module.c.h | 34
-rw-r--r--  Modules/clinic/sha2module.c.h | 130
-rw-r--r--  Modules/clinic/sha3module.c.h | 128
-rw-r--r--  Modules/clinic/zlibmodule.c.h | 120
-rw-r--r--  Modules/faulthandler.c | 3
-rw-r--r--  Modules/fcntlmodule.c | 6
-rw-r--r--  Modules/hashlib.h | 38
-rw-r--r--  Modules/itertoolsmodule.c | 27
-rw-r--r--  Modules/main.c | 13
-rw-r--r--  Modules/mathmodule.c | 61
-rw-r--r--  Modules/md5module.c | 13
-rw-r--r--  Modules/posixmodule.c | 24
-rw-r--r--  Modules/sha1module.c | 12
-rw-r--r--  Modules/sha2module.c | 49
-rw-r--r--  Modules/sha3module.c | 21
-rw-r--r--  Modules/syslogmodule.c | 3
-rw-r--r--  Modules/timemodule.c | 2
-rw-r--r--  Modules/zlibmodule.c | 88
-rw-r--r--  Objects/call.c | 35
-rw-r--r--  Objects/frameobject.c | 4
-rw-r--r--  Objects/funcobject.c | 34
-rw-r--r--  Objects/genericaliasobject.c | 6
-rw-r--r--  Objects/genobject.c | 49
-rw-r--r--  Objects/longobject.c | 13
-rw-r--r--  Objects/moduleobject.c | 3
-rw-r--r--  Objects/object.c | 141
-rw-r--r--  Objects/obmalloc.c | 22
-rw-r--r--  Objects/typeobject.c | 684
-rw-r--r--  Objects/typevarobject.c | 4
-rw-r--r--  Objects/unicodeobject.c | 14
-rw-r--r--  Objects/unionobject.c | 8
-rwxr-xr-x  PC/python3dll.c | 4
-rw-r--r--  PCbuild/_testclinic_limited.vcxproj | 1
-rwxr-xr-x  PCbuild/get_external.py | 23
-rw-r--r--  PCbuild/pyproject.props | 11
-rwxr-xr-x  Parser/asdl_c.py | 6
-rw-r--r--  Parser/parser.c | 2
-rw-r--r--  Programs/test_frozenmain.h | 2
-rw-r--r--  Python/Python-ast.c | 6
-rw-r--r--  Python/_warnings.c | 3
-rw-r--r--  Python/asm_trampoline.S | 22
-rw-r--r--  Python/bltinmodule.c | 11
-rw-r--r--  Python/bytecodes.c | 300
-rw-r--r--  Python/ceval.c | 36
-rw-r--r--  Python/codegen.c | 41
-rw-r--r--  Python/context.c | 10
-rw-r--r--  Python/crossinterp.c | 456
-rw-r--r--  Python/crossinterp_data_lookup.h | 8
-rw-r--r--  Python/errors.c | 5
-rw-r--r--  Python/executor_cases.c.h | 189
-rw-r--r--  Python/flowgraph.c | 17
-rw-r--r--  Python/gc_free_threading.c | 5
-rw-r--r--  Python/generated_cases.c.h | 289
-rw-r--r--  Python/hamt.c | 6
-rw-r--r--  Python/import.c | 16
-rw-r--r--  Python/initconfig.c | 8
-rw-r--r--  Python/intrinsics.c | 3
-rw-r--r--  Python/lock.c | 3
-rw-r--r--  Python/modsupport.c | 2
-rw-r--r--  Python/optimizer_bytecodes.c | 11
-rw-r--r--  Python/optimizer_cases.c.h | 17
-rw-r--r--  Python/parking_lot.c | 22
-rw-r--r--  Python/perf_jit_trampoline.c | 4
-rw-r--r--  Python/pylifecycle.c | 8
-rw-r--r--  Python/pythonrun.c | 18
-rw-r--r--  Python/remote_debug.h | 283
-rw-r--r--  Python/specialize.c | 78
-rw-r--r--  Python/stackrefs.c | 8
-rw-r--r--  Python/sysmodule.c | 36
-rw-r--r--  Python/thread.c | 82
-rw-r--r--  Python/thread_nt.h | 92
-rw-r--r--  Python/thread_pthread.h | 392
-rw-r--r--  Python/traceback.c | 5
-rw-r--r--  README.rst | 12
-rw-r--r--  Tools/build/generate_sbom.py | 13
-rw-r--r--  Tools/cases_generator/analyzer.py | 3
-rw-r--r--  Tools/cases_generator/generators_common.py | 1
-rw-r--r--  Tools/ftscalingbench/ftscalingbench.py | 13
-rw-r--r--  Tools/inspection/benchmark_external_inspection.py | 459
-rw-r--r--  Tools/msi/freethreaded/freethreaded_files.wxs | 2
-rw-r--r--  Tools/msi/lib/lib_files.wxs | 2
-rw-r--r--  Tools/peg_generator/pegen/c_generator.py | 2
-rw-r--r--  Tools/tsan/suppressions_free_threading.txt | 7
-rwxr-xr-x  configure | 254
-rw-r--r--  configure.ac | 126
-rw-r--r--  iOS/testbed/__main__.py | 2
-rw-r--r--  pyconfig.h.in | 16
345 files changed, 11019 insertions, 4782 deletions
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 64c85c1101e..8e09808f08b 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -1,5 +1,5 @@
{
- "image": "ghcr.io/python/devcontainer:2024.09.25.11038928730",
+ "image": "ghcr.io/python/devcontainer:2025.05.29.15334414373",
"onCreateCommand": [
// Install common tooling.
"dnf",
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 88b95766982..775d9c63260 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -331,3 +331,8 @@ Modules/_xxtestfuzz/ @ammaraskar
**/*templateobject* @lysnikolaou
**/*templatelib* @lysnikolaou
**/*tstring* @lysnikolaou
+
+# Remote debugging
+Python/remote_debug.h @pablogsal
+Python/remote_debugging.c @pablogsal
+Modules/_remote_debugging_module.c @pablogsal @ambv @1st1
diff --git a/.github/workflows/tail-call.yml b/.github/workflows/tail-call.yml
index 4636372e26c..e32cbf0aaa3 100644
--- a/.github/workflows/tail-call.yml
+++ b/.github/workflows/tail-call.yml
@@ -137,4 +137,3 @@ jobs:
CC=clang-20 ./configure --with-tail-call-interp --disable-gil
make all --jobs 4
./python -m test --multiprocess 0 --timeout 4500 --verbose2 --verbose3
-
diff --git a/.gitignore b/.gitignore
index 2a6f249275c..cdb0352e0a8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -171,5 +171,10 @@ Python/frozen_modules/MANIFEST
/python
!/Python/
+# People's custom https://docs.anthropic.com/en/docs/claude-code/memory configs.
+/.claude/
+CLAUDE.local.md
+
+#### main branch only stuff below this line, things to backport go above. ####
# main branch only: ABI files are not checked/maintained.
Doc/data/python*.abi
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3632cf39203..822a8a9f4e5 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -43,12 +43,14 @@ repos:
exclude: ^Lib/test/test_tomllib/
- id: check-yaml
- id: end-of-file-fixer
- types: [python]
+ types_or: [python, yaml]
exclude: Lib/test/tokenizedata/coding20731.py
+ - id: end-of-file-fixer
+ files: '^\.github/CODEOWNERS$'
- id: trailing-whitespace
- types_or: [c, inc, python, rst]
+ types_or: [c, inc, python, rst, yaml]
- id: trailing-whitespace
- files: '\.(gram)$'
+ files: '^\.github/CODEOWNERS|\.(gram)$'
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.33.0
diff --git a/.readthedocs.yml b/.readthedocs.yml
index a57de00544e..0a2c3f83453 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -32,4 +32,3 @@ build:
- make -C Doc venv html
- mkdir _readthedocs
- mv Doc/build/html _readthedocs/html
-
diff --git a/Doc/c-api/allocation.rst b/Doc/c-api/allocation.rst
index f8d01a3f29b..59d913a0462 100644
--- a/Doc/c-api/allocation.rst
+++ b/Doc/c-api/allocation.rst
@@ -153,6 +153,6 @@ Allocating Objects on the Heap
.. seealso::
- :c:func:`PyModule_Create`
+ :ref:`moduleobjects`
To allocate and create extension modules.
diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst
index e1931655618..4fd10224262 100644
--- a/Doc/c-api/init_config.rst
+++ b/Doc/c-api/init_config.rst
@@ -2111,7 +2111,7 @@ initialization::
/* Specify sys.path explicitly */
/* If you want to modify the default set of paths, finish
- initialization first and then use PySys_GetObject("path") */
+ initialization first and then use PySys_GetAttrString("path") */
config.module_search_paths_set = 1;
status = PyWideStringList_Append(&config.module_search_paths,
L"/path/to/stdlib");
diff --git a/Doc/c-api/intro.rst b/Doc/c-api/intro.rst
index 0c20ad17194..41856922110 100644
--- a/Doc/c-api/intro.rst
+++ b/Doc/c-api/intro.rst
@@ -127,7 +127,7 @@ complete listing.
item defined in the module file. Example::
static struct PyModuleDef spam_module = {
- PyModuleDef_HEAD_INIT,
+ .m_base = PyModuleDef_HEAD_INIT,
.m_name = "spam",
...
};
@@ -135,7 +135,7 @@ complete listing.
PyMODINIT_FUNC
PyInit_spam(void)
{
- return PyModule_Create(&spam_module);
+ return PyModuleDef_Init(&spam_module);
}
@@ -838,3 +838,41 @@ after every statement run by the interpreter.)
Please refer to :file:`Misc/SpecialBuilds.txt` in the Python source distribution
for more detailed information.
+
+
+.. _c-api-tools:
+
+Recommended third party tools
+=============================
+
+The following third party tools offer both simpler and more sophisticated
+approaches to creating C, C++ and Rust extensions for Python:
+
+* `Cython <https://cython.org/>`_
+* `cffi <https://cffi.readthedocs.io>`_
+* `HPy <https://hpyproject.org/>`_
+* `nanobind <https://github.com/wjakob/nanobind>`_ (C++)
+* `Numba <https://numba.pydata.org/>`_
+* `pybind11 <https://pybind11.readthedocs.io/>`_ (C++)
+* `PyO3 <https://pyo3.rs/>`_ (Rust)
+* `SWIG <https://www.swig.org>`_
+
+Using tools such as these can help avoid writing code that is tightly bound to
+a particular version of CPython, avoid reference counting errors, and focus
+more on your own code than on using the CPython API. In general, new versions
+of Python can be supported by updating the tool, and your code will often use
+newer and more efficient APIs automatically. Some tools also support compiling
+for other implementations of Python from a single set of sources.
+
+These projects are not supported by the same people who maintain Python, and
+issues need to be raised with the projects directly. Remember to check that the
+project is still maintained and supported, as the list above may become
+outdated.
+
+.. seealso::
+
+ `Python Packaging User Guide: Binary Extensions <https://packaging.python.org/guides/packaging-binary-extensions/>`_
+ The Python Packaging User Guide not only covers several available
+ tools that simplify the creation of binary extensions, but also
+ discusses the various reasons why creating an extension module may be
+ desirable in the first place.
diff --git a/Doc/c-api/lifecycle.rst b/Doc/c-api/lifecycle.rst
index 0e2ffc096ca..5a170862a26 100644
--- a/Doc/c-api/lifecycle.rst
+++ b/Doc/c-api/lifecycle.rst
@@ -55,16 +55,14 @@ that must be true for *B* to occur after *A*.
.. image:: lifecycle.dot.svg
:align: center
:class: invert-in-dark-mode
- :alt: Diagram showing events in an object's life. Explained in detail
- below.
+ :alt: Diagram showing events in an object's life. Explained in detail below.
.. only:: latex
.. image:: lifecycle.dot.pdf
:align: center
:class: invert-in-dark-mode
- :alt: Diagram showing events in an object's life. Explained in detail
- below.
+ :alt: Diagram showing events in an object's life. Explained in detail below.
.. container::
:name: life-events-graph-description
diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst
index f7f4d37d4c7..710135dca89 100644
--- a/Doc/c-api/module.rst
+++ b/Doc/c-api/module.rst
@@ -288,22 +288,40 @@ An alternate way to specify extensions is to request "multi-phase initialization
Extension modules created this way behave more like Python modules: the
initialization is split between the *creation phase*, when the module object
is created, and the *execution phase*, when it is populated.
-The distinction is similar to the :py:meth:`!__new__` and :py:meth:`!__init__` methods
-of classes.
+The distinction is similar to the :py:meth:`~object.__new__` and
+:py:meth:`~object.__init__` methods of classes.
Unlike modules created using single-phase initialization, these modules are not
-singletons: if the *sys.modules* entry is removed and the module is re-imported,
-a new module object is created, and the old module is subject to normal garbage
-collection -- as with Python modules.
-By default, multiple modules created from the same definition should be
-independent: changes to one should not affect the others.
-This means that all state should be specific to the module object (using e.g.
-using :c:func:`PyModule_GetState`), or its contents (such as the module's
-:attr:`~object.__dict__` or individual classes created with :c:func:`PyType_FromSpec`).
+singletons.
+For example, if the :py:attr:`sys.modules` entry is removed and the module
+is re-imported, a new module object is created, and typically populated with
+fresh method and type objects.
+The old module is subject to normal garbage collection.
+This mirrors the behavior of pure-Python modules.
+
+Additional module instances may be created in
+:ref:`sub-interpreters <sub-interpreter-support>`
+or after Python runtime reinitialization
+(:c:func:`Py_Finalize` and :c:func:`Py_Initialize`).
+In these cases, sharing Python objects between module instances would likely
+cause crashes or undefined behavior.
+
+To avoid such issues, each instance of an extension module should
+be *isolated*: changes to one instance should not implicitly affect the others,
+and all state, including references to Python objects, should be specific to
+a particular module instance.
+See :ref:`isolating-extensions-howto` for more details and a practical guide.
+
+A simpler way to avoid these issues is
+:ref:`raising an error on repeated initialization <isolating-extensions-optout>`.
All modules created using multi-phase initialization are expected to support
-:ref:`sub-interpreters <sub-interpreter-support>`. Making sure multiple modules
-are independent is typically enough to achieve this.
+:ref:`sub-interpreters <sub-interpreter-support>`, or otherwise explicitly
+signal a lack of support.
+This is usually achieved by isolation or blocking repeated initialization,
+as above.
+A module may also be limited to the main interpreter using
+the :c:data:`Py_mod_multiple_interpreters` slot.
To request multi-phase initialization, the initialization function
(PyInit_modulename) returns a :c:type:`PyModuleDef` instance with non-empty
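
The isolation described above usually means keeping every piece of state,
including references to Python objects, in the per-module memory returned by
:c:func:`PyModule_GetState`. A minimal sketch of such a module follows; the
module name, the single ``counter`` field, and the function names are
illustrative only::

    /* Sketch only: all state lives in per-module storage, so separate
       module objects and sub-interpreters never share Python objects. */
    #include <Python.h>

    typedef struct {
        PyObject *counter;            /* example per-module state */
    } example_state;

    static int
    example_exec(PyObject *m)
    {
        example_state *state = PyModule_GetState(m);
        state->counter = PyLong_FromLong(0);
        if (state->counter == NULL) {
            return -1;
        }
        return 0;
    }

    static int
    example_traverse(PyObject *m, visitproc visit, void *arg)
    {
        example_state *state = PyModule_GetState(m);
        Py_VISIT(state->counter);
        return 0;
    }

    static int
    example_clear(PyObject *m)
    {
        example_state *state = PyModule_GetState(m);
        Py_CLEAR(state->counter);
        return 0;
    }

    static PyModuleDef_Slot example_slots[] = {
        {Py_mod_exec, example_exec},
        /* Fully isolated state makes per-interpreter GIL support possible. */
        {Py_mod_multiple_interpreters, Py_MOD_PER_INTERPRETER_GIL_SUPPORTED},
        {0, NULL}
    };

    static struct PyModuleDef example_module = {
        .m_base = PyModuleDef_HEAD_INIT,
        .m_name = "example",
        .m_size = sizeof(example_state),   /* per-module state, no globals */
        .m_slots = example_slots,
        .m_traverse = example_traverse,
        .m_clear = example_clear,
    };

    PyMODINIT_FUNC
    PyInit_example(void)
    {
        return PyModuleDef_Init(&example_module);
    }

Because ``m_size`` is positive, each module object gets its own zero-initialized
copy of ``example_state``, which is what makes re-import and sub-interpreter use
safe in this sketch.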
diff --git a/Doc/c-api/stable.rst b/Doc/c-api/stable.rst
index 124e58cf950..9b65e0b8d23 100644
--- a/Doc/c-api/stable.rst
+++ b/Doc/c-api/stable.rst
@@ -51,6 +51,7 @@ It is generally intended for specialized, low-level tools like debuggers.
Projects that use this API are expected to follow
CPython development and spend extra effort adjusting to changes.
+.. _stable-application-binary-interface:
Stable Application Binary Interface
===================================
diff --git a/Doc/c-api/sys.rst b/Doc/c-api/sys.rst
index b3c89800e38..b34936dd55e 100644
--- a/Doc/c-api/sys.rst
+++ b/Doc/c-api/sys.rst
@@ -258,10 +258,57 @@ These are utility functions that make functionality from the :mod:`sys` module
accessible to C code. They all work with the current interpreter thread's
:mod:`sys` module's dict, which is contained in the internal thread state structure.
+.. c:function:: PyObject *PySys_GetAttr(PyObject *name)
+
+ Get the attribute *name* of the :mod:`sys` module.
+ Return a :term:`strong reference`.
+ Raise :exc:`RuntimeError` and return ``NULL`` if it does not exist or
+ if the :mod:`sys` module cannot be found.
+
+ If the non-existing object should not be treated as a failure, you can use
+ :c:func:`PySys_GetOptionalAttr` instead.
+
+ .. versionadded:: next
+
+.. c:function:: PyObject *PySys_GetAttrString(const char *name)
+
+ This is the same as :c:func:`PySys_GetAttr`, but *name* is
+ specified as a :c:expr:`const char*` UTF-8 encoded bytes string,
+ rather than a :c:expr:`PyObject*`.
+
+ If the non-existing object should not be treated as a failure, you can use
+ :c:func:`PySys_GetOptionalAttrString` instead.
+
+ .. versionadded:: next
+
+.. c:function:: int PySys_GetOptionalAttr(PyObject *name, PyObject **result)
+
+ Variant of :c:func:`PySys_GetAttr` which doesn't raise
+ an exception if the object does not exist.
+
+ * Set *\*result* to a new :term:`strong reference` to the object and
+ return ``1`` if the object exists.
+ * Set *\*result* to ``NULL`` and return ``0`` without setting an exception
+ if the object does not exist.
+ * Set an exception, set *\*result* to ``NULL``, and return ``-1``,
+ if an error occurred.
+
+ .. versionadded:: next
+
+.. c:function:: int PySys_GetOptionalAttrString(const char *name, PyObject **result)
+
+ This is the same as :c:func:`PySys_GetOptionalAttr`, but *name* is
+ specified as a :c:expr:`const char*` UTF-8 encoded bytes string,
+ rather than a :c:expr:`PyObject*`.
+
+ .. versionadded:: next
+
.. c:function:: PyObject *PySys_GetObject(const char *name)
- Return the object *name* from the :mod:`sys` module or ``NULL`` if it does
- not exist, without setting an exception.
+ Similar to :c:func:`PySys_GetAttrString`, but return a :term:`borrowed
+ reference` and return ``NULL`` *without* setting an exception on failure.
+
+ Preserves any exception that was set before the call.
.. c:function:: int PySys_SetObject(const char *name, PyObject *v)
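
A short usage sketch of the getters documented above; the helper name and the
choice of :data:`sys.path` are illustrative, and error handling is abbreviated::

    #include <Python.h>
    #include <stdio.h>

    /* Print the number of sys.path entries; a missing sys.path is not
       treated as an error here. */
    static int
    report_sys_path(void)
    {
        PyObject *path = NULL;
        int rc = PySys_GetOptionalAttrString("path", &path);
        if (rc < 0) {
            return -1;                     /* exception already set */
        }
        if (rc == 0) {
            printf("sys.path is not set\n");
            return 0;
        }
        Py_ssize_t n = PyList_Size(path);  /* 'path' is a strong reference */
        Py_DECREF(path);
        if (n < 0) {
            return -1;                     /* not a list: exception set */
        }
        printf("sys.path has %zd entries\n", n);
        return 0;
    }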
diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst
index cdd90d05b70..45f50ba5f97 100644
--- a/Doc/c-api/unicode.rst
+++ b/Doc/c-api/unicode.rst
@@ -1461,10 +1461,6 @@ the user settings on the machine running the codec.
.. versionadded:: 3.3
-Methods & Slots
-"""""""""""""""
-
-
.. _unicodemethodsandslots:
Methods and Slot Functions
@@ -1726,10 +1722,6 @@ They all return ``NULL`` or ``-1`` if an exception occurs.
from user input, prefer calling :c:func:`PyUnicode_FromString` and
:c:func:`PyUnicode_InternInPlace` directly.
- .. impl-detail::
-
- Strings interned this way are made :term:`immortal`.
-
.. c:function:: unsigned int PyUnicode_CHECK_INTERNED(PyObject *str)
@@ -1806,9 +1798,24 @@ object.
See also :c:func:`PyUnicodeWriter_DecodeUTF8Stateful`.
+.. c:function:: int PyUnicodeWriter_WriteASCII(PyUnicodeWriter *writer, const char *str, Py_ssize_t size)
+
+ Write the ASCII string *str* into *writer*.
+
+ *size* is the string length in bytes. If *size* is equal to ``-1``, call
+ ``strlen(str)`` to get the string length.
+
+ *str* must only contain ASCII characters. The behavior is undefined if
+ *str* contains non-ASCII characters.
+
+ On success, return ``0``.
+ On error, set an exception, leave the writer unchanged, and return ``-1``.
+
+ .. versionadded:: next
+
.. c:function:: int PyUnicodeWriter_WriteWideChar(PyUnicodeWriter *writer, const wchar_t *str, Py_ssize_t size)
- Writer the wide string *str* into *writer*.
+ Write the wide string *str* into *writer*.
*size* is a number of wide characters. If *size* is equal to ``-1``, call
``wcslen(str)`` to get the string length.
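
A short usage sketch of the new :c:func:`PyUnicodeWriter_WriteASCII` together
with the rest of the writer API; the helper name is illustrative and the *who*
argument is assumed to be pure ASCII::

    #include <Python.h>

    /* Build "hello, <who>" as a str object. 'who' must be ASCII-only,
       since PyUnicodeWriter_WriteASCII() is undefined for other input. */
    static PyObject *
    make_greeting(const char *who)
    {
        PyUnicodeWriter *writer = PyUnicodeWriter_Create(0);
        if (writer == NULL) {
            return NULL;
        }
        if (PyUnicodeWriter_WriteASCII(writer, "hello, ", 7) < 0
            || PyUnicodeWriter_WriteASCII(writer, who, -1) < 0)
        {
            PyUnicodeWriter_Discard(writer);
            return NULL;
        }
        /* Finish() destroys the writer and returns the final str. */
        return PyUnicodeWriter_Finish(writer);
    }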
diff --git a/Doc/data/stable_abi.dat b/Doc/data/stable_abi.dat
index e71a40e55e9..0d0dfb38432 100644
--- a/Doc/data/stable_abi.dat
+++ b/Doc/data/stable_abi.dat
@@ -628,7 +628,11 @@ func,PySys_Audit,3.13,,
func,PySys_AuditTuple,3.13,,
func,PySys_FormatStderr,3.2,,
func,PySys_FormatStdout,3.2,,
+func,PySys_GetAttr,3.15,,
+func,PySys_GetAttrString,3.15,,
func,PySys_GetObject,3.2,,
+func,PySys_GetOptionalAttr,3.15,,
+func,PySys_GetOptionalAttrString,3.15,,
func,PySys_GetXOptions,3.7,,
func,PySys_ResetWarnOptions,3.2,,
func,PySys_SetArgv,3.2,,
diff --git a/Doc/deprecations/pending-removal-in-3.19.rst b/Doc/deprecations/pending-removal-in-3.19.rst
index 3936f63ca5b..25f9cba390d 100644
--- a/Doc/deprecations/pending-removal-in-3.19.rst
+++ b/Doc/deprecations/pending-removal-in-3.19.rst
@@ -6,3 +6,19 @@ Pending removal in Python 3.19
* Implicitly switching to the MSVC-compatible struct layout by setting
:attr:`~ctypes.Structure._pack_` but not :attr:`~ctypes.Structure._layout_`
on non-Windows platforms.
+
+* :mod:`hashlib`:
+
+ - In hash function constructors such as :func:`~hashlib.new` or the
+ direct hash-named constructors such as :func:`~hashlib.md5` and
+ :func:`~hashlib.sha256`, their optional initial data parameter could
+ also be passed a keyword argument named ``data=`` or ``string=`` in
+ various :mod:`!hashlib` implementations.
+
+ Support for the ``string`` keyword argument name is now deprecated
+ and slated for removal in Python 3.19.
+
+ Before Python 3.13, the ``string`` keyword parameter was not correctly
+ supported depending on the backend implementation of hash functions.
+ Prefer passing the initial data as a positional argument for maximum
+ backwards compatibility.
diff --git a/Doc/extending/building.rst b/Doc/extending/building.rst
index ddde567f6f3..a58eb40d431 100644
--- a/Doc/extending/building.rst
+++ b/Doc/extending/building.rst
@@ -23,10 +23,10 @@ instance. See :ref:`initializing-modules` for details.
.. highlight:: python
For modules with ASCII-only names, the function must be named
-``PyInit_<modulename>``, with ``<modulename>`` replaced by the name of the
-module. When using :ref:`multi-phase-initialization`, non-ASCII module names
+:samp:`PyInit_{<name>}`, with ``<name>`` replaced by the name of the module.
+When using :ref:`multi-phase-initialization`, non-ASCII module names
are allowed. In this case, the initialization function name is
-``PyInitU_<modulename>``, with ``<modulename>`` encoded using Python's
+:samp:`PyInitU_{<name>}`, with ``<name>`` encoded using Python's
*punycode* encoding with hyphens replaced by underscores. In Python::
def initfunc_name(name):
diff --git a/Doc/extending/embedding.rst b/Doc/extending/embedding.rst
index b777862da79..cb41889437c 100644
--- a/Doc/extending/embedding.rst
+++ b/Doc/extending/embedding.rst
@@ -245,21 +245,23 @@ Python extension. For example::
return PyLong_FromLong(numargs);
}
- static PyMethodDef EmbMethods[] = {
+ static PyMethodDef emb_module_methods[] = {
{"numargs", emb_numargs, METH_VARARGS,
"Return the number of arguments received by the process."},
{NULL, NULL, 0, NULL}
};
- static PyModuleDef EmbModule = {
- PyModuleDef_HEAD_INIT, "emb", NULL, -1, EmbMethods,
- NULL, NULL, NULL, NULL
+ static struct PyModuleDef emb_module = {
+ .m_base = PyModuleDef_HEAD_INIT,
+ .m_name = "emb",
+ .m_size = 0,
+ .m_methods = emb_module_methods,
};
static PyObject*
PyInit_emb(void)
{
- return PyModule_Create(&EmbModule);
+ return PyModuleDef_Init(&emb_module);
}
Insert the above code just above the :c:func:`main` function. Also, insert the
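
A minimal sketch of how the ``emb`` module is then made importable when
embedding: it must be registered before the interpreter is initialized (the
``main`` body here is illustrative and error handling is abbreviated)::

    int
    main(int argc, char *argv[])
    {
        numargs = argc;   /* consumed by emb.numargs(), defined above */

        /* Make "import emb" find the built-in module defined above. */
        if (PyImport_AppendInittab("emb", &PyInit_emb) == -1) {
            fprintf(stderr, "could not extend the built-in modules table\n");
            return 1;
        }

        Py_Initialize();
        PyRun_SimpleString("import emb\n"
                           "print('Number of arguments:', emb.numargs())");
        if (Py_FinalizeEx() < 0) {
            return 1;
        }
        return 0;
    }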
diff --git a/Doc/extending/extending.rst b/Doc/extending/extending.rst
index b0493bed75b..fd634956746 100644
--- a/Doc/extending/extending.rst
+++ b/Doc/extending/extending.rst
@@ -203,31 +203,57 @@ function usually raises :c:data:`PyExc_TypeError`. If you have an argument whos
value must be in a particular range or must satisfy other conditions,
:c:data:`PyExc_ValueError` is appropriate.
-You can also define a new exception that is unique to your module. For this, you
-usually declare a static object variable at the beginning of your file::
+You can also define a new exception that is unique to your module.
+The simplest way to do this is to declare a static global object variable at
+the beginning of the file::
- static PyObject *SpamError;
+ static PyObject *SpamError = NULL;
-and initialize it in your module's initialization function (:c:func:`!PyInit_spam`)
-with an exception object::
+and initialize it by calling :c:func:`PyErr_NewException` in the module's
+:c:data:`Py_mod_exec` function (:c:func:`!spam_module_exec`)::
- PyMODINIT_FUNC
- PyInit_spam(void)
- {
- PyObject *m;
+ SpamError = PyErr_NewException("spam.error", NULL, NULL);
- m = PyModule_Create(&spammodule);
- if (m == NULL)
- return NULL;
+Since :c:data:`!SpamError` is a global variable, it will be overwritten every time
+the module is reinitialized, when the :c:data:`Py_mod_exec` function is called.
+
+For now, let's avoid the issue: we will block repeated initialization by raising an
+:py:exc:`ImportError`::
+ static PyObject *SpamError = NULL;
+
+ static int
+ spam_module_exec(PyObject *m)
+ {
+ if (SpamError != NULL) {
+ PyErr_SetString(PyExc_ImportError,
+ "cannot initialize spam module more than once");
+ return -1;
+ }
SpamError = PyErr_NewException("spam.error", NULL, NULL);
- if (PyModule_AddObjectRef(m, "error", SpamError) < 0) {
- Py_CLEAR(SpamError);
- Py_DECREF(m);
- return NULL;
+ if (PyModule_AddObjectRef(m, "SpamError", SpamError) < 0) {
+ return -1;
}
- return m;
+ return 0;
+ }
+
+ static PyModuleDef_Slot spam_module_slots[] = {
+ {Py_mod_exec, spam_module_exec},
+ {0, NULL}
+ };
+
+ static struct PyModuleDef spam_module = {
+ .m_base = PyModuleDef_HEAD_INIT,
+ .m_name = "spam",
+ .m_size = 0, // non-negative
+ .m_slots = spam_module_slots,
+ };
+
+ PyMODINIT_FUNC
+ PyInit_spam(void)
+ {
+ return PyModuleDef_Init(&spam_module);
}
Note that the Python name for the exception object is :exc:`!spam.error`. The
@@ -242,6 +268,11 @@ needed to ensure that it will not be discarded, causing :c:data:`!SpamError` to
become a dangling pointer. Should it become a dangling pointer, C code which
raises the exception could cause a core dump or other unintended side effects.
+For now, the :c:func:`Py_DECREF` call to remove this reference is missing.
+Even when the Python interpreter shuts down, the global :c:data:`!SpamError`
+variable will not be garbage-collected. It will "leak".
+We did, however, ensure that this will happen at most once per process.
+
We discuss the use of :c:macro:`PyMODINIT_FUNC` as a function return type later in this
sample.
@@ -318,7 +349,7 @@ The Module's Method Table and Initialization Function
I promised to show how :c:func:`!spam_system` is called from Python programs.
First, we need to list its name and address in a "method table"::
- static PyMethodDef SpamMethods[] = {
+ static PyMethodDef spam_methods[] = {
...
{"system", spam_system, METH_VARARGS,
"Execute a shell command."},
@@ -343,13 +374,10 @@ function.
The method table must be referenced in the module definition structure::
- static struct PyModuleDef spammodule = {
- PyModuleDef_HEAD_INIT,
- "spam", /* name of module */
- spam_doc, /* module documentation, may be NULL */
- -1, /* size of per-interpreter state of the module,
- or -1 if the module keeps state in global variables. */
- SpamMethods
+ static struct PyModuleDef spam_module = {
+ ...
+ .m_methods = spam_methods,
+ ...
};
This structure, in turn, must be passed to the interpreter in the module's
@@ -360,23 +388,17 @@ only non-\ ``static`` item defined in the module file::
PyMODINIT_FUNC
PyInit_spam(void)
{
- return PyModule_Create(&spammodule);
+ return PyModuleDef_Init(&spam_module);
}
Note that :c:macro:`PyMODINIT_FUNC` declares the function as ``PyObject *`` return type,
declares any special linkage declarations required by the platform, and for C++
declares the function as ``extern "C"``.
-When the Python program imports module :mod:`!spam` for the first time,
-:c:func:`!PyInit_spam` is called. (See below for comments about embedding Python.)
-It calls :c:func:`PyModule_Create`, which returns a module object, and
-inserts built-in function objects into the newly created module based upon the
-table (an array of :c:type:`PyMethodDef` structures) found in the module definition.
-:c:func:`PyModule_Create` returns a pointer to the module object
-that it creates. It may abort with a fatal error for
-certain errors, or return ``NULL`` if the module could not be initialized
-satisfactorily. The init function must return the module object to its caller,
-so that it then gets inserted into ``sys.modules``.
+:c:func:`!PyInit_spam` is called when each interpreter imports its module
+:mod:`!spam` for the first time. (See below for comments about embedding Python.)
+A pointer to the module definition must be returned via :c:func:`PyModuleDef_Init`,
+so that the import machinery can create the module and store it in ``sys.modules``.
When embedding Python, the :c:func:`!PyInit_spam` function is not called
automatically unless there's an entry in the :c:data:`PyImport_Inittab` table.
@@ -433,23 +455,19 @@ optionally followed by an import of the module::
.. note::
- Removing entries from ``sys.modules`` or importing compiled modules into
- multiple interpreters within a process (or following a :c:func:`fork` without an
- intervening :c:func:`exec`) can create problems for some extension modules.
- Extension module authors should exercise caution when initializing internal data
- structures.
+ If you declare a global variable or a local static one, the module may
+ experience unintended side-effects on re-initialisation, for example when
+ removing entries from ``sys.modules`` or importing compiled modules into
+ multiple interpreters within a process
+ (or following a :c:func:`fork` without an intervening :c:func:`exec`).
+ If module state is not yet fully :ref:`isolated <isolating-extensions-howto>`,
+ authors should consider marking the module as having no support for subinterpreters
+ (via :c:macro:`Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED`).
A more substantial example module is included in the Python source distribution
-as :file:`Modules/xxmodule.c`. This file may be used as a template or simply
+as :file:`Modules/xxlimited.c`. This file may be used as a template or simply
read as an example.
-.. note::
-
- Unlike our ``spam`` example, ``xxmodule`` uses *multi-phase initialization*
- (new in Python 3.5), where a PyModuleDef structure is returned from
- ``PyInit_spam``, and creation of the module is left to the import machinery.
- For details on multi-phase initialization, see :PEP:`489`.
-
.. _compilation:
@@ -790,18 +808,17 @@ Philbrick (philbrick@hks.com)::
{NULL, NULL, 0, NULL} /* sentinel */
};
- static struct PyModuleDef keywdargmodule = {
- PyModuleDef_HEAD_INIT,
- "keywdarg",
- NULL,
- -1,
- keywdarg_methods
+ static struct PyModuleDef keywdarg_module = {
+ .m_base = PyModuleDef_HEAD_INIT,
+ .m_name = "keywdarg",
+ .m_size = 0,
+ .m_methods = keywdarg_methods,
};
PyMODINIT_FUNC
PyInit_keywdarg(void)
{
- return PyModule_Create(&keywdargmodule);
+ return PyModuleDef_Init(&keywdarg_module);
}
@@ -1072,8 +1089,9 @@ why his :meth:`!__del__` methods would fail...
The second case of problems with a borrowed reference is a variant involving
threads. Normally, multiple threads in the Python interpreter can't get in each
-other's way, because there is a global lock protecting Python's entire object
-space. However, it is possible to temporarily release this lock using the macro
+other's way, because there is a :term:`global lock <global interpreter lock>`
+protecting Python's entire object space.
+However, it is possible to temporarily release this lock using the macro
:c:macro:`Py_BEGIN_ALLOW_THREADS`, and to re-acquire it using
:c:macro:`Py_END_ALLOW_THREADS`. This is common around blocking I/O calls, to
let other threads use the processor while waiting for the I/O to complete.
@@ -1259,20 +1277,15 @@ two more lines must be added::
#include "spammodule.h"
The ``#define`` is used to tell the header file that it is being included in the
-exporting module, not a client module. Finally, the module's initialization
-function must take care of initializing the C API pointer array::
+exporting module, not a client module. Finally, the module's :c:data:`mod_exec
+<Py_mod_exec>` function must take care of initializing the C API pointer array::
- PyMODINIT_FUNC
- PyInit_spam(void)
+ static int
+ spam_module_exec(PyObject *m)
{
- PyObject *m;
static void *PySpam_API[PySpam_API_pointers];
PyObject *c_api_object;
- m = PyModule_Create(&spammodule);
- if (m == NULL)
- return NULL;
-
/* Initialize the C API pointer array */
PySpam_API[PySpam_System_NUM] = (void *)PySpam_System;
@@ -1280,11 +1293,10 @@ function must take care of initializing the C API pointer array::
c_api_object = PyCapsule_New((void *)PySpam_API, "spam._C_API", NULL);
if (PyModule_Add(m, "_C_API", c_api_object) < 0) {
- Py_DECREF(m);
- return NULL;
+ return -1;
}
- return m;
+ return 0;
}
Note that ``PySpam_API`` is declared ``static``; otherwise the pointer
@@ -1343,20 +1355,16 @@ like this::
All that a client module must do in order to have access to the function
:c:func:`!PySpam_System` is to call the function (or rather macro)
-:c:func:`!import_spam` in its initialization function::
+:c:func:`!import_spam` in its :c:data:`mod_exec <Py_mod_exec>` function::
- PyMODINIT_FUNC
- PyInit_client(void)
+ static int
+ client_module_exec(PyObject *m)
{
- PyObject *m;
-
- m = PyModule_Create(&clientmodule);
- if (m == NULL)
- return NULL;
- if (import_spam() < 0)
- return NULL;
+ if (import_spam() < 0) {
+ return -1;
+ }
/* additional initialization can happen here */
- return m;
+ return 0;
}
The main disadvantage of this approach is that the file :file:`spammodule.h` is
diff --git a/Doc/extending/index.rst b/Doc/extending/index.rst
index 01b4df6d44a..4cc2c96d8d5 100644
--- a/Doc/extending/index.rst
+++ b/Doc/extending/index.rst
@@ -26,19 +26,9 @@ Recommended third party tools
=============================
This guide only covers the basic tools for creating extensions provided
-as part of this version of CPython. Third party tools like
-`Cython <https://cython.org/>`_, `cffi <https://cffi.readthedocs.io>`_,
-`SWIG <https://www.swig.org>`_ and `Numba <https://numba.pydata.org/>`_
-offer both simpler and more sophisticated approaches to creating C and C++
-extensions for Python.
-
-.. seealso::
-
- `Python Packaging User Guide: Binary Extensions <https://packaging.python.org/guides/packaging-binary-extensions/>`_
- The Python Packaging User Guide not only covers several available
- tools that simplify the creation of binary extensions, but also
- discusses the various reasons why creating an extension module may be
- desirable in the first place.
+as part of this version of CPython. Some :ref:`third party tools
+<c-api-tools>` offer both simpler and more sophisticated approaches to creating
+C and C++ extensions for Python.
Creating extensions without third party tools
@@ -49,6 +39,10 @@ assistance from third party tools. It is intended primarily for creators
of those tools, rather than being a recommended way to create your own
C extensions.
+.. seealso::
+
+ :pep:`489` -- Multi-phase extension module initialization
+
.. toctree::
:maxdepth: 2
:numbered:
diff --git a/Doc/extending/newtypes_tutorial.rst b/Doc/extending/newtypes_tutorial.rst
index 3fc91841416..f14690de4f8 100644
--- a/Doc/extending/newtypes_tutorial.rst
+++ b/Doc/extending/newtypes_tutorial.rst
@@ -55,8 +55,10 @@ from the previous chapter. This file defines three things:
#. How the :class:`!Custom` **type** behaves: this is the ``CustomType`` struct,
which defines a set of flags and function pointers that the interpreter
inspects when specific operations are requested.
-#. How to initialize the :mod:`!custom` module: this is the ``PyInit_custom``
- function and the associated ``custommodule`` struct.
+#. How to define and execute the :mod:`!custom` module: this is the
+ ``PyInit_custom`` function and the associated ``custom_module`` struct for
+ defining the module, and the ``custom_module_exec`` function to set up
+ a fresh module object.
The first bit is::
@@ -171,18 +173,18 @@ implementation provided by the API function :c:func:`PyType_GenericNew`. ::
.tp_new = PyType_GenericNew,
Everything else in the file should be familiar, except for some code in
-:c:func:`!PyInit_custom`::
+:c:func:`!custom_module_exec`::
- if (PyType_Ready(&CustomType) < 0)
- return;
+ if (PyType_Ready(&CustomType) < 0) {
+ return -1;
+ }
This initializes the :class:`!Custom` type, filling in a number of members
to the appropriate default values, including :c:member:`~PyObject.ob_type` that we initially
set to ``NULL``. ::
if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) {
- Py_DECREF(m);
- return NULL;
+ return -1;
}
This adds the type to the module dictionary. This allows us to create
@@ -875,27 +877,22 @@ but let the base class handle it by calling its own :c:member:`~PyTypeObject.tp_
The :c:type:`PyTypeObject` struct supports a :c:member:`~PyTypeObject.tp_base`
specifying the type's concrete base class. Due to cross-platform compiler
issues, you can't fill that field directly with a reference to
-:c:type:`PyList_Type`; it should be done later in the module initialization
+:c:type:`PyList_Type`; it should be done in the :c:data:`Py_mod_exec`
function::
- PyMODINIT_FUNC
- PyInit_sublist(void)
+ static int
+ sublist_module_exec(PyObject *m)
{
- PyObject* m;
SubListType.tp_base = &PyList_Type;
- if (PyType_Ready(&SubListType) < 0)
- return NULL;
-
- m = PyModule_Create(&sublistmodule);
- if (m == NULL)
- return NULL;
+ if (PyType_Ready(&SubListType) < 0) {
+ return -1;
+ }
if (PyModule_AddObjectRef(m, "SubList", (PyObject *) &SubListType) < 0) {
- Py_DECREF(m);
- return NULL;
+ return -1;
}
- return m;
+ return 0;
}
Before calling :c:func:`PyType_Ready`, the type structure must have the
diff --git a/Doc/extending/windows.rst b/Doc/extending/windows.rst
index 56aa44e4e58..a97c6182553 100644
--- a/Doc/extending/windows.rst
+++ b/Doc/extending/windows.rst
@@ -121,7 +121,7 @@ When creating DLLs in Windows, you can use the CPython library in two ways:
:file:`Python.h` triggers an implicit, configure-aware link with the
library. The header file chooses :file:`pythonXY_d.lib` for Debug,
:file:`pythonXY.lib` for Release, and :file:`pythonX.lib` for Release with
- the `Limited API <stable-application-binary-interface>`_ enabled.
+ the :ref:`Limited API <stable-application-binary-interface>` enabled.
To build two DLLs, spam and ni (which uses C functions found in spam), you
could use these commands::
diff --git a/Doc/faq/extending.rst b/Doc/faq/extending.rst
index 3147fda7c37..1d5abed2317 100644
--- a/Doc/faq/extending.rst
+++ b/Doc/faq/extending.rst
@@ -37,24 +37,9 @@ Writing C is hard; are there any alternatives?
----------------------------------------------
There are a number of alternatives to writing your own C extensions, depending
-on what you're trying to do.
-
-.. XXX make sure these all work
-
-`Cython <https://cython.org>`_ and its relative `Pyrex
-<https://www.csse.canterbury.ac.nz/greg.ewing/python/Pyrex/>`_ are compilers
-that accept a slightly modified form of Python and generate the corresponding
-C code. Cython and Pyrex make it possible to write an extension without having
-to learn Python's C API.
-
-If you need to interface to some C or C++ library for which no Python extension
-currently exists, you can try wrapping the library's data types and functions
-with a tool such as `SWIG <https://www.swig.org>`_. `SIP
-<https://github.com/Python-SIP/sip>`__, `CXX
-<https://cxx.sourceforge.net/>`_ `Boost
-<https://www.boost.org/libs/python/doc/index.html>`_, or `Weave
-<https://github.com/scipy/weave>`_ are also
-alternatives for wrapping C++ libraries.
+on what you're trying to do. :ref:`Recommended third party tools <c-api-tools>`
+offer both simpler and more sophisticated approaches to creating C and C++
+extensions for Python.
How can I execute arbitrary Python statements from C?
diff --git a/Doc/howto/cporting.rst b/Doc/howto/cporting.rst
index 7773620b40b..cf857aed042 100644
--- a/Doc/howto/cporting.rst
+++ b/Doc/howto/cporting.rst
@@ -14,13 +14,11 @@ We recommend the following resources for porting extension modules to Python 3:
module.
* The `Porting guide`_ from the *py3c* project provides opinionated
suggestions with supporting code.
-* The `Cython`_ and `CFFI`_ libraries offer abstractions over
- Python's C API.
+* :ref:`Recommended third party tools <c-api-tools>` offer abstractions over
+  Python's C API.
Extensions generally need to be re-written to use one of them,
but the library then handles differences between various Python
versions and implementations.
.. _Migrating C extensions: http://python3porting.com/cextensions.html
.. _Porting guide: https://py3c.readthedocs.io/en/latest/guide.html
-.. _Cython: https://cython.org/
-.. _CFFI: https://cffi.readthedocs.io/en/latest/
diff --git a/Doc/howto/curses.rst b/Doc/howto/curses.rst
index 6994a5328e8..816639552d7 100644
--- a/Doc/howto/curses.rst
+++ b/Doc/howto/curses.rst
@@ -161,6 +161,8 @@ your terminal won't be left in a funny state on exception and you'll be
able to read the exception's message and traceback.
+.. _windows-and-pads:
+
Windows and Pads
================
diff --git a/Doc/howto/isolating-extensions.rst b/Doc/howto/isolating-extensions.rst
index 5513cd73675..b2109b15039 100644
--- a/Doc/howto/isolating-extensions.rst
+++ b/Doc/howto/isolating-extensions.rst
@@ -168,7 +168,7 @@ possible, consider explicit locking.
If it is necessary to use process-global state, the simplest way to
avoid issues with multiple interpreters is to explicitly prevent a
module from being loaded more than once per process—see
-`Opt-Out: Limiting to One Module Object per Process`_.
+:ref:`isolating-extensions-optout`.
Managing Per-Module State
@@ -207,6 +207,8 @@ An example of a module with per-module state is currently available as
example module initialization shown at the bottom of the file.
+.. _isolating-extensions-optout:
+
Opt-Out: Limiting to One Module Object per Process
--------------------------------------------------
diff --git a/Doc/howto/urllib2.rst b/Doc/howto/urllib2.rst
index 33a2a7ea89e..d79d1abe8d0 100644
--- a/Doc/howto/urllib2.rst
+++ b/Doc/howto/urllib2.rst
@@ -245,75 +245,27 @@ codes in the 100--299 range indicate success, you will usually only see error
codes in the 400--599 range.
:attr:`http.server.BaseHTTPRequestHandler.responses` is a useful dictionary of
-response codes in that shows all the response codes used by :rfc:`2616`. The
-dictionary is reproduced here for convenience ::
+response codes that shows all the response codes used by :rfc:`2616`.
+An excerpt from the dictionary is shown below ::
- # Table mapping response codes to messages; entries have the
- # form {code: (shortmessage, longmessage)}.
responses = {
- 100: ('Continue', 'Request received, please continue'),
- 101: ('Switching Protocols',
- 'Switching to new protocol; obey Upgrade header'),
-
- 200: ('OK', 'Request fulfilled, document follows'),
- 201: ('Created', 'Document created, URL follows'),
- 202: ('Accepted',
- 'Request accepted, processing continues off-line'),
- 203: ('Non-Authoritative Information', 'Request fulfilled from cache'),
- 204: ('No Content', 'Request fulfilled, nothing follows'),
- 205: ('Reset Content', 'Clear input form for further input.'),
- 206: ('Partial Content', 'Partial content follows.'),
-
- 300: ('Multiple Choices',
- 'Object has several resources -- see URI list'),
- 301: ('Moved Permanently', 'Object moved permanently -- see URI list'),
- 302: ('Found', 'Object moved temporarily -- see URI list'),
- 303: ('See Other', 'Object moved -- see Method and URL list'),
- 304: ('Not Modified',
- 'Document has not changed since given time'),
- 305: ('Use Proxy',
- 'You must use proxy specified in Location to access this '
- 'resource.'),
- 307: ('Temporary Redirect',
- 'Object moved temporarily -- see URI list'),
-
- 400: ('Bad Request',
- 'Bad request syntax or unsupported method'),
- 401: ('Unauthorized',
- 'No permission -- see authorization schemes'),
- 402: ('Payment Required',
- 'No payment -- see charging schemes'),
- 403: ('Forbidden',
- 'Request forbidden -- authorization will not help'),
- 404: ('Not Found', 'Nothing matches the given URI'),
- 405: ('Method Not Allowed',
- 'Specified method is invalid for this server.'),
- 406: ('Not Acceptable', 'URI not available in preferred format.'),
- 407: ('Proxy Authentication Required', 'You must authenticate with '
- 'this proxy before proceeding.'),
- 408: ('Request Timeout', 'Request timed out; try again later.'),
- 409: ('Conflict', 'Request conflict.'),
- 410: ('Gone',
- 'URI no longer exists and has been permanently removed.'),
- 411: ('Length Required', 'Client must specify Content-Length.'),
- 412: ('Precondition Failed', 'Precondition in headers is false.'),
- 413: ('Request Entity Too Large', 'Entity is too large.'),
- 414: ('Request-URI Too Long', 'URI is too long.'),
- 415: ('Unsupported Media Type', 'Entity body in unsupported format.'),
- 416: ('Requested Range Not Satisfiable',
- 'Cannot satisfy request range.'),
- 417: ('Expectation Failed',
- 'Expect condition could not be satisfied.'),
-
- 500: ('Internal Server Error', 'Server got itself in trouble'),
- 501: ('Not Implemented',
- 'Server does not support this operation'),
- 502: ('Bad Gateway', 'Invalid responses from another server/proxy.'),
- 503: ('Service Unavailable',
- 'The server cannot process the request due to a high load'),
- 504: ('Gateway Timeout',
- 'The gateway server did not receive a timely response'),
- 505: ('HTTP Version Not Supported', 'Cannot fulfill request.'),
+ ...
+ <HTTPStatus.OK: 200>: ('OK', 'Request fulfilled, document follows'),
+ ...
+ <HTTPStatus.FORBIDDEN: 403>: ('Forbidden',
+ 'Request forbidden -- authorization will '
+ 'not help'),
+ <HTTPStatus.NOT_FOUND: 404>: ('Not Found',
+ 'Nothing matches the given URI'),
+ ...
+ <HTTPStatus.IM_A_TEAPOT: 418>: ("I'm a Teapot",
+ 'Server refuses to brew coffee because '
+ 'it is a teapot'),
+ ...
+ <HTTPStatus.SERVICE_UNAVAILABLE: 503>: ('Service Unavailable',
+ 'The server cannot process the '
+ 'request due to a high load'),
+ ...
}
When an error is raised the server responds by returning an HTTP error code
diff --git a/Doc/includes/newtypes/custom.c b/Doc/includes/newtypes/custom.c
index 5253f879360..039a1a72193 100644
--- a/Doc/includes/newtypes/custom.c
+++ b/Doc/includes/newtypes/custom.c
@@ -16,28 +16,37 @@ static PyTypeObject CustomType = {
.tp_new = PyType_GenericNew,
};
-static PyModuleDef custommodule = {
+static int
+custom_module_exec(PyObject *m)
+{
+ if (PyType_Ready(&CustomType) < 0) {
+ return -1;
+ }
+
+ if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) {
+ return -1;
+ }
+
+ return 0;
+}
+
+static PyModuleDef_Slot custom_module_slots[] = {
+ {Py_mod_exec, custom_module_exec},
+ // Just use this while using static types
+ {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED},
+ {0, NULL}
+};
+
+static PyModuleDef custom_module = {
.m_base = PyModuleDef_HEAD_INIT,
.m_name = "custom",
.m_doc = "Example module that creates an extension type.",
- .m_size = -1,
+ .m_size = 0,
+ .m_slots = custom_module_slots,
};
PyMODINIT_FUNC
PyInit_custom(void)
{
- PyObject *m;
- if (PyType_Ready(&CustomType) < 0)
- return NULL;
-
- m = PyModule_Create(&custommodule);
- if (m == NULL)
- return NULL;
-
- if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) {
- Py_DECREF(m);
- return NULL;
- }
-
- return m;
+ return PyModuleDef_Init(&custom_module);
}
diff --git a/Doc/includes/newtypes/custom2.c b/Doc/includes/newtypes/custom2.c
index a87917583ca..1ff8e707d1b 100644
--- a/Doc/includes/newtypes/custom2.c
+++ b/Doc/includes/newtypes/custom2.c
@@ -106,28 +106,36 @@ static PyTypeObject CustomType = {
.tp_methods = Custom_methods,
};
-static PyModuleDef custommodule = {
- .m_base =PyModuleDef_HEAD_INIT,
+static int
+custom_module_exec(PyObject *m)
+{
+ if (PyType_Ready(&CustomType) < 0) {
+ return -1;
+ }
+
+ if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) {
+ return -1;
+ }
+
+ return 0;
+}
+
+static PyModuleDef_Slot custom_module_slots[] = {
+ {Py_mod_exec, custom_module_exec},
+ {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED},
+ {0, NULL}
+};
+
+static PyModuleDef custom_module = {
+ .m_base = PyModuleDef_HEAD_INIT,
.m_name = "custom2",
.m_doc = "Example module that creates an extension type.",
- .m_size = -1,
+ .m_size = 0,
+ .m_slots = custom_module_slots,
};
PyMODINIT_FUNC
PyInit_custom2(void)
{
- PyObject *m;
- if (PyType_Ready(&CustomType) < 0)
- return NULL;
-
- m = PyModule_Create(&custommodule);
- if (m == NULL)
- return NULL;
-
- if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) {
- Py_DECREF(m);
- return NULL;
- }
-
- return m;
+ return PyModuleDef_Init(&custom_module);
}
diff --git a/Doc/includes/newtypes/custom3.c b/Doc/includes/newtypes/custom3.c
index 854034d4066..22f50eb0e1d 100644
--- a/Doc/includes/newtypes/custom3.c
+++ b/Doc/includes/newtypes/custom3.c
@@ -151,28 +151,36 @@ static PyTypeObject CustomType = {
.tp_getset = Custom_getsetters,
};
-static PyModuleDef custommodule = {
+static int
+custom_module_exec(PyObject *m)
+{
+ if (PyType_Ready(&CustomType) < 0) {
+ return -1;
+ }
+
+ if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) {
+ return -1;
+ }
+
+ return 0;
+}
+
+static PyModuleDef_Slot custom_module_slots[] = {
+ {Py_mod_exec, custom_module_exec},
+ {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED},
+ {0, NULL}
+};
+
+static PyModuleDef custom_module = {
.m_base = PyModuleDef_HEAD_INIT,
.m_name = "custom3",
.m_doc = "Example module that creates an extension type.",
- .m_size = -1,
+ .m_size = 0,
+ .m_slots = custom_module_slots,
};
PyMODINIT_FUNC
PyInit_custom3(void)
{
- PyObject *m;
- if (PyType_Ready(&CustomType) < 0)
- return NULL;
-
- m = PyModule_Create(&custommodule);
- if (m == NULL)
- return NULL;
-
- if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) {
- Py_DECREF(m);
- return NULL;
- }
-
- return m;
+ return PyModuleDef_Init(&custom_module);
}
diff --git a/Doc/includes/newtypes/custom4.c b/Doc/includes/newtypes/custom4.c
index a0a1eeb2891..07585aff598 100644
--- a/Doc/includes/newtypes/custom4.c
+++ b/Doc/includes/newtypes/custom4.c
@@ -170,28 +170,36 @@ static PyTypeObject CustomType = {
.tp_getset = Custom_getsetters,
};
-static PyModuleDef custommodule = {
+static int
+custom_module_exec(PyObject *m)
+{
+ if (PyType_Ready(&CustomType) < 0) {
+ return -1;
+ }
+
+ if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) {
+ return -1;
+ }
+
+ return 0;
+}
+
+static PyModuleDef_Slot custom_module_slots[] = {
+ {Py_mod_exec, custom_module_exec},
+ {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED},
+ {0, NULL}
+};
+
+static PyModuleDef custom_module = {
.m_base = PyModuleDef_HEAD_INIT,
.m_name = "custom4",
.m_doc = "Example module that creates an extension type.",
- .m_size = -1,
+ .m_size = 0,
+ .m_slots = custom_module_slots,
};
PyMODINIT_FUNC
PyInit_custom4(void)
{
- PyObject *m;
- if (PyType_Ready(&CustomType) < 0)
- return NULL;
-
- m = PyModule_Create(&custommodule);
- if (m == NULL)
- return NULL;
-
- if (PyModule_AddObjectRef(m, "Custom", (PyObject *) &CustomType) < 0) {
- Py_DECREF(m);
- return NULL;
- }
-
- return m;
+ return PyModuleDef_Init(&custom_module);
}
diff --git a/Doc/includes/newtypes/sublist.c b/Doc/includes/newtypes/sublist.c
index 00664f34541..b784456a4ef 100644
--- a/Doc/includes/newtypes/sublist.c
+++ b/Doc/includes/newtypes/sublist.c
@@ -31,7 +31,7 @@ SubList_init(PyObject *op, PyObject *args, PyObject *kwds)
}
static PyTypeObject SubListType = {
- PyVarObject_HEAD_INIT(NULL, 0)
+ .ob_base = PyVarObject_HEAD_INIT(NULL, 0)
.tp_name = "sublist.SubList",
.tp_doc = PyDoc_STR("SubList objects"),
.tp_basicsize = sizeof(SubListObject),
@@ -41,29 +41,37 @@ static PyTypeObject SubListType = {
.tp_methods = SubList_methods,
};
-static PyModuleDef sublistmodule = {
- PyModuleDef_HEAD_INIT,
+static int
+sublist_module_exec(PyObject *m)
+{
+ SubListType.tp_base = &PyList_Type;
+ if (PyType_Ready(&SubListType) < 0) {
+ return -1;
+ }
+
+ if (PyModule_AddObjectRef(m, "SubList", (PyObject *) &SubListType) < 0) {
+ return -1;
+ }
+
+ return 0;
+}
+
+static PyModuleDef_Slot sublist_module_slots[] = {
+ {Py_mod_exec, sublist_module_exec},
+ {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_NOT_SUPPORTED},
+ {0, NULL}
+};
+
+static PyModuleDef sublist_module = {
+ .m_base = PyModuleDef_HEAD_INIT,
.m_name = "sublist",
.m_doc = "Example module that creates an extension type.",
- .m_size = -1,
+ .m_size = 0,
+ .m_slots = sublist_module_slots,
};
PyMODINIT_FUNC
PyInit_sublist(void)
{
- PyObject *m;
- SubListType.tp_base = &PyList_Type;
- if (PyType_Ready(&SubListType) < 0)
- return NULL;
-
- m = PyModule_Create(&sublistmodule);
- if (m == NULL)
- return NULL;
-
- if (PyModule_AddObjectRef(m, "SubList", (PyObject *) &SubListType) < 0) {
- Py_DECREF(m);
- return NULL;
- }
-
- return m;
+ return PyModuleDef_Init(&sublist_module);
}
diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst
index 29396c7a036..17f126cc065 100644
--- a/Doc/library/argparse.rst
+++ b/Doc/library/argparse.rst
@@ -2122,12 +2122,15 @@ Partial parsing
.. method:: ArgumentParser.parse_known_args(args=None, namespace=None)
- Sometimes a script may only parse a few of the command-line arguments, passing
- the remaining arguments on to another script or program. In these cases, the
- :meth:`~ArgumentParser.parse_known_args` method can be useful. It works much like
- :meth:`~ArgumentParser.parse_args` except that it does not produce an error when
- extra arguments are present. Instead, it returns a two item tuple containing
- the populated namespace and the list of remaining argument strings.
+ Sometimes a script only needs to handle a specific set of command-line
+ arguments, leaving any unrecognized arguments for another script or program.
+ In these cases, the :meth:`~ArgumentParser.parse_known_args` method can be
+ useful.
+
+ This method works similarly to :meth:`~ArgumentParser.parse_args`, but it does
+ not raise an error for extra, unrecognized arguments. Instead, it parses the
+ known arguments and returns a two item tuple that contains the populated
+ namespace and the list of any unrecognized arguments.
::
diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst
index ca9a6b0712c..ef6c62dca1e 100644
--- a/Doc/library/ast.rst
+++ b/Doc/library/ast.rst
@@ -252,12 +252,11 @@ Root nodes
>>> print(ast.dump(ast.parse('(int, str) -> List[int]', mode='func_type'), indent=4))
FunctionType(
argtypes=[
- Name(id='int', ctx=Load()),
- Name(id='str', ctx=Load())],
+ Name(id='int'),
+ Name(id='str')],
returns=Subscript(
- value=Name(id='List', ctx=Load()),
- slice=Name(id='int', ctx=Load()),
- ctx=Load()))
+ value=Name(id='List'),
+ slice=Name(id='int')))
.. versionadded:: 3.8
@@ -268,9 +267,9 @@ Literals
.. class:: Constant(value)
A constant value. The ``value`` attribute of the ``Constant`` literal contains the
- Python object it represents. The values represented can be simple types
- such as a number, string or ``None``, but also immutable container types
- (tuples and frozensets) if all of their elements are constant.
+ Python object it represents. The values represented can be instances of :class:`str`,
+ :class:`bytes`, :class:`int`, :class:`float`, :class:`complex`, and :class:`bool`,
+ and the constants :data:`None` and :data:`Ellipsis`.
.. doctest::
@@ -312,14 +311,14 @@ Literals
values=[
Constant(value='sin('),
FormattedValue(
- value=Name(id='a', ctx=Load()),
+ value=Name(id='a'),
conversion=-1),
Constant(value=') is '),
FormattedValue(
value=Call(
- func=Name(id='sin', ctx=Load()),
+ func=Name(id='sin'),
args=[
- Name(id='a', ctx=Load())]),
+ Name(id='a')]),
conversion=-1,
format_spec=JoinedStr(
values=[
@@ -341,16 +340,14 @@ Literals
elts=[
Constant(value=1),
Constant(value=2),
- Constant(value=3)],
- ctx=Load()))
+ Constant(value=3)]))
>>> print(ast.dump(ast.parse('(1, 2, 3)', mode='eval'), indent=4))
Expression(
body=Tuple(
elts=[
Constant(value=1),
Constant(value=2),
- Constant(value=3)],
- ctx=Load()))
+ Constant(value=3)]))
.. class:: Set(elts)
@@ -388,7 +385,7 @@ Literals
None],
values=[
Constant(value=1),
- Name(id='d', ctx=Load())]))
+ Name(id='d')]))
Variables
@@ -414,7 +411,7 @@ Variables
Module(
body=[
Expr(
- value=Name(id='a', ctx=Load()))])
+ value=Name(id='a'))])
>>> print(ast.dump(ast.parse('a = 1'), indent=4))
Module(
@@ -452,7 +449,7 @@ Variables
value=Name(id='b', ctx=Store()),
ctx=Store())],
ctx=Store())],
- value=Name(id='it', ctx=Load()))])
+ value=Name(id='it'))])
.. _ast-expressions:
@@ -475,7 +472,7 @@ Expressions
Expr(
value=UnaryOp(
op=USub(),
- operand=Name(id='a', ctx=Load())))])
+ operand=Name(id='a')))])
.. class:: UnaryOp(op, operand)
@@ -498,7 +495,7 @@ Expressions
Expression(
body=UnaryOp(
op=Not(),
- operand=Name(id='x', ctx=Load())))
+ operand=Name(id='x')))
.. class:: BinOp(left, op, right)
@@ -511,9 +508,9 @@ Expressions
>>> print(ast.dump(ast.parse('x + y', mode='eval'), indent=4))
Expression(
body=BinOp(
- left=Name(id='x', ctx=Load()),
+ left=Name(id='x'),
op=Add(),
- right=Name(id='y', ctx=Load())))
+ right=Name(id='y')))
.. class:: Add
@@ -549,8 +546,8 @@ Expressions
body=BoolOp(
op=Or(),
values=[
- Name(id='x', ctx=Load()),
- Name(id='y', ctx=Load())]))
+ Name(id='x'),
+ Name(id='y')]))
.. class:: And
@@ -575,7 +572,7 @@ Expressions
LtE(),
Lt()],
comparators=[
- Name(id='a', ctx=Load()),
+ Name(id='a'),
Constant(value=10)]))
@@ -609,18 +606,17 @@ Expressions
>>> print(ast.dump(ast.parse('func(a, b=c, *d, **e)', mode='eval'), indent=4))
Expression(
body=Call(
- func=Name(id='func', ctx=Load()),
+ func=Name(id='func'),
args=[
- Name(id='a', ctx=Load()),
+ Name(id='a'),
Starred(
- value=Name(id='d', ctx=Load()),
- ctx=Load())],
+ value=Name(id='d'))],
keywords=[
keyword(
arg='b',
- value=Name(id='c', ctx=Load())),
+ value=Name(id='c')),
keyword(
- value=Name(id='e', ctx=Load()))]))
+ value=Name(id='e'))]))
.. class:: keyword(arg, value)
@@ -639,9 +635,9 @@ Expressions
>>> print(ast.dump(ast.parse('a if b else c', mode='eval'), indent=4))
Expression(
body=IfExp(
- test=Name(id='b', ctx=Load()),
- body=Name(id='a', ctx=Load()),
- orelse=Name(id='c', ctx=Load())))
+ test=Name(id='b'),
+ body=Name(id='a'),
+ orelse=Name(id='c')))
.. class:: Attribute(value, attr, ctx)
@@ -656,9 +652,8 @@ Expressions
>>> print(ast.dump(ast.parse('snake.colour', mode='eval'), indent=4))
Expression(
body=Attribute(
- value=Name(id='snake', ctx=Load()),
- attr='colour',
- ctx=Load()))
+ value=Name(id='snake'),
+ attr='colour'))
.. class:: NamedExpr(target, value)
@@ -694,15 +689,13 @@ Subscripting
>>> print(ast.dump(ast.parse('l[1:2, 3]', mode='eval'), indent=4))
Expression(
body=Subscript(
- value=Name(id='l', ctx=Load()),
+ value=Name(id='l'),
slice=Tuple(
elts=[
Slice(
lower=Constant(value=1),
upper=Constant(value=2)),
- Constant(value=3)],
- ctx=Load()),
- ctx=Load()))
+ Constant(value=3)])))
.. class:: Slice(lower, upper, step)
@@ -716,11 +709,10 @@ Subscripting
>>> print(ast.dump(ast.parse('l[1:2]', mode='eval'), indent=4))
Expression(
body=Subscript(
- value=Name(id='l', ctx=Load()),
+ value=Name(id='l'),
slice=Slice(
lower=Constant(value=1),
- upper=Constant(value=2)),
- ctx=Load()))
+ upper=Constant(value=2))))
Comprehensions
@@ -745,11 +737,11 @@ Comprehensions
... ))
Expression(
body=ListComp(
- elt=Name(id='x', ctx=Load()),
+ elt=Name(id='x'),
generators=[
comprehension(
target=Name(id='x', ctx=Store()),
- iter=Name(id='numbers', ctx=Load()),
+ iter=Name(id='numbers'),
is_async=0)]))
>>> print(ast.dump(
... ast.parse('{x: x**2 for x in numbers}', mode='eval'),
@@ -757,15 +749,15 @@ Comprehensions
... ))
Expression(
body=DictComp(
- key=Name(id='x', ctx=Load()),
+ key=Name(id='x'),
value=BinOp(
- left=Name(id='x', ctx=Load()),
+ left=Name(id='x'),
op=Pow(),
right=Constant(value=2)),
generators=[
comprehension(
target=Name(id='x', ctx=Store()),
- iter=Name(id='numbers', ctx=Load()),
+ iter=Name(id='numbers'),
is_async=0)]))
>>> print(ast.dump(
... ast.parse('{x for x in numbers}', mode='eval'),
@@ -773,11 +765,11 @@ Comprehensions
... ))
Expression(
body=SetComp(
- elt=Name(id='x', ctx=Load()),
+ elt=Name(id='x'),
generators=[
comprehension(
target=Name(id='x', ctx=Store()),
- iter=Name(id='numbers', ctx=Load()),
+ iter=Name(id='numbers'),
is_async=0)]))
@@ -798,17 +790,17 @@ Comprehensions
Expression(
body=ListComp(
elt=Call(
- func=Name(id='ord', ctx=Load()),
+ func=Name(id='ord'),
args=[
- Name(id='c', ctx=Load())]),
+ Name(id='c')]),
generators=[
comprehension(
target=Name(id='line', ctx=Store()),
- iter=Name(id='file', ctx=Load()),
+ iter=Name(id='file'),
is_async=0),
comprehension(
target=Name(id='c', ctx=Store()),
- iter=Name(id='line', ctx=Load()),
+ iter=Name(id='line'),
is_async=0)]))
>>> print(ast.dump(ast.parse('(n**2 for n in it if n>5 if n<10)', mode='eval'),
@@ -816,22 +808,22 @@ Comprehensions
Expression(
body=GeneratorExp(
elt=BinOp(
- left=Name(id='n', ctx=Load()),
+ left=Name(id='n'),
op=Pow(),
right=Constant(value=2)),
generators=[
comprehension(
target=Name(id='n', ctx=Store()),
- iter=Name(id='it', ctx=Load()),
+ iter=Name(id='it'),
ifs=[
Compare(
- left=Name(id='n', ctx=Load()),
+ left=Name(id='n'),
ops=[
Gt()],
comparators=[
Constant(value=5)]),
Compare(
- left=Name(id='n', ctx=Load()),
+ left=Name(id='n'),
ops=[
Lt()],
comparators=[
@@ -842,11 +834,11 @@ Comprehensions
... indent=4)) # Async comprehension
Expression(
body=ListComp(
- elt=Name(id='i', ctx=Load()),
+ elt=Name(id='i'),
generators=[
comprehension(
target=Name(id='i', ctx=Store()),
- iter=Name(id='soc', ctx=Load()),
+ iter=Name(id='soc'),
is_async=1)]))
@@ -888,7 +880,7 @@ Statements
Name(id='a', ctx=Store()),
Name(id='b', ctx=Store())],
ctx=Store())],
- value=Name(id='c', ctx=Load()))])
+ value=Name(id='c'))])
.. class:: AnnAssign(target, annotation, value, simple)
@@ -911,7 +903,7 @@ Statements
body=[
AnnAssign(
target=Name(id='c', ctx=Store()),
- annotation=Name(id='int', ctx=Load()),
+ annotation=Name(id='int'),
simple=1)])
>>> print(ast.dump(ast.parse('(a): int = 1'), indent=4)) # Annotation with parenthesis
@@ -919,7 +911,7 @@ Statements
body=[
AnnAssign(
target=Name(id='a', ctx=Store()),
- annotation=Name(id='int', ctx=Load()),
+ annotation=Name(id='int'),
value=Constant(value=1),
simple=0)])
@@ -928,10 +920,10 @@ Statements
body=[
AnnAssign(
target=Attribute(
- value=Name(id='a', ctx=Load()),
+ value=Name(id='a'),
attr='b',
ctx=Store()),
- annotation=Name(id='int', ctx=Load()),
+ annotation=Name(id='int'),
simple=0)])
>>> print(ast.dump(ast.parse('a[1]: int'), indent=4)) # Subscript annotation
@@ -939,10 +931,10 @@ Statements
body=[
AnnAssign(
target=Subscript(
- value=Name(id='a', ctx=Load()),
+ value=Name(id='a'),
slice=Constant(value=1),
ctx=Store()),
- annotation=Name(id='int', ctx=Load()),
+ annotation=Name(id='int'),
simple=0)])
@@ -979,8 +971,8 @@ Statements
Module(
body=[
Raise(
- exc=Name(id='x', ctx=Load()),
- cause=Name(id='y', ctx=Load()))])
+ exc=Name(id='x'),
+ cause=Name(id='y'))])
.. class:: Assert(test, msg)
@@ -994,8 +986,8 @@ Statements
Module(
body=[
Assert(
- test=Name(id='x', ctx=Load()),
- msg=Name(id='y', ctx=Load()))])
+ test=Name(id='x'),
+ msg=Name(id='y'))])
.. class:: Delete(targets)
@@ -1041,7 +1033,7 @@ Statements
body=[
TypeAlias(
name=Name(id='Alias', ctx=Store()),
- value=Name(id='int', ctx=Load()))])
+ value=Name(id='int'))])
.. versionadded:: 3.12
@@ -1134,13 +1126,13 @@ Control flow
Module(
body=[
If(
- test=Name(id='x', ctx=Load()),
+ test=Name(id='x'),
body=[
Expr(
value=Constant(value=Ellipsis))],
orelse=[
If(
- test=Name(id='y', ctx=Load()),
+ test=Name(id='y'),
body=[
Expr(
value=Constant(value=Ellipsis))],
@@ -1174,7 +1166,7 @@ Control flow
body=[
For(
target=Name(id='x', ctx=Store()),
- iter=Name(id='y', ctx=Load()),
+ iter=Name(id='y'),
body=[
Expr(
value=Constant(value=Ellipsis))],
@@ -1199,7 +1191,7 @@ Control flow
Module(
body=[
While(
- test=Name(id='x', ctx=Load()),
+ test=Name(id='x'),
body=[
Expr(
value=Constant(value=Ellipsis))],
@@ -1227,11 +1219,11 @@ Control flow
body=[
For(
target=Name(id='a', ctx=Store()),
- iter=Name(id='b', ctx=Load()),
+ iter=Name(id='b'),
body=[
If(
test=Compare(
- left=Name(id='a', ctx=Load()),
+ left=Name(id='a'),
ops=[
Gt()],
comparators=[
@@ -1269,12 +1261,12 @@ Control flow
value=Constant(value=Ellipsis))],
handlers=[
ExceptHandler(
- type=Name(id='Exception', ctx=Load()),
+ type=Name(id='Exception'),
body=[
Expr(
value=Constant(value=Ellipsis))]),
ExceptHandler(
- type=Name(id='OtherException', ctx=Load()),
+ type=Name(id='OtherException'),
name='e',
body=[
Expr(
@@ -1309,7 +1301,7 @@ Control flow
value=Constant(value=Ellipsis))],
handlers=[
ExceptHandler(
- type=Name(id='Exception', ctx=Load()),
+ type=Name(id='Exception'),
body=[
Expr(
value=Constant(value=Ellipsis))])])])
@@ -1337,12 +1329,12 @@ Control flow
body=[
Expr(
value=BinOp(
- left=Name(id='a', ctx=Load()),
+ left=Name(id='a'),
op=Add(),
right=Constant(value=1)))],
handlers=[
ExceptHandler(
- type=Name(id='TypeError', ctx=Load()),
+ type=Name(id='TypeError'),
body=[
Pass()])])])
@@ -1375,18 +1367,18 @@ Control flow
With(
items=[
withitem(
- context_expr=Name(id='a', ctx=Load()),
+ context_expr=Name(id='a'),
optional_vars=Name(id='b', ctx=Store())),
withitem(
- context_expr=Name(id='c', ctx=Load()),
+ context_expr=Name(id='c'),
optional_vars=Name(id='d', ctx=Store()))],
body=[
Expr(
value=Call(
- func=Name(id='something', ctx=Load()),
+ func=Name(id='something'),
args=[
- Name(id='b', ctx=Load()),
- Name(id='d', ctx=Load())]))])])
+ Name(id='b'),
+ Name(id='d')]))])])
Pattern matching
@@ -1426,14 +1418,14 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchSequence(
patterns=[
MatchAs(name='x')]),
guard=Compare(
- left=Name(id='x', ctx=Load()),
+ left=Name(id='x'),
ops=[
Gt()],
comparators=[
@@ -1443,7 +1435,7 @@ Pattern matching
value=Constant(value=Ellipsis))]),
match_case(
pattern=MatchClass(
- cls=Name(id='tuple', ctx=Load())),
+ cls=Name(id='tuple')),
body=[
Expr(
value=Constant(value=Ellipsis))])])])
@@ -1467,7 +1459,7 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchValue(
@@ -1494,7 +1486,7 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchSingleton(value=None),
@@ -1521,7 +1513,7 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchSequence(
@@ -1554,7 +1546,7 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchSequence(
@@ -1603,7 +1595,7 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchMapping(
@@ -1653,11 +1645,11 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchClass(
- cls=Name(id='Point2D', ctx=Load()),
+ cls=Name(id='Point2D'),
patterns=[
MatchValue(
value=Constant(value=0)),
@@ -1668,7 +1660,7 @@ Pattern matching
value=Constant(value=Ellipsis))]),
match_case(
pattern=MatchClass(
- cls=Name(id='Point3D', ctx=Load()),
+ cls=Name(id='Point3D'),
kwd_attrs=[
'x',
'y',
@@ -1709,7 +1701,7 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchAs(
@@ -1746,7 +1738,7 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchOr(
@@ -1786,7 +1778,7 @@ Type annotations
body=[
AnnAssign(
target=Name(id='x', ctx=Store()),
- annotation=Name(id='bool', ctx=Load()),
+ annotation=Name(id='bool'),
value=Constant(value=1),
simple=1)],
type_ignores=[
@@ -1824,12 +1816,11 @@ aliases.
type_params=[
TypeVar(
name='T',
- bound=Name(id='int', ctx=Load()),
- default_value=Name(id='bool', ctx=Load()))],
+ bound=Name(id='int'),
+ default_value=Name(id='bool'))],
value=Subscript(
- value=Name(id='list', ctx=Load()),
- slice=Name(id='T', ctx=Load()),
- ctx=Load()))])
+ value=Name(id='list'),
+ slice=Name(id='T')))])
.. versionadded:: 3.12
@@ -1854,17 +1845,14 @@ aliases.
name='P',
default_value=List(
elts=[
- Name(id='int', ctx=Load()),
- Name(id='str', ctx=Load())],
- ctx=Load()))],
+ Name(id='int'),
+ Name(id='str')]))],
value=Subscript(
- value=Name(id='Callable', ctx=Load()),
+ value=Name(id='Callable'),
slice=Tuple(
elts=[
- Name(id='P', ctx=Load()),
- Name(id='int', ctx=Load())],
- ctx=Load()),
- ctx=Load()))])
+ Name(id='P'),
+ Name(id='int')])))])
.. versionadded:: 3.12
@@ -1885,18 +1873,13 @@ aliases.
TypeAlias(
name=Name(id='Alias', ctx=Store()),
type_params=[
- TypeVarTuple(
- name='Ts',
- default_value=Tuple(ctx=Load()))],
+ TypeVarTuple(name='Ts', default_value=Tuple())],
value=Subscript(
- value=Name(id='tuple', ctx=Load()),
+ value=Name(id='tuple'),
slice=Tuple(
elts=[
Starred(
- value=Name(id='Ts', ctx=Load()),
- ctx=Load())],
- ctx=Load()),
- ctx=Load()))])
+ value=Name(id='Ts'))])))])
.. versionadded:: 3.12
@@ -2001,8 +1984,8 @@ Function and class definitions
body=[
Pass()],
decorator_list=[
- Name(id='decorator1', ctx=Load()),
- Name(id='decorator2', ctx=Load())],
+ Name(id='decorator1'),
+ Name(id='decorator2')],
returns=Constant(value='return annotation'))])
@@ -2032,14 +2015,14 @@ Function and class definitions
body=[
Expr(
value=Yield(
- value=Name(id='x', ctx=Load())))])
+ value=Name(id='x')))])
>>> print(ast.dump(ast.parse('yield from x'), indent=4))
Module(
body=[
Expr(
value=YieldFrom(
- value=Name(id='x', ctx=Load())))])
+ value=Name(id='x')))])
.. class:: Global(names)
@@ -2094,17 +2077,17 @@ Function and class definitions
ClassDef(
name='Foo',
bases=[
- Name(id='base1', ctx=Load()),
- Name(id='base2', ctx=Load())],
+ Name(id='base1'),
+ Name(id='base2')],
keywords=[
keyword(
arg='metaclass',
- value=Name(id='meta', ctx=Load()))],
+ value=Name(id='meta'))],
body=[
Pass()],
decorator_list=[
- Name(id='decorator1', ctx=Load()),
- Name(id='decorator2', ctx=Load())])])
+ Name(id='decorator1'),
+ Name(id='decorator2')])])
.. versionchanged:: 3.12
Added ``type_params``.
@@ -2141,7 +2124,7 @@ Async and await
Expr(
value=Await(
value=Call(
- func=Name(id='other_func', ctx=Load()))))])])
+ func=Name(id='other_func'))))])])
.. class:: AsyncFor(target, iter, body, orelse, type_comment)
@@ -2402,7 +2385,7 @@ and classes for traversing abstract syntax trees:
def visit_Name(self, node):
return Subscript(
- value=Name(id='data', ctx=Load()),
+ value=Name(id='data'),
slice=Constant(value=node.id),
ctx=node.ctx
)
@@ -2445,8 +2428,26 @@ and classes for traversing abstract syntax trees:
indents that many spaces per level. If *indent* is a string (such as ``"\t"``),
that string is used to indent each level.
- If *show_empty* is ``False`` (the default), empty lists and fields that are ``None``
- will be omitted from the output.
+ If *show_empty* is false (the default), optional empty lists and
+ ``Load()`` values will be omitted from the output.
+ Optional ``None`` values are always omitted.
+
+ .. doctest::
+
+ >>> tree = ast.parse('print(None)', '?', 'eval')
+ >>> print(ast.dump(tree, indent=4))
+ Expression(
+ body=Call(
+ func=Name(id='print'),
+ args=[
+ Constant(value=None)]))
+ >>> print(ast.dump(tree, indent=4, show_empty=True))
+ Expression(
+ body=Call(
+ func=Name(id='print', ctx=Load()),
+ args=[
+ Constant(value=None)],
+ keywords=[]))
.. versionchanged:: 3.9
Added the *indent* option.
@@ -2454,32 +2455,8 @@ and classes for traversing abstract syntax trees:
.. versionchanged:: 3.13
Added the *show_empty* option.
- .. doctest::
-
- >>> print(ast.dump(ast.parse("""\
- ... async def f():
- ... await other_func()
- ... """), indent=4, show_empty=True))
- Module(
- body=[
- AsyncFunctionDef(
- name='f',
- args=arguments(
- posonlyargs=[],
- args=[],
- kwonlyargs=[],
- kw_defaults=[],
- defaults=[]),
- body=[
- Expr(
- value=Await(
- value=Call(
- func=Name(id='other_func', ctx=Load()),
- args=[],
- keywords=[])))],
- decorator_list=[],
- type_params=[])],
- type_ignores=[])
+ .. versionchanged:: next
+ Omit optional ``Load()`` values by default.
.. _ast-compiler-flags:
diff --git a/Doc/library/asyncio-stream.rst b/Doc/library/asyncio-stream.rst
index c56166cabb9..90c90862ca1 100644
--- a/Doc/library/asyncio-stream.rst
+++ b/Doc/library/asyncio-stream.rst
@@ -171,13 +171,17 @@ and work with streams:
.. function:: start_unix_server(client_connected_cb, path=None, \
*, limit=None, sock=None, backlog=100, ssl=None, \
ssl_handshake_timeout=None, \
- ssl_shutdown_timeout=None, start_serving=True)
+ ssl_shutdown_timeout=None, start_serving=True, cleanup_socket=True)
:async:
Start a Unix socket server.
Similar to :func:`start_server` but works with Unix sockets.
+ If *cleanup_socket* is true then the Unix socket will automatically
+ be removed from the filesystem when the server is closed, unless the
+ socket has been replaced after the server has been created.
+
See also the documentation of :meth:`loop.create_unix_server`.
.. note::
@@ -198,6 +202,9 @@ and work with streams:
.. versionchanged:: 3.11
Added the *ssl_shutdown_timeout* parameter.
+ .. versionchanged:: 3.13
+ Added the *cleanup_socket* parameter.
+
StreamReader
============
diff --git a/Doc/library/compileall.rst b/Doc/library/compileall.rst
index c42288419c4..ebbbf857e71 100644
--- a/Doc/library/compileall.rst
+++ b/Doc/library/compileall.rst
@@ -56,11 +56,18 @@ compile Python sources.
executed.
.. option:: -s strip_prefix
+
+ Remove the given prefix from paths recorded in the ``.pyc`` files.
+ Paths are made relative to the prefix.
+
+ This option can be used with ``-p`` but not with ``-d``.
+
.. option:: -p prepend_prefix
- Remove (``-s``) or append (``-p``) the given prefix of paths
- recorded in the ``.pyc`` files.
- Cannot be combined with ``-d``.
+ Prepend the given prefix to paths recorded in the ``.pyc`` files.
+ Use ``-p /`` to make the paths absolute.
+
+ This option can be used with ``-s`` but not with ``-d``.
.. option:: -x regex
diff --git a/Doc/library/compression.zstd.rst b/Doc/library/compression.zstd.rst
index 1e1802155a1..35bcbc2bfd8 100644
--- a/Doc/library/compression.zstd.rst
+++ b/Doc/library/compression.zstd.rst
@@ -615,6 +615,24 @@ Advanced parameter control
A value of zero causes the value to be selected automatically.
+ .. attribute:: content_size_flag
+
+ Write the size of the data to be compressed into the Zstandard frame
+ header when known prior to compressing.
+
+ This flag only takes effect under the following two scenarios:
+
+ * Calling :func:`compress` for one-shot compression
+ * Providing all of the data to be compressed in the frame in a single
+ :meth:`ZstdCompressor.compress` call, with the
+ :attr:`ZstdCompressor.FLUSH_FRAME` mode.
+
+ All other compression calls may not write the size information into the
+ frame header.
+
+      ``True`` or ``1`` enables the content size flag, while ``False`` or ``0``
+      disables it.
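+
+      A sketch of enabling the flag; this assumes the *options* mapping
+      accepted by :class:`ZstdCompressor`, keyed by :class:`CompressionParameter`
+      members::
+
+         from compression.zstd import CompressionParameter, ZstdCompressor
+
+         opts = {CompressionParameter.content_size_flag: True}
+         compressor = ZstdCompressor(options=opts)
+         # Compress everything in a single FLUSH_FRAME call so that the known
+         # size can be written into the frame header.
+         frame = compressor.compress(b"payload", mode=ZstdCompressor.FLUSH_FRAME)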
+
.. attribute:: checksum_flag
A four-byte checksum using XXHash64 of the uncompressed content is
diff --git a/Doc/library/copy.rst b/Doc/library/copy.rst
index 95b41f988a0..210ad718800 100644
--- a/Doc/library/copy.rst
+++ b/Doc/library/copy.rst
@@ -122,6 +122,8 @@ and only supports named tuples created by :func:`~collections.namedtuple`,
This method should create a new object of the same type,
replacing fields with values from *changes*.
+ .. versionadded:: 3.13
+
.. seealso::
diff --git a/Doc/library/csv.rst b/Doc/library/csv.rst
index 533cdf13974..5297be17bd7 100644
--- a/Doc/library/csv.rst
+++ b/Doc/library/csv.rst
@@ -70,7 +70,7 @@ The :mod:`csv` module defines the following functions:
section :ref:`csv-fmt-params`.
Each row read from the csv file is returned as a list of strings. No
- automatic data type conversion is performed unless the ``QUOTE_NONNUMERIC`` format
+ automatic data type conversion is performed unless the :data:`QUOTE_NONNUMERIC` format
option is specified (in which case unquoted fields are transformed into floats).
A short usage example::
@@ -331,8 +331,14 @@ The :mod:`csv` module defines the following constants:
Instructs :class:`writer` objects to quote all non-numeric fields.
- Instructs :class:`reader` objects to convert all non-quoted fields to type *float*.
+ Instructs :class:`reader` objects to convert all non-quoted fields to type :class:`float`.
+ .. note::
+ Some numeric types, such as :class:`bool`, :class:`~fractions.Fraction`,
+ or :class:`~enum.IntEnum`, have a string representation that cannot be
+ converted to :class:`float`.
+ They cannot be read in the :data:`QUOTE_NONNUMERIC` and
+ :data:`QUOTE_STRINGS` modes.
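+
+      For example, a :class:`bool` written in this mode is emitted unquoted as
+      ``True`` and cannot be read back (a minimal sketch)::
+
+         >>> import csv, io
+         >>> next(csv.reader(io.StringIO('True,1.5\r\n'), quoting=csv.QUOTE_NONNUMERIC))
+         Traceback (most recent call last):
+         ...
+         ValueError: could not convert string to float: 'True'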
.. data:: QUOTE_NONE
diff --git a/Doc/library/ctypes.rst b/Doc/library/ctypes.rst
index 5b733d5321e..8e74c6c9dee 100644
--- a/Doc/library/ctypes.rst
+++ b/Doc/library/ctypes.rst
@@ -2031,35 +2031,55 @@ Utility functions
pointer.
-.. function:: create_string_buffer(init_or_size, size=None)
+.. function:: create_string_buffer(init, size=None)
+ create_string_buffer(size)
This function creates a mutable character buffer. The returned object is a
ctypes array of :class:`c_char`.
- *init_or_size* must be an integer which specifies the size of the array, or a
- bytes object which will be used to initialize the array items.
+ If *size* is given (and not ``None``), it must be an :class:`int`.
+ It specifies the size of the returned array.
- If a bytes object is specified as first argument, the buffer is made one item
- larger than its length so that the last element in the array is a NUL
- termination character. An integer can be passed as second argument which allows
- specifying the size of the array if the length of the bytes should not be used.
+ If the *init* argument is given, it must be :class:`bytes`. It is used
+ to initialize the array items. Bytes not initialized this way are
+ set to zero (NUL).
+
+ If *size* is not given (or if it is ``None``), the buffer is made one element
+ larger than *init*, effectively adding a NUL terminator.
+
+ If both arguments are given, *size* must not be less than ``len(init)``.
+
+ .. warning::
+
+ If *size* is equal to ``len(init)``, a NUL terminator is
+ not added. Do not treat such a buffer as a C string.
+
+ For example::
+
+ >>> bytes(create_string_buffer(2))
+ b'\x00\x00'
+ >>> bytes(create_string_buffer(b'ab'))
+ b'ab\x00'
+ >>> bytes(create_string_buffer(b'ab', 2))
+ b'ab'
+ >>> bytes(create_string_buffer(b'ab', 4))
+ b'ab\x00\x00'
+ >>> bytes(create_string_buffer(b'abcdef', 2))
+ Traceback (most recent call last):
+ ...
+ ValueError: byte string too long
.. audit-event:: ctypes.create_string_buffer init,size ctypes.create_string_buffer
-.. function:: create_unicode_buffer(init_or_size, size=None)
+.. function:: create_unicode_buffer(init, size=None)
+ create_unicode_buffer(size)
This function creates a mutable unicode character buffer. The returned object is
a ctypes array of :class:`c_wchar`.
- *init_or_size* must be an integer which specifies the size of the array, or a
- string which will be used to initialize the array items.
-
- If a string is specified as first argument, the buffer is made one item
- larger than the length of the string so that the last element in the array is a
- NUL termination character. An integer can be passed as second argument which
- allows specifying the size of the array if the length of the string should not
- be used.
+ The function takes the same arguments as :func:`~create_string_buffer` except
+ *init* must be a string and *size* counts :class:`c_wchar`.
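+
+   For example (a brief sketch mirroring the byte buffer examples above)::
+
+      >>> create_unicode_buffer('ab').value
+      'ab'
+      >>> len(create_unicode_buffer('ab'))
+      3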
.. audit-event:: ctypes.create_unicode_buffer init,size ctypes.create_unicode_buffer
diff --git a/Doc/library/dbm.rst b/Doc/library/dbm.rst
index 36221c026d6..39e287b1521 100644
--- a/Doc/library/dbm.rst
+++ b/Doc/library/dbm.rst
@@ -15,10 +15,16 @@
* :mod:`dbm.ndbm`
If none of these modules are installed, the
-slow-but-simple implementation in module :mod:`dbm.dumb` will be used. There
+slow-but-simple implementation in module :mod:`dbm.dumb` will be used. There
is a `third party interface <https://www.jcea.es/programacion/pybsddb.htm>`_ to
the Oracle Berkeley DB.
+.. note::
+ None of the underlying modules will automatically shrink the disk space used by
+ the database file. However, :mod:`dbm.sqlite3`, :mod:`dbm.gnu` and :mod:`dbm.dumb`
+ provide a :meth:`!reorganize` method that can be used for this purpose.
+
+
.. exception:: error
A tuple containing the exceptions that can be raised by each of the supported
@@ -186,6 +192,17 @@ or any other SQLite browser, including the SQLite CLI.
The Unix file access mode of the file (default: octal ``0o666``),
used only when the database has to be created.
+ .. method:: sqlite3.reorganize()
+
+ If you have carried out a lot of deletions and would like to shrink the space
+ used on disk, this method will reorganize the database; otherwise, deleted file
+ space will be kept and reused as new (key, value) pairs are added.
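+
+      A minimal usage sketch (``'example.db'`` is an illustrative filename and
+      the deleted key is assumed to exist)::
+
+         import dbm.sqlite3
+
+         with dbm.sqlite3.open('example.db', 'w') as db:
+             del db[b'stale-key']
+             db.reorganize()  # try to return the freed space to the filesystem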
+
+ .. note::
+ While reorganizing, as much as two times the size of the original database is required
+ in free disk space. However, be aware that this factor changes for each :mod:`dbm` submodule.
+
+ .. versionadded:: next
:mod:`dbm.gnu` --- GNU database manager
---------------------------------------
@@ -237,6 +254,9 @@ functionality like crash tolerance.
* ``'s'``: Synchronized mode.
Changes to the database will be written immediately to the file.
* ``'u'``: Do not lock database.
+ * ``'m'``: Do not use :manpage:`mmap(2)`.
+ This may harm performance, but improve crash tolerance.
+
+       .. versionadded:: next
Not all flags are valid for all versions of GDBM.
See the :data:`open_flags` member for a list of supported flag characters.
@@ -284,6 +304,10 @@ functionality like crash tolerance.
reorganization; otherwise, deleted file space will be kept and reused as new
(key, value) pairs are added.
+ .. note::
+      While reorganizing, as much as the size of the original database is required
+      in free disk space. However, be aware that this factor changes for each :mod:`dbm` submodule.
+
.. method:: gdbm.sync()
When the database has been opened in fast mode, this method forces any
@@ -438,6 +462,11 @@ The :mod:`!dbm.dumb` module defines the following:
with a sufficiently large/complex entry due to stack depth limitations in
Python's AST compiler.
+ .. warning::
+ :mod:`dbm.dumb` does not support concurrent read/write access. (Multiple
+ simultaneous read accesses are safe.) When a program has the database open
+ for writing, no other program should have it open for reading or writing.
+
.. versionchanged:: 3.5
:func:`~dbm.dumb.open` always creates a new database when *flag* is ``'n'``.
@@ -460,3 +489,15 @@ The :mod:`!dbm.dumb` module defines the following:
.. method:: dumbdbm.close()
Close the database.
+
+ .. method:: dumbdbm.reorganize()
+
+ If you have carried out a lot of deletions and would like to shrink the space
+ used on disk, this method will reorganize the database; otherwise, deleted file
+ space will not be reused.
+
+ .. note::
+ While reorganizing, no additional free disk space is required. However, be aware
+ that this factor changes for each :mod:`dbm` submodule.
+
+ .. versionadded:: next
diff --git a/Doc/library/doctest.rst b/Doc/library/doctest.rst
index b86fef9fd6f..fb43cf918b8 100644
--- a/Doc/library/doctest.rst
+++ b/Doc/library/doctest.rst
@@ -174,7 +174,7 @@ with assorted summaries at the end.
You can force verbose mode by passing ``verbose=True`` to :func:`testmod`, or
prohibit it by passing ``verbose=False``. In either of those cases,
-``sys.argv`` is not examined by :func:`testmod` (so passing ``-v`` or not
+:data:`sys.argv` is not examined by :func:`testmod` (so passing ``-v`` or not
has no effect).
There is also a command line shortcut for running :func:`testmod`, see section
@@ -231,7 +231,7 @@ documentation::
As with :func:`testmod`, :func:`testfile` won't display anything unless an
example fails. If an example does fail, then the failing example(s) and the
cause(s) of the failure(s) are printed to stdout, using the same format as
-:func:`testmod`.
+:func:`!testmod`.
By default, :func:`testfile` looks for files in the calling module's directory.
See section :ref:`doctest-basic-api` for a description of the optional arguments
@@ -311,6 +311,9 @@ Which Docstrings Are Examined?
The module docstring, and all function, class and method docstrings are
searched. Objects imported into the module are not searched.
+.. attribute:: module.__test__
+ :no-typesetting:
+
In addition, there are cases when you want tests to be part of a module but not part
of the help text, which requires that the tests not be included in the docstring.
Doctest looks for a module-level variable called ``__test__`` and uses it to locate other
@@ -533,7 +536,7 @@ Some details you should read once, but won't need to remember:
* The interactive shell omits the traceback header line for some
:exc:`SyntaxError`\ s. But doctest uses the traceback header line to
distinguish exceptions from non-exceptions. So in the rare case where you need
- to test a :exc:`SyntaxError` that omits the traceback header, you will need to
+ to test a :exc:`!SyntaxError` that omits the traceback header, you will need to
manually add the traceback header line to your test example.
.. index:: single: ^ (caret); marker
@@ -860,15 +863,15 @@ The :const:`ELLIPSIS` directive gives a nice approach for the last example:
<C object at 0x...>
Floating-point numbers are also subject to small output variations across
-platforms, because Python defers to the platform C library for float formatting,
-and C libraries vary widely in quality here. ::
+platforms, because Python defers to the platform C library for some
+floating-point calculations, and C libraries vary widely in quality here. ::
- >>> 1./7 # risky
- 0.14285714285714285
- >>> print(1./7) # safer
- 0.142857142857
- >>> print(round(1./7, 6)) # much safer
- 0.142857
+ >>> 1000**0.1 # risky
+ 1.9952623149688797
+ >>> round(1000**0.1, 9) # safer
+ 1.995262315
+ >>> print(f'{1000**0.1:.4f}') # much safer
+ 1.9953
Numbers of the form ``I/2.**J`` are safe across all platforms, and I often
contrive doctest examples to produce numbers of that form::
@@ -938,13 +941,13 @@ and :ref:`doctest-simple-testfile`.
Optional argument *verbose* prints lots of stuff if true, and prints only
failures if false; by default, or if ``None``, it's true if and only if ``'-v'``
- is in ``sys.argv``.
+ is in :data:`sys.argv`.
Optional argument *report* prints a summary at the end when true, else prints
nothing at the end. In verbose mode, the summary is detailed, else the summary
is very brief (in fact, empty if all tests passed).
- Optional argument *optionflags* (default value 0) takes the
+ Optional argument *optionflags* (default value ``0``) takes the
:ref:`bitwise OR <bitwise>` of option flags.
See section :ref:`doctest-options`.
@@ -1043,12 +1046,15 @@ from text files and modules with doctests:
Convert doctest tests from one or more text files to a
:class:`unittest.TestSuite`.
- The returned :class:`unittest.TestSuite` is to be run by the unittest framework
- and runs the interactive examples in each file. If an example in any file
- fails, then the synthesized unit test fails, and a :exc:`failureException`
- exception is raised showing the name of the file containing the test and a
- (sometimes approximate) line number. If all the examples in a file are
- skipped, then the synthesized unit test is also marked as skipped.
+ The returned :class:`unittest.TestSuite` is to be run by the unittest
+ framework and runs the interactive examples in each file.
+ Each file is run as a separate unit test, and each example in a file
+ is run as a :ref:`subtest <subtests>`.
+ If any example in a file fails, then the synthesized unit test fails.
+ The traceback for failure or error contains the name of the file
+ containing the test and a (sometimes approximate) line number.
+ If all the examples in a file are skipped, then the synthesized unit
+ test is also marked as skipped.
Pass one or more paths (as strings) to text files to be examined.
@@ -1078,13 +1084,14 @@ from text files and modules with doctests:
Optional argument *setUp* specifies a set-up function for the test suite.
This is called before running the tests in each file. The *setUp* function
- will be passed a :class:`DocTest` object. The setUp function can access the
- test globals as the *globs* attribute of the test passed.
+ will be passed a :class:`DocTest` object. The *setUp* function can access the
+ test globals as the :attr:`~DocTest.globs` attribute of the test passed.
Optional argument *tearDown* specifies a tear-down function for the test
suite. This is called after running the tests in each file. The *tearDown*
- function will be passed a :class:`DocTest` object. The setUp function can
- access the test globals as the *globs* attribute of the test passed.
+ function will be passed a :class:`DocTest` object. The *tearDown* function can
+ access the test globals as the :attr:`~DocTest.globs` attribute of the test
+ passed.
Optional argument *globs* is a dictionary containing the initial global
variables for the tests. A new copy of this dictionary is created for each
@@ -1105,16 +1112,22 @@ from text files and modules with doctests:
The global ``__file__`` is added to the globals provided to doctests loaded
from a text file using :func:`DocFileSuite`.
+ .. versionchanged:: next
+ Run each example as a :ref:`subtest <subtests>`.
+
.. function:: DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None, setUp=None, tearDown=None, optionflags=0, checker=None)
Convert doctest tests for a module to a :class:`unittest.TestSuite`.
- The returned :class:`unittest.TestSuite` is to be run by the unittest framework
- and runs each doctest in the module. If any of the doctests fail, then the
- synthesized unit test fails, and a :exc:`failureException` exception is raised
- showing the name of the file containing the test and a (sometimes approximate)
- line number. If all the examples in a docstring are skipped, then the
+ The returned :class:`unittest.TestSuite` is to be run by the unittest
+ framework and runs each doctest in the module.
+ Each docstring is run as a separate unit test, and each example in
+ a docstring is run as a :ref:`subtest <subtests>`.
+ If any of the doctests fail, then the synthesized unit test fails.
+ The traceback for failure or error contains the name of the file
+ containing the test and a (sometimes approximate) line number.
+ If all the examples in a docstring are skipped, then the
synthesized unit test is also marked as skipped.
Optional argument *module* provides the module to be tested. It can be a module
@@ -1123,7 +1136,7 @@ from text files and modules with doctests:
Optional argument *globs* is a dictionary containing the initial global
variables for the tests. A new copy of this dictionary is created for each
- test. By default, *globs* is a new empty dictionary.
+ test. By default, *globs* is the module's :attr:`~module.__dict__`.
Optional argument *extraglobs* specifies an extra set of global variables, which
is merged into *globs*. By default, no extra globals are used.
@@ -1132,7 +1145,7 @@ from text files and modules with doctests:
drop-in replacement) that is used to extract doctests from the module.
Optional arguments *setUp*, *tearDown*, and *optionflags* are the same as for
- function :func:`DocFileSuite` above.
+ function :func:`DocFileSuite` above, but they are called for each docstring.
This function uses the same search technique as :func:`testmod`.
@@ -1140,11 +1153,8 @@ from text files and modules with doctests:
:func:`DocTestSuite` returns an empty :class:`unittest.TestSuite` if *module*
contains no docstrings instead of raising :exc:`ValueError`.
-.. exception:: failureException
-
- When doctests which have been converted to unit tests by :func:`DocFileSuite`
- or :func:`DocTestSuite` fail, this exception is raised showing the name of
- the file containing the test and a (sometimes approximate) line number.
+ .. versionchanged:: next
+ Run each example as a :ref:`subtest <subtests>`.
Under the covers, :func:`DocTestSuite` creates a :class:`unittest.TestSuite` out
of :class:`!doctest.DocTestCase` instances, and :class:`!DocTestCase` is a
@@ -1158,15 +1168,15 @@ of :class:`!DocTestCase`.
So both ways of creating a :class:`unittest.TestSuite` run instances of
:class:`!DocTestCase`. This is important for a subtle reason: when you run
-:mod:`doctest` functions yourself, you can control the :mod:`doctest` options in
-use directly, by passing option flags to :mod:`doctest` functions. However, if
-you're writing a :mod:`unittest` framework, :mod:`unittest` ultimately controls
+:mod:`doctest` functions yourself, you can control the :mod:`!doctest` options in
+use directly, by passing option flags to :mod:`!doctest` functions. However, if
+you're writing a :mod:`unittest` framework, :mod:`!unittest` ultimately controls
when and how tests get run. The framework author typically wants to control
-:mod:`doctest` reporting options (perhaps, e.g., specified by command line
-options), but there's no way to pass options through :mod:`unittest` to
-:mod:`doctest` test runners.
+:mod:`!doctest` reporting options (perhaps, e.g., specified by command line
+options), but there's no way to pass options through :mod:`!unittest` to
+:mod:`!doctest` test runners.
-For this reason, :mod:`doctest` also supports a notion of :mod:`doctest`
+For this reason, :mod:`doctest` also supports a notion of :mod:`!doctest`
reporting flags specific to :mod:`unittest` support, via this function:
@@ -1181,12 +1191,12 @@ reporting flags specific to :mod:`unittest` support, via this function:
:mod:`unittest`: the :meth:`!runTest` method of :class:`!DocTestCase` looks at
the option flags specified for the test case when the :class:`!DocTestCase`
instance was constructed. If no reporting flags were specified (which is the
- typical and expected case), :mod:`!doctest`'s :mod:`unittest` reporting flags are
+ typical and expected case), :mod:`!doctest`'s :mod:`!unittest` reporting flags are
:ref:`bitwise ORed <bitwise>` into the option flags, and the option flags
so augmented are passed to the :class:`DocTestRunner` instance created to
run the doctest. If any reporting flags were specified when the
:class:`!DocTestCase` instance was constructed, :mod:`!doctest`'s
- :mod:`unittest` reporting flags are ignored.
+ :mod:`!unittest` reporting flags are ignored.
The value of the :mod:`unittest` reporting flags in effect before the function
was called is returned by the function.
@@ -1279,7 +1289,7 @@ DocTest Objects
.. attribute:: filename
The name of the file that this :class:`DocTest` was extracted from; or
- ``None`` if the filename is unknown, or if the :class:`DocTest` was not
+ ``None`` if the filename is unknown, or if the :class:`!DocTest` was not
extracted from a file.
@@ -1419,10 +1429,10 @@ DocTestFinder objects
The globals for each :class:`DocTest` is formed by combining *globs* and
*extraglobs* (bindings in *extraglobs* override bindings in *globs*). A new
- shallow copy of the globals dictionary is created for each :class:`DocTest`.
- If *globs* is not specified, then it defaults to the module's *__dict__*, if
- specified, or ``{}`` otherwise. If *extraglobs* is not specified, then it
- defaults to ``{}``.
+ shallow copy of the globals dictionary is created for each :class:`!DocTest`.
+ If *globs* is not specified, then it defaults to the module's
+ :attr:`~module.__dict__`, if specified, or ``{}`` otherwise.
+ If *extraglobs* is not specified, then it defaults to ``{}``.
.. _doctest-doctestparser:
@@ -1446,7 +1456,7 @@ DocTestParser objects
:class:`DocTest` object.
*globs*, *name*, *filename*, and *lineno* are attributes for the new
- :class:`DocTest` object. See the documentation for :class:`DocTest` for more
+ :class:`!DocTest` object. See the documentation for :class:`DocTest` for more
information.
@@ -1461,7 +1471,7 @@ DocTestParser objects
Divide the given string into examples and intervening text, and return them as
a list of alternating :class:`Example`\ s and strings. Line numbers for the
- :class:`Example`\ s are 0-based. The optional argument *name* is a name
+ :class:`!Example`\ s are 0-based. The optional argument *name* is a name
identifying this string, and is only used for error messages.
@@ -1501,14 +1511,14 @@ DocTestRunner objects
:class:`OutputChecker`. This comparison may be customized with a number of
option flags; see section :ref:`doctest-options` for more information. If the
option flags are insufficient, then the comparison may also be customized by
- passing a subclass of :class:`OutputChecker` to the constructor.
+ passing a subclass of :class:`!OutputChecker` to the constructor.
The test runner's display output can be controlled in two ways. First, an output
function can be passed to :meth:`run`; this function will be called
with strings that should be displayed. It defaults to ``sys.stdout.write``. If
capturing the output is not sufficient, then the display output can be also
customized by subclassing DocTestRunner, and overriding the methods
- :meth:`report_start`, :meth:`report_success`,
+ :meth:`report_skip`, :meth:`report_start`, :meth:`report_success`,
:meth:`report_unexpected_exception`, and :meth:`report_failure`.
The optional keyword argument *checker* specifies the :class:`OutputChecker`
@@ -1533,6 +1543,19 @@ DocTestRunner objects
:class:`DocTestRunner` defines the following methods:
+ .. method:: report_skip(out, test, example)
+
+ Report that the given example was skipped. This method is provided to
+ allow subclasses of :class:`DocTestRunner` to customize their output; it
+ should not be called directly.
+
+ *example* is the example about to be processed. *test* is the test
+ containing *example*. *out* is the output function that was passed to
+ :meth:`DocTestRunner.run`.
+
+ .. versionadded:: next
+
+
.. method:: report_start(out, test, example)
Report that the test runner is about to process the given example. This method
@@ -1540,7 +1563,7 @@ DocTestRunner objects
output; it should not be called directly.
*example* is the example about to be processed. *test* is the test
- *containing example*. *out* is the output function that was passed to
+ containing *example*. *out* is the output function that was passed to
:meth:`DocTestRunner.run`.
@@ -1940,7 +1963,7 @@ several options for organizing tests:
containing test cases for the named topics. These functions can be included in
the same file as the module, or separated out into a separate test file.
-* Define a ``__test__`` dictionary mapping from regression test topics to
+* Define a :attr:`~module.__test__` dictionary mapping from regression test topics to
docstrings containing test cases.
When you have placed your tests in a module, the module can itself be the test
diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst
index 4818a4944a5..8bba6700930 100644
--- a/Doc/library/hashlib.rst
+++ b/Doc/library/hashlib.rst
@@ -94,6 +94,13 @@ accessible by name via :func:`new`. See :data:`algorithms_available`.
OpenSSL does not provide we fall back to a verified implementation from
the `HACL\* project`_.
+.. deprecated-removed:: 3.15 3.19
+ The undocumented ``string`` keyword parameter in :func:`!_hashlib.new`
+ and hash-named constructors such as :func:`!_md5.md5` is deprecated.
+ Prefer passing the initial data as a positional argument for maximum
+ backwards compatibility.
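+
+   A minimal sketch of the preferred spelling::
+
+      import hashlib
+
+      # Pass the initial data positionally rather than through a keyword.
+      digest = hashlib.new('md5', b'data').hexdigest()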
+
+
Usage
-----
diff --git a/Doc/library/logging.handlers.rst b/Doc/library/logging.handlers.rst
index 63ef533e82c..8f3aa1dfdd0 100644
--- a/Doc/library/logging.handlers.rst
+++ b/Doc/library/logging.handlers.rst
@@ -352,6 +352,10 @@ module, supports rotation of disk log files.
Outputs the record to the file, catering for rollover as described
previously.
+ .. method:: shouldRollover(record)
+
+ See if the supplied record would cause the file to exceed the configured size limit.
+
.. _timed-rotating-file-handler:
TimedRotatingFileHandler
@@ -459,7 +463,11 @@ timed intervals.
.. method:: getFilesToDelete()
       Returns a list of filenames which should be deleted as part of rollover. These
       are the absolute paths of the oldest backup log files written by the handler.
+
+ .. method:: shouldRollover(record)
+
+ See if enough time has passed for a rollover to occur and if it has, compute
+ the next rollover time.
.. _socket-handler:
diff --git a/Doc/library/math.rst b/Doc/library/math.rst
index 394a462b946..c8061fb1638 100644
--- a/Doc/library/math.rst
+++ b/Doc/library/math.rst
@@ -10,8 +10,8 @@
--------------
-This module provides access to the mathematical functions defined by the C
-standard.
+This module provides access to common mathematical functions and constants,
+including those defined by the C standard.
These functions cannot be used with complex numbers; use the functions of the
same name from the :mod:`cmath` module if you require support for complex
@@ -53,6 +53,8 @@ noted otherwise, all return values are floats.
:func:`frexp(x) <frexp>` Mantissa and exponent of *x*
:func:`isclose(a, b, rel_tol, abs_tol) <isclose>` Check if the values *a* and *b* are close to each other
:func:`isfinite(x) <isfinite>` Check if *x* is neither an infinity nor a NaN
+:func:`isnormal(x) <isnormal>` Check if *x* is a normal number
+:func:`issubnormal(x) <issubnormal>` Check if *x* is a subnormal number
:func:`isinf(x) <isinf>` Check if *x* is a positive or negative infinity
:func:`isnan(x) <isnan>` Check if *x* is a NaN (not a number)
:func:`ldexp(x, i) <ldexp>` ``x * (2**i)``, inverse of function :func:`frexp`
@@ -373,6 +375,24 @@ Floating point manipulation functions
.. versionadded:: 3.2
+.. function:: isnormal(x)
+
+   Return ``True`` if *x* is a normal number, that is, a finite
+   nonzero number that is not a subnormal (see :func:`issubnormal`).
+ Return ``False`` otherwise.
+
+ .. versionadded:: next
+
+
+.. function:: issubnormal(x)
+
+   Return ``True`` if *x* is a subnormal number, that is, a finite
+   nonzero number with a magnitude smaller than the smallest positive normal
+   number (see :data:`sys.float_info.min`). Return ``False`` otherwise.
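+
+   A minimal doctest sketch; the boundary behaviour assumes IEEE 754 double
+   precision floats::
+
+      >>> import math, sys
+      >>> math.isnormal(1.0)
+      True
+      >>> math.issubnormal(sys.float_info.min / 2)
+      True
+      >>> math.issubnormal(0.0)
+      False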
+
+ .. versionadded:: next
+
+
.. function:: isinf(x)
Return ``True`` if *x* is a positive or negative infinity, and
diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst
index ecbbc1d7605..f72aee19d8f 100644
--- a/Doc/library/os.path.rst
+++ b/Doc/library/os.path.rst
@@ -408,9 +408,26 @@ the :mod:`glob` module.)
system). On Windows, this function will also resolve MS-DOS (also called 8.3)
style names such as ``C:\\PROGRA~1`` to ``C:\\Program Files``.
- If a path doesn't exist or a symlink loop is encountered, and *strict* is
- ``True``, :exc:`OSError` is raised. If *strict* is ``False`` these errors
- are ignored, and so the result might be missing or otherwise inaccessible.
+ By default, the path is evaluated up to the first component that does not
+ exist, is a symlink loop, or whose evaluation raises :exc:`OSError`.
+ All such components are appended unchanged to the existing part of the path.
+
+ Some errors that are handled this way include "access denied", "not a
+ directory", or "bad argument to internal function". Thus, the
+ resulting path may be missing or inaccessible, may still contain
+ links or loops, and may traverse non-directories.
+
+ This behavior can be modified by keyword arguments:
+
+ If *strict* is ``True``, the first error encountered when evaluating the path is
+ re-raised.
+ In particular, :exc:`FileNotFoundError` is raised if *path* does not exist,
+ or another :exc:`OSError` if it is otherwise inaccessible.
+
+ If *strict* is :py:data:`os.path.ALLOW_MISSING`, errors other than
+ :exc:`FileNotFoundError` are re-raised (as with ``strict=True``).
+ Thus, the returned path will not contain any symbolic links, but the named
+ file and some of its parent directories may be missing.
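A sketch of how the three *strict* modes differ for a path whose last components do not exist (the path below is a hypothetical example, and ``ALLOW_MISSING`` is the new value described above):

```python
import os.path

path = "/tmp/no-such-dir/file.txt"   # hypothetical: the directory does not exist

# Default (strict=False): evaluation stops at the missing component and the
# rest of the path is appended unchanged.
print(os.path.realpath(path))

# strict=True: the first error encountered is re-raised.
try:
    os.path.realpath(path, strict=True)
except FileNotFoundError as exc:
    print("strict=True:", exc)

# strict=os.path.ALLOW_MISSING: missing files are tolerated, but the result
# is guaranteed to be free of symbolic links.
print(os.path.realpath(path, strict=os.path.ALLOW_MISSING))
```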
.. note::
This function emulates the operating system's procedure for making a path
@@ -429,6 +446,15 @@ the :mod:`glob` module.)
.. versionchanged:: 3.10
The *strict* parameter was added.
+ .. versionchanged:: next
+ The :py:data:`~os.path.ALLOW_MISSING` value for the *strict* parameter
+ was added.
+
+.. data:: ALLOW_MISSING
+
+ Special value used for the *strict* argument in :func:`realpath`.
+
+ .. versionadded:: next
.. function:: relpath(path, start=os.curdir)
diff --git a/Doc/library/shelve.rst b/Doc/library/shelve.rst
index 6e74a59b82b..23a2e0c3d0c 100644
--- a/Doc/library/shelve.rst
+++ b/Doc/library/shelve.rst
@@ -75,8 +75,15 @@ Two additional methods are supported:
Write back all entries in the cache if the shelf was opened with *writeback*
set to :const:`True`. Also empty the cache and synchronize the persistent
- dictionary on disk, if feasible. This is called automatically when the shelf
- is closed with :meth:`close`.
+ dictionary on disk, if feasible. This is called automatically when
+ :meth:`reorganize` is called or the shelf is closed with :meth:`close`.
+
+.. method:: Shelf.reorganize()
+
+ Calls :meth:`sync` and attempts to shrink space used on disk by removing empty
+ space resulting from deletions.
+
+ .. versionadded:: next
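A minimal usage sketch (``'inventory.db'`` is a placeholder filename; whether any space is actually reclaimed depends on the underlying :mod:`dbm` backend, see the restriction noted below):

```python
import shelve

with shelve.open("inventory.db") as db:
    db["bulk"] = list(range(100_000))
    del db["bulk"]      # leaves unused space in the underlying database file
    db.reorganize()     # sync, then try to compact the file
```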
.. method:: Shelf.close()
@@ -116,6 +123,11 @@ Restrictions
* On macOS :mod:`dbm.ndbm` can silently corrupt the database file on updates,
which can cause hard crashes when trying to read from the database.
+* :meth:`Shelf.reorganize` may not be available for all database packages and
+ may temporarily increase resource usage (especially disk space) when called.
+ Additionally, it will never run automatically and instead needs to be called
+ explicitly.
+
.. class:: Shelf(dict, protocol=None, writeback=False, keyencoding='utf-8')
diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst
index 75fd637045d..bc89a3228f0 100644
--- a/Doc/library/socket.rst
+++ b/Doc/library/socket.rst
@@ -1492,7 +1492,7 @@ The :mod:`socket` module also offers various network-related services:
The *fds* parameter is a sequence of file descriptors.
Consult :meth:`~socket.sendmsg` for the documentation of these parameters.
- .. availability:: Unix, Windows, not WASI.
+ .. availability:: Unix, not WASI.
Unix platforms supporting :meth:`~socket.sendmsg`
and :const:`SCM_RIGHTS` mechanism.
@@ -1506,9 +1506,9 @@ The :mod:`socket` module also offers various network-related services:
Return ``(msg, list(fds), flags, addr)``.
Consult :meth:`~socket.recvmsg` for the documentation of these parameters.
- .. availability:: Unix, Windows, not WASI.
+ .. availability:: Unix, not WASI.
- Unix platforms supporting :meth:`~socket.sendmsg`
+ Unix platforms supporting :meth:`~socket.recvmsg`
and :const:`SCM_RIGHTS` mechanism.
.. versionadded:: 3.9
diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst
index 31d71031bca..f0b4b09ff10 100644
--- a/Doc/library/stdtypes.rst
+++ b/Doc/library/stdtypes.rst
@@ -1214,6 +1214,8 @@ accepts integers that meet the value restriction ``0 <= x <= 255``).
| ``s[i] = x`` | item *i* of *s* is replaced by | |
| | *x* | |
+------------------------------+--------------------------------+---------------------+
+| ``del s[i]`` | removes item *i* of *s* | |
++------------------------------+--------------------------------+---------------------+
| ``s[i:j] = t`` | slice of *s* from *i* to *j* | |
| | is replaced by the contents of | |
| | the iterable *t* | |
diff --git a/Doc/library/string.rst b/Doc/library/string.rst
index c4012483a52..23e15780075 100644
--- a/Doc/library/string.rst
+++ b/Doc/library/string.rst
@@ -328,7 +328,7 @@ The general form of a *standard format specifier* is:
sign: "+" | "-" | " "
width_and_precision: [`width_with_grouping`][`precision_with_grouping`]
width_with_grouping: [`width`][`grouping`]
- precision_with_grouping: "." [`precision`][`grouping`]
+ precision_with_grouping: "." [`precision`][`grouping`] | "." `grouping`
width: `~python-grammar:digit`+
precision: `~python-grammar:digit`+
grouping: "," | "_"
diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst
index f9cb5495e60..7cec108a5bd 100644
--- a/Doc/library/tarfile.rst
+++ b/Doc/library/tarfile.rst
@@ -255,6 +255,15 @@ The :mod:`tarfile` module defines the following exceptions:
Raised to refuse extracting a symbolic link pointing outside the destination
directory.
+.. exception:: LinkFallbackError
+
+ Raised to refuse emulating a link (hard or symbolic) by extracting another
+ archive member, when that member would be rejected by the filter.
+ The exception that was raised to reject the replacement member is available
+ as :attr:`!BaseException.__context__`.
+
+ .. versionadded:: next
+
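A hedged sketch of where the new exception can surface during a filtered extraction (``archive.tar`` and ``dest/`` are placeholders):

```python
import tarfile

try:
    with tarfile.open("archive.tar") as tf:
        # When a link cannot be created, tarfile may fall back to copying the
        # linked-to member; that replacement member is now filtered as well.
        tf.extractall(path="dest", filter="data")
except tarfile.LinkFallbackError as exc:
    print("link fallback refused:", exc)
    print("underlying rejection:", exc.__context__)
```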
The following constants are available at the module level:
@@ -1068,6 +1077,12 @@ reused in custom filters:
Implements the ``'data'`` filter.
In addition to what ``tar_filter`` does:
+ - Normalize link targets (:attr:`TarInfo.linkname`) using
+ :func:`os.path.normpath`.
+ Note that this removes internal ``..`` components, which may change the
+ meaning of the link if the path in :attr:`!TarInfo.linkname` traverses
+ symbolic links.
+
- :ref:`Refuse <tarfile-extraction-refuse>` to extract links (hard or soft)
that link to absolute paths, or ones that link outside the destination.
@@ -1099,6 +1114,10 @@ reused in custom filters:
Note that this filter does not block *all* dangerous archive features.
See :ref:`tarfile-further-verification` for details.
+ .. versionchanged:: next
+
+ Link targets are now normalized.
+
.. _tarfile-extraction-refuse:
@@ -1127,6 +1146,7 @@ Here is an incomplete list of things to consider:
* Extract to a :func:`new temporary directory <tempfile.mkdtemp>`
to prevent e.g. exploiting pre-existing links, and to make it easier to
clean up after a failed extraction.
+* Disallow symbolic links if you do not need the functionality.
* When working with untrusted data, use external (e.g. OS-level) limits on
disk, memory and CPU usage.
* Check filenames against an allow-list of characters
diff --git a/Doc/library/typing.rst b/Doc/library/typing.rst
index dd8ea3c364f..69df09c7795 100644
--- a/Doc/library/typing.rst
+++ b/Doc/library/typing.rst
@@ -3530,28 +3530,32 @@ Constant
.. data:: TYPE_CHECKING
A special constant that is assumed to be ``True`` by 3rd party static
- type checkers. It is ``False`` at runtime.
+ type checkers. It's ``False`` at runtime.
+
+ A module which is expensive to import, and which only contains types
+ used for typing annotations, can be safely imported inside an
+ ``if TYPE_CHECKING:`` block. This prevents the module from actually
+ being imported at runtime; annotations aren't eagerly evaluated
+ (see :pep:`649`) so using undefined symbols in annotations is
+ harmless--as long as you don't later examine them.
+ Your static type analysis tool will set ``TYPE_CHECKING`` to
+ ``True`` during static type analysis, which means the module will
+ be imported and the types will be checked properly during such analysis.
Usage::
if TYPE_CHECKING:
import expensive_mod
- def fun(arg: 'expensive_mod.SomeType') -> None:
+ def fun(arg: expensive_mod.SomeType) -> None:
local_var: expensive_mod.AnotherType = other_fun()
- The first type annotation must be enclosed in quotes, making it a
- "forward reference", to hide the ``expensive_mod`` reference from the
- interpreter runtime. Type annotations for local variables are not
- evaluated, so the second annotation does not need to be enclosed in quotes.
-
- .. note::
-
- If ``from __future__ import annotations`` is used,
- annotations are not evaluated at function definition time.
- Instead, they are stored as strings in ``__annotations__``.
- This makes it unnecessary to use quotes around the annotation
- (see :pep:`563`).
+ If you occasionally need to examine type annotations at runtime
+ which may contain undefined symbols, use
+ :func:`annotationlib.get_annotations` with a ``format`` parameter
+ of :attr:`annotationlib.Format.STRING` or
+ :attr:`annotationlib.Format.FORWARDREF` to safely retrieve the
+ annotations without raising :exc:`NameError`.
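For example, assuming ``fun`` from the usage snippet above and that ``expensive_mod`` was never imported at runtime, the annotations can still be inspected safely (a sketch):

```python
from annotationlib import Format, get_annotations

# FORWARDREF substitutes ForwardRef objects for names that are not defined,
# instead of raising NameError.
print(get_annotations(fun, format=Format.FORWARDREF))

# STRING returns the annotations as source-like strings.
print(get_annotations(fun, format=Format.STRING))
```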
.. versionadded:: 3.5.2
diff --git a/Doc/library/zlib.rst b/Doc/library/zlib.rst
index 75ead3c4cb1..7c5e9b086e1 100644
--- a/Doc/library/zlib.rst
+++ b/Doc/library/zlib.rst
@@ -44,6 +44,20 @@ The available exception and functions in this module are:
.. versionchanged:: 3.0
The result is always unsigned.
+.. function:: adler32_combine(adler1, adler2, len2, /)
+
+ Combine two Adler-32 checksums into one.
+
+ Given the Adler-32 checksum *adler1* of a sequence ``A`` and the
+ Adler-32 checksum *adler2* of a sequence ``B`` of length *len2*,
+ return the Adler-32 checksum of ``A`` and ``B`` concatenated.
+
+ This function is typically useful to combine Adler-32 checksums
+ that were concurrently computed. To compute checksums sequentially, use
+ :func:`adler32` with the running checksum as the ``value`` argument.
+
+ .. versionadded:: next
+
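A sketch of the intended parallel workflow (the function is new in this release):

```python
import zlib

data = b"hello world " * 1000
left, right = data[:4096], data[4096:]

# The two halves could be checksummed concurrently (threads, processes, ...).
a1 = zlib.adler32(left)
a2 = zlib.adler32(right)

# Combining gives the same result as a single sequential pass over the data.
assert zlib.adler32_combine(a1, a2, len(right)) == zlib.adler32(data)
```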
.. function:: compress(data, /, level=-1, wbits=MAX_WBITS)
Compresses the bytes in *data*, returning a bytes object containing compressed data.
@@ -136,6 +150,20 @@ The available exception and functions in this module are:
.. versionchanged:: 3.0
The result is always unsigned.
+.. function:: crc32_combine(crc1, crc2, len2, /)
+
+ Combine two CRC-32 checksums into one.
+
+ Given the CRC-32 checksum *crc1* of a sequence ``A`` and the
+ CRC-32 checksum *crc2* of a sequence ``B`` of length *len2*,
+ return the CRC-32 checksum of ``A`` and ``B`` concatenated.
+
+ This function is typically useful to combine CRC-32 checksums
+ that were concurrently computed. To compute checksums sequentially, use
+ :func:`crc32` with the running checksum as the ``value`` argument.
+
+ .. versionadded:: next
+
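The CRC-32 variant follows the same pattern (again, a sketch of the new function):

```python
import zlib

part1, part2 = b"spam" * 256, b"eggs" * 256

c1 = zlib.crc32(part1)
c2 = zlib.crc32(part2)

assert zlib.crc32_combine(c1, c2, len(part2)) == zlib.crc32(part1 + part2)
```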
.. function:: decompress(data, /, wbits=MAX_WBITS, bufsize=DEF_BUF_SIZE)
Decompresses the bytes in *data*, returning a bytes object containing the
diff --git a/Doc/reference/datamodel.rst b/Doc/reference/datamodel.rst
index 005a768f684..32a2e266262 100644
--- a/Doc/reference/datamodel.rst
+++ b/Doc/reference/datamodel.rst
@@ -1228,10 +1228,22 @@ Special attributes
:attr:`__annotations__ attributes <object.__annotations__>`.
For best practices on working with :attr:`~object.__annotations__`,
- please see :mod:`annotationlib`. Where possible, use
+ please see :mod:`annotationlib`. Use
:func:`annotationlib.get_annotations` instead of accessing this
attribute directly.
+ .. warning::
+
+ Accessing the :attr:`!__annotations__` attribute directly
+ on a class object may return annotations for the wrong class, specifically
+ in certain cases where the class, its base class, or a metaclass
+ is defined under ``from __future__ import annotations``.
+ See :pep:`749 <749#pep749-metaclasses>` for details.
+
+ This attribute does not exist on certain builtin classes. On
+ user-defined classes without ``__annotations__``, it is an
+ empty dictionary.
+
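A short sketch of the recommended access pattern (the class and its members are illustrative):

```python
import annotationlib

class Point:
    x: int
    y: int

# Preferred: handles lazy evaluation and the metaclass corner cases above.
print(annotationlib.get_annotations(Point))   # {'x': <class 'int'>, 'y': <class 'int'>}

# Discouraged: may return annotations for the wrong class in the
# situations described in the warning.
print(Point.__annotations__)
```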
.. versionchanged:: 3.14
Annotations are now :ref:`lazily evaluated <lazy-evaluation>`.
See :pep:`649`.
diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analysis.rst
index 6c4a4ea81af..b22eb4db794 100644
--- a/Doc/reference/lexical_analysis.rst
+++ b/Doc/reference/lexical_analysis.rst
@@ -489,8 +489,9 @@ String literals are described by the following lexical definitions:
.. productionlist:: python-grammar
stringliteral: [`stringprefix`](`shortstring` | `longstring`)
- stringprefix: "r" | "u" | "R" | "U" | "f" | "F"
+ stringprefix: "r" | "u" | "R" | "U" | "f" | "F" | "t" | "T"
: | "fr" | "Fr" | "fR" | "FR" | "rf" | "rF" | "Rf" | "RF"
+ : | "tr" | "Tr" | "tR" | "TR" | "rt" | "rT" | "Rt" | "RT"
shortstring: "'" `shortstringitem`* "'" | '"' `shortstringitem`* '"'
longstring: "'''" `longstringitem`* "'''" | '"""' `longstringitem`* '"""'
shortstringitem: `shortstringchar` | `stringescapeseq`
diff --git a/Doc/tools/extensions/audit_events.py b/Doc/tools/extensions/audit_events.py
index 23d82c0f441..385a58b2145 100644
--- a/Doc/tools/extensions/audit_events.py
+++ b/Doc/tools/extensions/audit_events.py
@@ -13,7 +13,7 @@ from sphinx.util import logging
from sphinx.util.docutils import SphinxDirective
if TYPE_CHECKING:
- from collections.abc import Iterator
+ from collections.abc import Iterator, Set
from sphinx.application import Sphinx
from sphinx.builders import Builder
@@ -33,7 +33,7 @@ _SYNONYMS = [
class AuditEvents:
def __init__(self) -> None:
self.events: dict[str, list[str]] = {}
- self.sources: dict[str, list[tuple[str, str]]] = {}
+ self.sources: dict[str, set[tuple[str, str]]] = {}
def __iter__(self) -> Iterator[tuple[str, list[str], tuple[str, str]]]:
for name, args in self.events.items():
@@ -47,7 +47,7 @@ class AuditEvents:
self._check_args_match(name, args)
else:
self.events[name] = args
- self.sources.setdefault(name, []).append(source)
+ self.sources.setdefault(name, set()).add(source)
def _check_args_match(self, name: str, args: list[str]) -> None:
current_args = self.events[name]
@@ -69,11 +69,11 @@ class AuditEvents:
return
def id_for(self, name) -> str:
- source_count = len(self.sources.get(name, ()))
+ source_count = len(self.sources.get(name, set()))
name_clean = re.sub(r"\W", "_", name)
return f"audit_event_{name_clean}_{source_count}"
- def rows(self) -> Iterator[tuple[str, list[str], list[tuple[str, str]]]]:
+ def rows(self) -> Iterator[tuple[str, list[str], Set[tuple[str, str]]]]:
for name in sorted(self.events.keys()):
yield name, self.events[name], self.sources[name]
@@ -218,7 +218,7 @@ class AuditEventListTransform(SphinxPostTransform):
docname: str,
name: str,
args: list[str],
- sources: list[tuple[str, str]],
+ sources: Set[tuple[str, str]],
) -> nodes.row:
row = nodes.row()
name_node = nodes.paragraph("", nodes.Text(name))
@@ -233,7 +233,7 @@ class AuditEventListTransform(SphinxPostTransform):
row += nodes.entry("", args_node)
backlinks_node = nodes.paragraph()
- backlinks = enumerate(sorted(set(sources)), start=1)
+ backlinks = enumerate(sorted(sources), start=1)
for i, (doc, label) in backlinks:
if isinstance(label, str):
ref = nodes.reference("", f"[{i}]", internal=True)
@@ -258,7 +258,7 @@ def setup(app: Sphinx):
app.connect("env-purge-doc", audit_events_purge)
app.connect("env-merge-info", audit_events_merge)
return {
- "version": "1.0",
+ "version": "2.0",
"parallel_read_safe": True,
"parallel_write_safe": True,
}
diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst
index 88e52015bdc..45e68aea5fb 100644
--- a/Doc/whatsnew/3.14.rst
+++ b/Doc/whatsnew/3.14.rst
@@ -74,7 +74,7 @@ deferred evaluation of annotations (:pep:`649`),
and a new type of interpreter that uses tail calls.
The library changes include the addition of a new :mod:`!annotationlib` module
-for introspecting and wrapping annotations (:pep:`649`),
+for introspecting and wrapping annotations (:pep:`749`),
a new :mod:`!compression.zstd` module for Zstandard support (:pep:`784`),
plus syntax highlighting in the REPL,
as well as the usual deprecations and removals,
@@ -342,15 +342,16 @@ For example the following expressions are now valid:
.. code-block:: python
try:
- release_new_sleep_token_album()
- except AlbumNotFound, SongsTooGoodToBeReleased:
- print("Sorry, no new album this year.")
+ connect_to_server()
+ except TimeoutError, ConnectionRefusedError:
+ print("Network issue encountered.")
# The same applies to except* (for exception groups):
+
try:
- release_new_sleep_token_album()
- except* AlbumNotFound, SongsTooGoodToBeReleased:
- print("Sorry, no new album this year.")
+ connect_to_server()
+ except* TimeoutError, ConnectionRefusedError:
+ print("Network issue encountered.")
Check :pep:`758` for more details.
@@ -444,6 +445,10 @@ In particular, do not read annotations directly from the namespace dictionary
attribute of type objects. Use :func:`annotationlib.get_annotate_from_class_namespace`
during class construction and :func:`annotationlib.get_annotations` afterwards.
+In previous releases, it was sometimes possible to access class annotations from
+an instance of an annotated class. This behavior was undocumented and accidental,
+and will no longer work in Python 3.14.
+
``from __future__ import annotations``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1450,7 +1455,7 @@ math
----
* Added more detailed error messages for domain errors in the module.
- (Contributed by by Charlie Zhao and Sergey B Kirpichev in :gh:`101410`.)
+ (Contributed by Charlie Zhao and Sergey B Kirpichev in :gh:`101410`.)
mimetypes
@@ -2501,6 +2506,11 @@ Changes in the Python API
See :ref:`above <whatsnew314-typing-union>` for more details.
(Contributed by Jelle Zijlstra in :gh:`105499`.)
+* The runtime behavior of annotations has changed in various ways; see
+ :ref:`above <whatsnew314-pep649>` for details. While most code that interacts
+ with annotations should continue to work, some undocumented details may behave
+ differently.
+
Build changes
=============
diff --git a/Doc/whatsnew/3.15.rst b/Doc/whatsnew/3.15.rst
index bf186c191b0..daf3e8fb6c2 100644
--- a/Doc/whatsnew/3.15.rst
+++ b/Doc/whatsnew/3.15.rst
@@ -89,6 +89,51 @@ New modules
Improved modules
================
+dbm
+---
+
+* Added new :meth:`!reorganize` methods to :mod:`dbm.dumb` and :mod:`dbm.sqlite3`
+ which allow recovering unused free space previously occupied by deleted entries.
+ (Contributed by Andrea Oliveri in :gh:`134004`.)
+
+* Add the ``'m'`` flag for :func:`dbm.gnu.open`, which allows disabling
+ the use of :manpage:`mmap(2)`.
+ This may harm performance, but improve crash tolerance.
+ (Contributed by Serhiy Storchaka in :gh:`66234`.)
+
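A brief sketch of both additions (filenames are placeholders; :mod:`dbm.gnu` and its new ``'m'`` flag are only available where GNU dbm is built):

```python
import dbm.gnu
import dbm.sqlite3

# Reclaim space left behind by deleted entries.
with dbm.sqlite3.open("cache.db", "c") as db:
    db[b"key"] = b"value" * 1000
    del db[b"key"]
    db.reorganize()

# Open a GNU dbm database without mmap: slower, but more crash tolerant.
db = dbm.gnu.open("cache.gdbm", "cm")
db.close()
```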
+difflib
+-------
+
+* Improved the styling of HTML diff pages generated by the :class:`difflib.HtmlDiff`
+ class, and migrated the output to the HTML5 standard.
+ (Contributed by Jiahao Li in :gh:`134580`.)
+
+
+math
+----
+
+* Add :func:`math.isnormal` and :func:`math.issubnormal` functions.
+ (Contributed by Sergey B Kirpichev in :gh:`132908`.)
+
+
+os.path
+-------
+
+* The *strict* parameter to :func:`os.path.realpath` accepts a new value,
+ :data:`os.path.ALLOW_MISSING`.
+ If used, errors other than :exc:`FileNotFoundError` will be re-raised;
+ the resulting path can be missing but it will be free of symlinks.
+ (Contributed by Petr Viktorin for :cve:`2025-4517`.)
+
+
+shelve
+------
+
+* Added a new :meth:`!reorganize` method to :mod:`shelve`, used to recover unused free
+ space previously occupied by deleted entries.
+ (Contributed by Andrea Oliveri in :gh:`134004`.)
+
+
ssl
---
@@ -97,6 +142,38 @@ ssl
(Contributed by Will Childs-Klein in :gh:`133624`.)
+tarfile
+-------
+
+* :func:`~tarfile.data_filter` now normalizes symbolic link targets in order to
+ avoid path traversal attacks.
+ (Contributed by Petr Viktorin in :gh:`127987` and :cve:`2025-4138`.)
+* :func:`~tarfile.TarFile.extractall` now skips fixing up directory attributes
+ when a directory was removed or replaced by another kind of file.
+ (Contributed by Petr Viktorin in :gh:`127987` and :cve:`2024-12718`.)
+* :func:`~tarfile.TarFile.extract` and :func:`~tarfile.TarFile.extractall`
+ now (re-)apply the extraction filter when substituting a link (hard or
+ symbolic) with a copy of another archive member, and when fixing up
+ directory attributes.
+ The former raises a new exception, :exc:`~tarfile.LinkFallbackError`.
+ (Contributed by Petr Viktorin for :cve:`2025-4330` and :cve:`2024-12718`.)
+* :func:`~tarfile.TarFile.extract` and :func:`~tarfile.TarFile.extractall`
+ no longer extract rejected members when
+ :attr:`~tarfile.TarFile.errorlevel` is zero.
+ (Contributed by Matt Prodani and Petr Viktorin in :gh:`112887`
+ and :cve:`2025-4435`.)
+
+
+zlib
+----
+
+* Allow combining two Adler-32 checksums via :func:`~zlib.adler32_combine`.
+ (Contributed by Callum Attryde and Bénédikt Tran in :gh:`134635`.)
+
+* Allow combining two CRC-32 checksums via :func:`~zlib.crc32_combine`.
+ (Contributed by Bénédikt Tran in :gh:`134635`.)
+
+
.. Add improved modules above alphabetically, not here at the end.
Optimizations
@@ -112,8 +189,20 @@ module_name
Deprecated
==========
-* module_name:
- TODO
+hashlib
+-------
+
+* In hash function constructors such as :func:`~hashlib.new` or the
+ direct hash-named constructors such as :func:`~hashlib.md5` and
+ :func:`~hashlib.sha256`, the optional initial data parameter could
+ also be passed as a keyword argument named ``data=`` or ``string=``,
+ depending on the :mod:`hashlib` implementation.
+
+ Support for the ``string`` keyword argument name is now deprecated and
+ is slated for removal in Python 3.19. Prefer passing the initial data as
+ a positional argument for maximum backwards compatibility.
+
+ (Contributed by Bénédikt Tran in :gh:`134978`.)
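In practice the migration is a matter of spelling; a sketch of the three forms as described above:

```python
import hashlib

hashlib.sha256(b"payload")            # preferred: pass the initial data positionally
hashlib.sha256(data=b"payload")       # keyword spelling that remains supported
hashlib.sha256(string=b"payload")     # deprecated; removal planned for Python 3.19
```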
.. Add deprecations above alphabetically, not here at the end.
@@ -121,6 +210,14 @@ Deprecated
Removed
=======
+ctypes
+------
+
+* Removed the undocumented function :func:`!ctypes.SetPointerType`,
+ which has been deprecated since Python 3.13.
+ (Contributed by Bénédikt Tran in :gh:`133866`.)
+
+
http.server
-----------
@@ -196,7 +293,17 @@ C API changes
New features
------------
-* TODO
+* Add :c:func:`PySys_GetAttr`, :c:func:`PySys_GetAttrString`,
+ :c:func:`PySys_GetOptionalAttr`, and :c:func:`PySys_GetOptionalAttrString`
+ functions as replacements for :c:func:`PySys_GetObject`.
+ (Contributed by Serhiy Storchaka in :gh:`108512`.)
+
+* Add :c:func:`PyUnicodeWriter_WriteASCII` function to write an ASCII string
+ into a :c:type:`PyUnicodeWriter`. The function is faster than
+ :c:func:`PyUnicodeWriter_WriteUTF8`, but has an undefined behavior if the
+ input string contains non-ASCII characters.
+ (Contributed by Victor Stinner in :gh:`133968`.)
+
Porting to Python 3.15
----------------------
diff --git a/Include/abstract.h b/Include/abstract.h
index b9199fc03a3..80f3298701d 100644
--- a/Include/abstract.h
+++ b/Include/abstract.h
@@ -138,7 +138,12 @@ extern "C" {
Delete attribute named attr_name, for object o. Returns
-1 on failure.
- This is the equivalent of the Python statement: del o.attr_name. */
+ This is the equivalent of the Python statement: del o.attr_name.
+
+ Implemented as a macro in the limited C API 3.12 and older. */
+#if defined(Py_LIMITED_API) && Py_LIMITED_API+0 < 0x030d0000
+# define PyObject_DelAttrString(O, A) PyObject_SetAttrString((O), (A), NULL)
+#endif
/* Implemented elsewhere:
@@ -147,7 +152,12 @@ extern "C" {
Delete attribute named attr_name, for object o. Returns -1
on failure. This is the equivalent of the Python
- statement: del o.attr_name. */
+ statement: del o.attr_name.
+
+ Implemented as a macro in the limited C API 3.12 and older. */
+#if defined(Py_LIMITED_API) && Py_LIMITED_API+0 < 0x030d0000
+# define PyObject_DelAttr(O, A) PyObject_SetAttr((O), (A), NULL)
+#endif
/* Implemented elsewhere:
diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h
index 136f5d5c5f8..3d0414f5291 100644
--- a/Include/cpython/unicodeobject.h
+++ b/Include/cpython/unicodeobject.h
@@ -478,6 +478,10 @@ PyAPI_FUNC(int) PyUnicodeWriter_WriteUTF8(
PyUnicodeWriter *writer,
const char *str,
Py_ssize_t size);
+PyAPI_FUNC(int) PyUnicodeWriter_WriteASCII(
+ PyUnicodeWriter *writer,
+ const char *str,
+ Py_ssize_t size);
PyAPI_FUNC(int) PyUnicodeWriter_WriteWideChar(
PyUnicodeWriter *writer,
const wchar_t *str,
diff --git a/Include/internal/mimalloc/mimalloc/internal.h b/Include/internal/mimalloc/mimalloc/internal.h
index 71b7ea702d6..a7daa3a40a4 100644
--- a/Include/internal/mimalloc/mimalloc/internal.h
+++ b/Include/internal/mimalloc/mimalloc/internal.h
@@ -634,10 +634,10 @@ static inline mi_block_t* mi_block_nextx( const void* null, const mi_block_t* bl
mi_track_mem_defined(block,sizeof(mi_block_t));
mi_block_t* next;
#ifdef MI_ENCODE_FREELIST
- next = (mi_block_t*)mi_ptr_decode(null, mi_atomic_load_relaxed(&block->next), keys);
+ next = (mi_block_t*)mi_ptr_decode(null, mi_atomic_load_relaxed((_Atomic(mi_encoded_t)*)&block->next), keys);
#else
MI_UNUSED(keys); MI_UNUSED(null);
- next = (mi_block_t*)mi_atomic_load_relaxed(&block->next);
+ next = (mi_block_t*)mi_atomic_load_relaxed((_Atomic(mi_encoded_t)*)&block->next);
#endif
mi_track_mem_noaccess(block,sizeof(mi_block_t));
return next;
diff --git a/Include/internal/mimalloc/mimalloc/types.h b/Include/internal/mimalloc/mimalloc/types.h
index 4f77bd7bc52..a04169f7fb8 100644
--- a/Include/internal/mimalloc/mimalloc/types.h
+++ b/Include/internal/mimalloc/mimalloc/types.h
@@ -50,6 +50,32 @@ terms of the MIT license. A copy of the license can be found in the file
#define mi_decl_cache_align
#endif
+#if (MI_DEBUG)
+#if defined(_MSC_VER)
+#define mi_decl_noreturn __declspec(noreturn)
+#elif (defined(__GNUC__) && (__GNUC__ >= 3)) || defined(__clang__)
+#define mi_decl_noreturn __attribute__((__noreturn__))
+#else
+#define mi_decl_noreturn
+#endif
+
+/*
+ * 'cold' attribute seems to have been fully supported since GCC 4.x.
+ * See https://github.com/gcc-mirror/gcc/commit/52bf96d2f299e9e6.
+ */
+#if (defined(__GNUC__) && (__GNUC__ >= 4)) || defined(__clang__)
+#define mi_decl_cold __attribute__((cold))
+#else
+#define mi_decl_cold
+#endif
+
+#if (defined(__GNUC__) && defined(__THROW))
+#define mi_decl_throw __THROW
+#else
+#define mi_decl_throw
+#endif
+#endif
+
// ------------------------------------------------------
// Variants
// ------------------------------------------------------
@@ -582,7 +608,8 @@ struct mi_heap_s {
#if (MI_DEBUG)
// use our own assertion to print without memory allocation
-void _mi_assert_fail(const char* assertion, const char* fname, unsigned int line, const char* func );
+mi_decl_noreturn mi_decl_cold mi_decl_throw
+void _mi_assert_fail(const char* assertion, const char* fname, unsigned int line, const char* func);
#define mi_assert(expr) ((expr) ? (void)0 : _mi_assert_fail(#expr,__FILE__,__LINE__,__func__))
#else
#define mi_assert(x)
diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h
index 3d8247df31c..092feeb40b0 100644
--- a/Include/internal/pycore_ceval.h
+++ b/Include/internal/pycore_ceval.h
@@ -353,6 +353,8 @@ PyAPI_FUNC(_PyStackRef) _PyFloat_FromDouble_ConsumeInputs(_PyStackRef left, _PyS
extern int _PyRunRemoteDebugger(PyThreadState *tstate);
#endif
+_PyStackRef _PyForIter_NextWithIndex(PyObject *seq, _PyStackRef index);
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/internal/pycore_code.h b/Include/internal/pycore_code.h
index 439989c60f6..8e1415f27b6 100644
--- a/Include/internal/pycore_code.h
+++ b/Include/internal/pycore_code.h
@@ -313,7 +313,7 @@ extern void _Py_Specialize_CompareOp(_PyStackRef lhs, _PyStackRef rhs,
_Py_CODEUNIT *instr, int oparg);
extern void _Py_Specialize_UnpackSequence(_PyStackRef seq, _Py_CODEUNIT *instr,
int oparg);
-extern void _Py_Specialize_ForIter(_PyStackRef iter, _Py_CODEUNIT *instr, int oparg);
+extern void _Py_Specialize_ForIter(_PyStackRef iter, _PyStackRef null_or_index, _Py_CODEUNIT *instr, int oparg);
extern void _Py_Specialize_Send(_PyStackRef receiver, _Py_CODEUNIT *instr);
extern void _Py_Specialize_ToBool(_PyStackRef value, _Py_CODEUNIT *instr);
extern void _Py_Specialize_ContainsOp(_PyStackRef value, _Py_CODEUNIT *instr);
diff --git a/Include/internal/pycore_compile.h b/Include/internal/pycore_compile.h
index aecc50be1e6..c18e04bf67a 100644
--- a/Include/internal/pycore_compile.h
+++ b/Include/internal/pycore_compile.h
@@ -95,6 +95,7 @@ typedef enum {
enum _PyCompile_FBlockType {
COMPILE_FBLOCK_WHILE_LOOP,
COMPILE_FBLOCK_FOR_LOOP,
+ COMPILE_FBLOCK_ASYNC_FOR_LOOP,
COMPILE_FBLOCK_TRY_EXCEPT,
COMPILE_FBLOCK_FINALLY_TRY,
COMPILE_FBLOCK_FINALLY_END,
diff --git a/Include/internal/pycore_crossinterp.h b/Include/internal/pycore_crossinterp.h
index 12729274138..713ddc66ba7 100644
--- a/Include/internal/pycore_crossinterp.h
+++ b/Include/internal/pycore_crossinterp.h
@@ -317,7 +317,9 @@ typedef enum error_code {
_PyXI_ERR_ALREADY_RUNNING = -4,
_PyXI_ERR_MAIN_NS_FAILURE = -5,
_PyXI_ERR_APPLY_NS_FAILURE = -6,
- _PyXI_ERR_NOT_SHAREABLE = -7,
+ _PyXI_ERR_PRESERVE_FAILURE = -7,
+ _PyXI_ERR_EXC_PROPAGATION_FAILURE = -8,
+ _PyXI_ERR_NOT_SHAREABLE = -9,
} _PyXI_errcode;
@@ -350,16 +352,33 @@ typedef struct xi_session _PyXI_session;
PyAPI_FUNC(_PyXI_session *) _PyXI_NewSession(void);
PyAPI_FUNC(void) _PyXI_FreeSession(_PyXI_session *);
+typedef struct {
+ PyObject *preserved;
+ PyObject *excinfo;
+ _PyXI_errcode errcode;
+} _PyXI_session_result;
+PyAPI_FUNC(void) _PyXI_ClearResult(_PyXI_session_result *);
+
PyAPI_FUNC(int) _PyXI_Enter(
_PyXI_session *session,
PyInterpreterState *interp,
- PyObject *nsupdates);
-PyAPI_FUNC(void) _PyXI_Exit(_PyXI_session *session);
-
-PyAPI_FUNC(PyObject *) _PyXI_GetMainNamespace(_PyXI_session *);
-
-PyAPI_FUNC(PyObject *) _PyXI_ApplyCapturedException(_PyXI_session *session);
-PyAPI_FUNC(int) _PyXI_HasCapturedException(_PyXI_session *session);
+ PyObject *nsupdates,
+ _PyXI_session_result *);
+PyAPI_FUNC(int) _PyXI_Exit(
+ _PyXI_session *,
+ _PyXI_errcode,
+ _PyXI_session_result *);
+
+PyAPI_FUNC(PyObject *) _PyXI_GetMainNamespace(
+ _PyXI_session *,
+ _PyXI_errcode *);
+
+PyAPI_FUNC(int) _PyXI_Preserve(
+ _PyXI_session *,
+ const char *,
+ PyObject *,
+ _PyXI_errcode *);
+PyAPI_FUNC(PyObject *) _PyXI_GetPreserved(_PyXI_session_result *, const char *);
/*************/
diff --git a/Include/internal/pycore_global_objects_fini_generated.h b/Include/internal/pycore_global_objects_fini_generated.h
index 356bcaa7c35..e118b86db50 100644
--- a/Include/internal/pycore_global_objects_fini_generated.h
+++ b/Include/internal/pycore_global_objects_fini_generated.h
@@ -892,6 +892,7 @@ _PyStaticObjects_CheckRefcnt(PyInterpreterState *interp) {
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(data));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(database));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(day));
+ _PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(debug));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(decode));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(decoder));
_PyStaticObject_CheckRefcnt((PyObject *)&_Py_ID(default));
diff --git a/Include/internal/pycore_global_strings.h b/Include/internal/pycore_global_strings.h
index aebe798031c..36f3d23d095 100644
--- a/Include/internal/pycore_global_strings.h
+++ b/Include/internal/pycore_global_strings.h
@@ -383,6 +383,7 @@ struct _Py_global_strings {
STRUCT_FOR_ID(data)
STRUCT_FOR_ID(database)
STRUCT_FOR_ID(day)
+ STRUCT_FOR_ID(debug)
STRUCT_FOR_ID(decode)
STRUCT_FOR_ID(decoder)
STRUCT_FOR_ID(default)
diff --git a/Include/internal/pycore_interp_structs.h b/Include/internal/pycore_interp_structs.h
index 8a29c533b99..f25f5847b3b 100644
--- a/Include/internal/pycore_interp_structs.h
+++ b/Include/internal/pycore_interp_structs.h
@@ -677,8 +677,11 @@ struct _Py_interp_cached_objects {
/* object.__reduce__ */
PyObject *objreduce;
+#ifndef Py_GIL_DISABLED
+ /* resolve_slotdups() */
PyObject *type_slots_pname;
pytype_slotdef *type_slots_ptrs[MAX_EQUIV];
+#endif
/* TypeVar and related types */
PyTypeObject *generic_type;
diff --git a/Include/internal/pycore_lock.h b/Include/internal/pycore_lock.h
index 7484b05d7f2..32b60cc33a2 100644
--- a/Include/internal/pycore_lock.h
+++ b/Include/internal/pycore_lock.h
@@ -48,6 +48,9 @@ typedef enum _PyLockFlags {
// Handle signals if interrupted while waiting on the lock.
_PY_LOCK_HANDLE_SIGNALS = 2,
+
+ // Fail if interrupted by a signal while waiting on the lock.
+ _PY_FAIL_IF_INTERRUPTED = 4,
} _PyLockFlags;
// Lock a mutex with an optional timeout and additional options. See
diff --git a/Include/internal/pycore_magic_number.h b/Include/internal/pycore_magic_number.h
index 3fd56c346b9..cd1fc873623 100644
--- a/Include/internal/pycore_magic_number.h
+++ b/Include/internal/pycore_magic_number.h
@@ -279,6 +279,8 @@ Known values:
Python 3.14b1 3624 (Don't optimize LOAD_FAST when local is killed by DELETE_FAST)
Python 3.15a0 3650 (Initial version)
Python 3.15a1 3651 (Simplify LOAD_CONST)
+ Python 3.15a1 3652 (Virtual iterators)
+
Python 3.16 will start with 3700
@@ -291,7 +293,7 @@ PC/launcher.c must also be updated.
*/
-#define PYC_MAGIC_NUMBER 3651
+#define PYC_MAGIC_NUMBER 3652
/* This is equivalent to converting PYC_MAGIC_NUMBER to 2 bytes
(little-endian) and then appending b'\r\n'. */
#define PYC_MAGIC_NUMBER_TOKEN \
diff --git a/Include/internal/pycore_modsupport.h b/Include/internal/pycore_modsupport.h
index 614e9f93751..d90f42e9cd8 100644
--- a/Include/internal/pycore_modsupport.h
+++ b/Include/internal/pycore_modsupport.h
@@ -27,9 +27,8 @@ PyAPI_FUNC(int) _PyArg_NoKeywords(const char *funcname, PyObject *kwargs);
// Export for 'zlib' shared extension
PyAPI_FUNC(int) _PyArg_CheckPositional(const char *, Py_ssize_t,
Py_ssize_t, Py_ssize_t);
-#define _Py_ANY_VARARGS(n) ((n) == PY_SSIZE_T_MAX)
#define _PyArg_CheckPositional(funcname, nargs, min, max) \
- ((!_Py_ANY_VARARGS(max) && (min) <= (nargs) && (nargs) <= (max)) \
+ (((min) <= (nargs) && (nargs) <= (max)) \
|| _PyArg_CheckPositional((funcname), (nargs), (min), (max)))
extern PyObject ** _Py_VaBuildStack(
diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h
index b7e162c8abc..50225623fe5 100644
--- a/Include/internal/pycore_object.h
+++ b/Include/internal/pycore_object.h
@@ -313,7 +313,7 @@ extern int _PyDict_CheckConsistency(PyObject *mp, int check_content);
// Fast inlined version of PyType_HasFeature()
static inline int
_PyType_HasFeature(PyTypeObject *type, unsigned long feature) {
- return ((FT_ATOMIC_LOAD_ULONG_RELAXED(type->tp_flags) & feature) != 0);
+ return ((type->tp_flags) & feature) != 0;
}
extern void _PyType_InitCache(PyInterpreterState *interp);
@@ -897,6 +897,9 @@ extern PyObject *_PyType_LookupRefAndVersion(PyTypeObject *, PyObject *,
extern unsigned int
_PyType_LookupStackRefAndVersion(PyTypeObject *type, PyObject *name, _PyStackRef *out);
+extern int _PyObject_GetMethodStackRef(PyThreadState *ts, PyObject *obj,
+ PyObject *name, _PyStackRef *method);
+
// Cache the provided init method in the specialization cache of type if the
// provided type version matches the current version of the type.
//
@@ -1007,6 +1010,8 @@ enum _PyAnnotateFormat {
_Py_ANNOTATE_FORMAT_STRING = 4,
};
+int _PyObject_SetDict(PyObject *obj, PyObject *value);
+
#ifdef __cplusplus
}
#endif
diff --git a/Include/internal/pycore_opcode_metadata.h b/Include/internal/pycore_opcode_metadata.h
index 39d6a912a54..00e918cb8f0 100644
--- a/Include/internal/pycore_opcode_metadata.h
+++ b/Include/internal/pycore_opcode_metadata.h
@@ -205,15 +205,15 @@ int _PyOpcode_num_popped(int opcode, int oparg) {
case FORMAT_WITH_SPEC:
return 2;
case FOR_ITER:
- return 1;
+ return 2;
case FOR_ITER_GEN:
- return 1;
+ return 2;
case FOR_ITER_LIST:
- return 1;
+ return 2;
case FOR_ITER_RANGE:
- return 1;
+ return 2;
case FOR_ITER_TUPLE:
- return 1;
+ return 2;
case GET_AITER:
return 1;
case GET_ANEXT:
@@ -239,11 +239,11 @@ int _PyOpcode_num_popped(int opcode, int oparg) {
case INSTRUMENTED_END_ASYNC_FOR:
return 2;
case INSTRUMENTED_END_FOR:
- return 2;
+ return 3;
case INSTRUMENTED_END_SEND:
return 2;
case INSTRUMENTED_FOR_ITER:
- return 1;
+ return 2;
case INSTRUMENTED_INSTRUCTION:
return 0;
case INSTRUMENTED_JUMP_BACKWARD:
@@ -257,7 +257,7 @@ int _PyOpcode_num_popped(int opcode, int oparg) {
case INSTRUMENTED_NOT_TAKEN:
return 0;
case INSTRUMENTED_POP_ITER:
- return 1;
+ return 2;
case INSTRUMENTED_POP_JUMP_IF_FALSE:
return 1;
case INSTRUMENTED_POP_JUMP_IF_NONE:
@@ -395,7 +395,7 @@ int _PyOpcode_num_popped(int opcode, int oparg) {
case POP_EXCEPT:
return 1;
case POP_ITER:
- return 1;
+ return 2;
case POP_JUMP_IF_FALSE:
return 1;
case POP_JUMP_IF_NONE:
@@ -688,15 +688,15 @@ int _PyOpcode_num_pushed(int opcode, int oparg) {
case FORMAT_WITH_SPEC:
return 1;
case FOR_ITER:
- return 2;
+ return 3;
case FOR_ITER_GEN:
- return 1;
- case FOR_ITER_LIST:
return 2;
+ case FOR_ITER_LIST:
+ return 3;
case FOR_ITER_RANGE:
- return 2;
+ return 3;
case FOR_ITER_TUPLE:
- return 2;
+ return 3;
case GET_AITER:
return 1;
case GET_ANEXT:
@@ -704,7 +704,7 @@ int _PyOpcode_num_pushed(int opcode, int oparg) {
case GET_AWAITABLE:
return 1;
case GET_ITER:
- return 1;
+ return 2;
case GET_LEN:
return 2;
case GET_YIELD_FROM_ITER:
@@ -722,11 +722,11 @@ int _PyOpcode_num_pushed(int opcode, int oparg) {
case INSTRUMENTED_END_ASYNC_FOR:
return 0;
case INSTRUMENTED_END_FOR:
- return 1;
+ return 2;
case INSTRUMENTED_END_SEND:
return 1;
case INSTRUMENTED_FOR_ITER:
- return 2;
+ return 3;
case INSTRUMENTED_INSTRUCTION:
return 0;
case INSTRUMENTED_JUMP_BACKWARD:
@@ -1157,7 +1157,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[267] = {
[FOR_ITER_GEN] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_DEOPT_FLAG },
[FOR_ITER_LIST] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_DEOPT_FLAG | HAS_EXIT_FLAG | HAS_ESCAPES_FLAG },
[FOR_ITER_RANGE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EXIT_FLAG | HAS_ERROR_FLAG },
- [FOR_ITER_TUPLE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EXIT_FLAG | HAS_ESCAPES_FLAG },
+ [FOR_ITER_TUPLE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_EXIT_FLAG },
[GET_AITER] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[GET_ANEXT] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG },
[GET_AWAITABLE] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
@@ -1242,7 +1242,7 @@ const struct opcode_metadata _PyOpcode_opcode_metadata[267] = {
[NOP] = { true, INSTR_FMT_IX, HAS_PURE_FLAG },
[NOT_TAKEN] = { true, INSTR_FMT_IX, HAS_PURE_FLAG },
[POP_EXCEPT] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG },
- [POP_ITER] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG | HAS_PURE_FLAG },
+ [POP_ITER] = { true, INSTR_FMT_IX, HAS_ESCAPES_FLAG },
[POP_JUMP_IF_FALSE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG },
[POP_JUMP_IF_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ESCAPES_FLAG },
[POP_JUMP_IF_NOT_NONE] = { true, INSTR_FMT_IBC, HAS_ARG_FLAG | HAS_JUMP_FLAG | HAS_ESCAPES_FLAG },
@@ -1453,7 +1453,7 @@ _PyOpcode_macro_expansion[256] = {
[NOP] = { .nuops = 1, .uops = { { _NOP, OPARG_SIMPLE, 0 } } },
[NOT_TAKEN] = { .nuops = 1, .uops = { { _NOP, OPARG_SIMPLE, 0 } } },
[POP_EXCEPT] = { .nuops = 1, .uops = { { _POP_EXCEPT, OPARG_SIMPLE, 0 } } },
- [POP_ITER] = { .nuops = 1, .uops = { { _POP_TOP, OPARG_SIMPLE, 0 } } },
+ [POP_ITER] = { .nuops = 1, .uops = { { _POP_ITER, OPARG_SIMPLE, 0 } } },
[POP_JUMP_IF_FALSE] = { .nuops = 1, .uops = { { _POP_JUMP_IF_FALSE, OPARG_REPLACED, 1 } } },
[POP_JUMP_IF_NONE] = { .nuops = 2, .uops = { { _IS_NONE, OPARG_SIMPLE, 1 }, { _POP_JUMP_IF_TRUE, OPARG_REPLACED, 1 } } },
[POP_JUMP_IF_NOT_NONE] = { .nuops = 2, .uops = { { _IS_NONE, OPARG_SIMPLE, 1 }, { _POP_JUMP_IF_FALSE, OPARG_REPLACED, 1 } } },
diff --git a/Include/internal/pycore_runtime_init_generated.h b/Include/internal/pycore_runtime_init_generated.h
index 0fa1fa5af99..d172cc1485d 100644
--- a/Include/internal/pycore_runtime_init_generated.h
+++ b/Include/internal/pycore_runtime_init_generated.h
@@ -890,6 +890,7 @@ extern "C" {
INIT_ID(data), \
INIT_ID(database), \
INIT_ID(day), \
+ INIT_ID(debug), \
INIT_ID(decode), \
INIT_ID(decoder), \
INIT_ID(default), \
diff --git a/Include/internal/pycore_stackref.h b/Include/internal/pycore_stackref.h
index 40ec00c8119..f2ecc30b053 100644
--- a/Include/internal/pycore_stackref.h
+++ b/Include/internal/pycore_stackref.h
@@ -232,6 +232,9 @@ extern intptr_t PyStackRef_UntagInt(_PyStackRef ref);
extern _PyStackRef PyStackRef_TagInt(intptr_t i);
+/* Increments a tagged int, but does not check for overflow */
+extern _PyStackRef PyStackRef_IncrementTaggedIntNoOverflow(_PyStackRef ref);
+
extern bool
PyStackRef_IsNullOrInt(_PyStackRef ref);
@@ -239,11 +242,12 @@ PyStackRef_IsNullOrInt(_PyStackRef ref);
#define Py_INT_TAG 3
#define Py_TAG_REFCNT 1
+#define Py_TAG_BITS 3
static inline bool
PyStackRef_IsTaggedInt(_PyStackRef i)
{
- return (i.bits & Py_INT_TAG) == Py_INT_TAG;
+ return (i.bits & Py_TAG_BITS) == Py_INT_TAG;
}
static inline _PyStackRef
@@ -262,12 +266,21 @@ PyStackRef_UntagInt(_PyStackRef i)
}
+static inline _PyStackRef
+PyStackRef_IncrementTaggedIntNoOverflow(_PyStackRef ref)
+{
+ assert((ref.bits & Py_TAG_BITS) == Py_INT_TAG); // Is tagged int
+ assert((ref.bits & (~Py_TAG_BITS)) != (INT_MAX & (~Py_TAG_BITS))); // Isn't about to overflow
+ return (_PyStackRef){ .bits = ref.bits + 4 };
+}
+
+#define PyStackRef_IsDeferredOrTaggedInt(ref) (((ref).bits & Py_TAG_REFCNT) != 0)
+
#ifdef Py_GIL_DISABLED
#define Py_TAG_DEFERRED Py_TAG_REFCNT
#define Py_TAG_PTR ((uintptr_t)0)
-#define Py_TAG_BITS ((uintptr_t)1)
static const _PyStackRef PyStackRef_NULL = { .bits = Py_TAG_DEFERRED};
@@ -379,7 +392,7 @@ PyStackRef_FromPyObjectBorrow(PyObject *obj)
do { \
_PyStackRef _close_tmp = (REF); \
assert(!PyStackRef_IsNull(_close_tmp)); \
- if (!PyStackRef_IsDeferred(_close_tmp)) { \
+ if (!PyStackRef_IsDeferredOrTaggedInt(_close_tmp)) { \
Py_DECREF(PyStackRef_AsPyObjectBorrow(_close_tmp)); \
} \
} while (0)
@@ -395,7 +408,7 @@ static inline _PyStackRef
PyStackRef_DUP(_PyStackRef stackref)
{
assert(!PyStackRef_IsNull(stackref));
- if (PyStackRef_IsDeferred(stackref)) {
+ if (PyStackRef_IsDeferredOrTaggedInt(stackref)) {
return stackref;
}
Py_INCREF(PyStackRef_AsPyObjectBorrow(stackref));
@@ -442,7 +455,6 @@ PyStackRef_AsStrongReference(_PyStackRef stackref)
/* References to immortal objects always have their tag bit set to Py_TAG_REFCNT
* as they can (must) have their reclamation deferred */
-#define Py_TAG_BITS 3
#if _Py_IMMORTAL_FLAGS != Py_TAG_REFCNT
# error "_Py_IMMORTAL_FLAGS != Py_TAG_REFCNT"
#endif
@@ -678,7 +690,13 @@ PyStackRef_XCLOSE(_PyStackRef ref)
#endif // !defined(Py_GIL_DISABLED) && defined(Py_STACKREF_DEBUG)
-#define PyStackRef_TYPE(stackref) Py_TYPE(PyStackRef_AsPyObjectBorrow(stackref))
+static inline PyTypeObject *
+PyStackRef_TYPE(_PyStackRef stackref) {
+ if (PyStackRef_IsTaggedInt(stackref)) {
+ return &PyLong_Type;
+ }
+ return Py_TYPE(PyStackRef_AsPyObjectBorrow(stackref));
+}
// Converts a PyStackRef back to a PyObject *, converting the
// stackref to a new reference.
@@ -686,42 +704,30 @@ PyStackRef_XCLOSE(_PyStackRef ref)
// StackRef type checks
-static inline bool
-PyStackRef_GenCheck(_PyStackRef stackref)
-{
- return PyGen_Check(PyStackRef_AsPyObjectBorrow(stackref));
-}
+#define STACKREF_CHECK_FUNC(T) \
+ static inline bool \
+ PyStackRef_ ## T ## Check(_PyStackRef stackref) { \
+ if (PyStackRef_IsTaggedInt(stackref)) { \
+ return false; \
+ } \
+ return Py ## T ## _Check(PyStackRef_AsPyObjectBorrow(stackref)); \
+ }
-static inline bool
-PyStackRef_BoolCheck(_PyStackRef stackref)
-{
- return PyBool_Check(PyStackRef_AsPyObjectBorrow(stackref));
-}
+STACKREF_CHECK_FUNC(Gen)
+STACKREF_CHECK_FUNC(Bool)
+STACKREF_CHECK_FUNC(ExceptionInstance)
+STACKREF_CHECK_FUNC(Code)
+STACKREF_CHECK_FUNC(Function)
static inline bool
PyStackRef_LongCheck(_PyStackRef stackref)
{
+ if (PyStackRef_IsTaggedInt(stackref)) {
+ return true;
+ }
return PyLong_Check(PyStackRef_AsPyObjectBorrow(stackref));
}
-static inline bool
-PyStackRef_ExceptionInstanceCheck(_PyStackRef stackref)
-{
- return PyExceptionInstance_Check(PyStackRef_AsPyObjectBorrow(stackref));
-}
-
-static inline bool
-PyStackRef_CodeCheck(_PyStackRef stackref)
-{
- return PyCode_Check(PyStackRef_AsPyObjectBorrow(stackref));
-}
-
-static inline bool
-PyStackRef_FunctionCheck(_PyStackRef stackref)
-{
- return PyFunction_Check(PyStackRef_AsPyObjectBorrow(stackref));
-}
-
static inline void
_PyThreadState_PushCStackRef(PyThreadState *tstate, _PyCStackRef *ref)
{
diff --git a/Include/internal/pycore_sysmodule.h b/Include/internal/pycore_sysmodule.h
index 008a2da0d04..347b0a7a790 100644
--- a/Include/internal/pycore_sysmodule.h
+++ b/Include/internal/pycore_sysmodule.h
@@ -8,11 +8,6 @@ extern "C" {
# error "this header requires Py_BUILD_CORE define"
#endif
-PyAPI_FUNC(int) _PySys_GetOptionalAttr(PyObject *, PyObject **);
-PyAPI_FUNC(int) _PySys_GetOptionalAttrString(const char *, PyObject **);
-PyAPI_FUNC(PyObject *) _PySys_GetRequiredAttr(PyObject *);
-PyAPI_FUNC(PyObject *) _PySys_GetRequiredAttrString(const char *);
-
// Export for '_pickle' shared extension
PyAPI_FUNC(size_t) _PySys_GetSizeOf(PyObject *);
diff --git a/Include/internal/pycore_typeobject.h b/Include/internal/pycore_typeobject.h
index 1a4f89fd244..0ee7d555c56 100644
--- a/Include/internal/pycore_typeobject.h
+++ b/Include/internal/pycore_typeobject.h
@@ -134,7 +134,6 @@ extern int _PyType_AddMethod(PyTypeObject *, PyMethodDef *);
extern void _PyType_SetFlagsRecursive(PyTypeObject *self, unsigned long mask,
unsigned long flags);
-extern unsigned int _PyType_GetVersionForCurrentState(PyTypeObject *tp);
PyAPI_FUNC(void) _PyType_SetVersion(PyTypeObject *tp, unsigned int version);
PyTypeObject *_PyType_LookupByVersion(unsigned int version);
diff --git a/Include/internal/pycore_unicodeobject_generated.h b/Include/internal/pycore_unicodeobject_generated.h
index 4982c4532af..0a9be4e41ac 100644
--- a/Include/internal/pycore_unicodeobject_generated.h
+++ b/Include/internal/pycore_unicodeobject_generated.h
@@ -1320,6 +1320,10 @@ _PyUnicode_InitStaticStrings(PyInterpreterState *interp) {
_PyUnicode_InternStatic(interp, &string);
assert(_PyUnicode_CheckConsistency(string, 1));
assert(PyUnicode_GET_LENGTH(string) != 1);
+ string = &_Py_ID(debug);
+ _PyUnicode_InternStatic(interp, &string);
+ assert(_PyUnicode_CheckConsistency(string, 1));
+ assert(PyUnicode_GET_LENGTH(string) != 1);
string = &_Py_ID(decode);
_PyUnicode_InternStatic(interp, &string);
assert(_PyUnicode_CheckConsistency(string, 1));
diff --git a/Include/internal/pycore_uop_ids.h b/Include/internal/pycore_uop_ids.h
index d08799487fd..2b845527cf2 100644
--- a/Include/internal/pycore_uop_ids.h
+++ b/Include/internal/pycore_uop_ids.h
@@ -272,6 +272,7 @@ extern "C" {
#define _POP_CALL_TWO 489
#define _POP_CALL_TWO_LOAD_CONST_INLINE_BORROW 490
#define _POP_EXCEPT POP_EXCEPT
+#define _POP_ITER POP_ITER
#define _POP_JUMP_IF_FALSE 491
#define _POP_JUMP_IF_TRUE 492
#define _POP_TOP POP_TOP
diff --git a/Include/internal/pycore_uop_metadata.h b/Include/internal/pycore_uop_metadata.h
index 5ebe124983b..b08909e72c4 100644
--- a/Include/internal/pycore_uop_metadata.h
+++ b/Include/internal/pycore_uop_metadata.h
@@ -66,6 +66,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_POP_TWO] = HAS_ESCAPES_FLAG,
[_PUSH_NULL] = HAS_PURE_FLAG,
[_END_FOR] = HAS_ESCAPES_FLAG | HAS_NO_SAVE_IP_FLAG,
+ [_POP_ITER] = HAS_ESCAPES_FLAG,
[_END_SEND] = HAS_ESCAPES_FLAG | HAS_PURE_FLAG,
[_UNARY_NEGATIVE] = HAS_ERROR_FLAG | HAS_ESCAPES_FLAG,
[_UNARY_NOT] = HAS_PURE_FLAG,
@@ -205,7 +206,7 @@ const uint16_t _PyUop_Flags[MAX_UOP_ID+1] = {
[_FOR_ITER_TIER_TWO] = HAS_EXIT_FLAG | HAS_ERROR_FLAG | HAS_ERROR_NO_POP_FLAG | HAS_ESCAPES_FLAG,
[_ITER_CHECK_LIST] = HAS_EXIT_FLAG,
[_GUARD_NOT_EXHAUSTED_LIST] = HAS_EXIT_FLAG,
- [_ITER_NEXT_LIST_TIER_TWO] = HAS_EXIT_FLAG | HAS_ESCAPES_FLAG,
+ [_ITER_NEXT_LIST_TIER_TWO] = HAS_DEOPT_FLAG | HAS_ESCAPES_FLAG,
[_ITER_CHECK_TUPLE] = HAS_EXIT_FLAG,
[_GUARD_NOT_EXHAUSTED_TUPLE] = HAS_EXIT_FLAG,
[_ITER_NEXT_TUPLE] = 0,
@@ -569,6 +570,7 @@ const char *const _PyOpcode_uop_name[MAX_UOP_ID+1] = {
[_POP_CALL_TWO] = "_POP_CALL_TWO",
[_POP_CALL_TWO_LOAD_CONST_INLINE_BORROW] = "_POP_CALL_TWO_LOAD_CONST_INLINE_BORROW",
[_POP_EXCEPT] = "_POP_EXCEPT",
+ [_POP_ITER] = "_POP_ITER",
[_POP_TOP] = "_POP_TOP",
[_POP_TOP_LOAD_CONST_INLINE] = "_POP_TOP_LOAD_CONST_INLINE",
[_POP_TOP_LOAD_CONST_INLINE_BORROW] = "_POP_TOP_LOAD_CONST_INLINE_BORROW",
@@ -730,6 +732,8 @@ int _PyUop_num_popped(int opcode, int oparg)
return 0;
case _END_FOR:
return 1;
+ case _POP_ITER:
+ return 2;
case _END_SEND:
return 2;
case _UNARY_NEGATIVE:
diff --git a/Include/object.h b/Include/object.h
index 8cc83abb857..994cac1ad17 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -620,6 +620,12 @@ given type object has a specified feature.
#define Py_TPFLAGS_HAVE_FINALIZE (1UL << 0)
#define Py_TPFLAGS_HAVE_VERSION_TAG (1UL << 18)
+// Flag values for ob_flags (16 bits available, if SIZEOF_VOID_P > 4).
+#define _Py_IMMORTAL_FLAGS (1 << 0)
+#define _Py_STATICALLY_ALLOCATED_FLAG (1 << 2)
+#if defined(Py_GIL_DISABLED) && defined(Py_DEBUG)
+#define _Py_TYPE_REVEALED_FLAG (1 << 3)
+#endif
#define Py_CONSTANT_NONE 0
#define Py_CONSTANT_FALSE 1
@@ -776,11 +782,7 @@ PyType_HasFeature(PyTypeObject *type, unsigned long feature)
// PyTypeObject is opaque in the limited C API
flags = PyType_GetFlags(type);
#else
-# ifdef Py_GIL_DISABLED
- flags = _Py_atomic_load_ulong_relaxed(&type->tp_flags);
-# else
- flags = type->tp_flags;
-# endif
+ flags = type->tp_flags;
#endif
return ((flags & feature) != 0);
}
diff --git a/Include/pymacro.h b/Include/pymacro.h
index 218987a80b0..d410645034d 100644
--- a/Include/pymacro.h
+++ b/Include/pymacro.h
@@ -231,12 +231,13 @@
// "comparison of unsigned expression in '< 0' is always false".
#define _Py_IS_TYPE_SIGNED(type) ((type)(-1) <= 0)
-#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x030E0000 // 3.14
// Version helpers. These are primarily macros, but have exported equivalents.
+#define _Py_PACK_VERSION(X, Y) _Py_PACK_FULL_VERSION(X, Y, 0, 0, 0)
+#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= _Py_PACK_VERSION(3, 14)
PyAPI_FUNC(uint32_t) Py_PACK_FULL_VERSION(int x, int y, int z, int level, int serial);
PyAPI_FUNC(uint32_t) Py_PACK_VERSION(int x, int y);
#define Py_PACK_FULL_VERSION _Py_PACK_FULL_VERSION
-#define Py_PACK_VERSION(X, Y) Py_PACK_FULL_VERSION(X, Y, 0, 0, 0)
+#define Py_PACK_VERSION _Py_PACK_VERSION
#endif // Py_LIMITED_API < 3.14
diff --git a/Include/refcount.h b/Include/refcount.h
index 177bbdaf0c5..ebd1dba6d15 100644
--- a/Include/refcount.h
+++ b/Include/refcount.h
@@ -19,9 +19,6 @@ immortal. The latter should be the only instances that require
cleanup during runtime finalization.
*/
-#define _Py_STATICALLY_ALLOCATED_FLAG 4
-#define _Py_IMMORTAL_FLAGS 1
-
#if SIZEOF_VOID_P > 4
/*
In 64+ bit systems, any object whose 32 bit reference count is >= 2**31
diff --git a/Include/sysmodule.h b/Include/sysmodule.h
index c1d5f610fe0..2f362791797 100644
--- a/Include/sysmodule.h
+++ b/Include/sysmodule.h
@@ -4,6 +4,12 @@
extern "C" {
#endif
+#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x030f0000
+PyAPI_FUNC(PyObject *) PySys_GetAttr(PyObject *);
+PyAPI_FUNC(PyObject *) PySys_GetAttrString(const char *);
+PyAPI_FUNC(int) PySys_GetOptionalAttr(PyObject *, PyObject **);
+PyAPI_FUNC(int) PySys_GetOptionalAttrString(const char *, PyObject **);
+#endif
PyAPI_FUNC(PyObject *) PySys_GetObject(const char *);
PyAPI_FUNC(int) PySys_SetObject(const char *, PyObject *);
diff --git a/InternalDocs/exception_handling.md b/InternalDocs/exception_handling.md
index 28589787e1f..9e38da4c862 100644
--- a/InternalDocs/exception_handling.md
+++ b/InternalDocs/exception_handling.md
@@ -8,7 +8,7 @@ The cost of raising an exception is increased, but not by much.
The following code:
-```
+```python
try:
g(0)
except:
@@ -18,7 +18,7 @@ except:
compiles into intermediate code like the following:
-```
+```python
RESUME 0
1 SETUP_FINALLY 8 (to L1)
@@ -118,13 +118,13 @@ All offsets and lengths are in code units, not bytes.
We want the format to be compact, but quickly searchable.
For it to be compact, it needs to have variable sized entries so that we can store common (small) offsets compactly, but handle large offsets if needed.
-For it to be searchable quickly, we need to support binary search giving us log(n) performance in all cases.
+For it to be searchable quickly, we need to support binary search giving us `log(n)` performance in all cases.
Binary search typically assumes fixed size entries, but that is not necessary, as long as we can identify the start of an entry.
It is worth noting that the size (end-start) is always smaller than the end, so we encode the entries as:
`start, size, target, depth, push-lasti`.
-Also, sizes are limited to 2**30 as the code length cannot exceed 2**31 and each code unit takes 2 bytes.
+Also, sizes are limited to `2**30` as the code length cannot exceed `2**31` and each code unit takes 2 bytes.
It also happens that depth is generally quite small.
So, we need to encode:
@@ -140,7 +140,7 @@ lasti (1 bit)
We need a marker for the start of the entry, so the first byte of entry will have the most significant bit set.
Since the most significant bit is reserved for marking the start of an entry, we have 7 bits per byte to encode offsets.
Encoding uses a standard varint encoding, but with only 7 bits instead of the usual 8.
-The 8 bits of a byte are (msb left) SXdddddd where S is the start bit. X is the extend bit meaning that the next byte is required to extend the offset.
+The 8 bits of a byte are (msb left) `SXdddddd` where `S` is the start bit. `X` is the extend bit meaning that the next byte is required to extend the offset.
In addition, we combine `depth` and `lasti` into a single value, `((depth<<1)+lasti)`, before encoding.
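The following is a minimal sketch (not the actual assembler code) of the byte layout just described: six value bits per byte, bit 7 (`S`) marks the first byte of an entry, and bit 6 (`X`) marks that another byte follows.

```python
def _encode_varint(value, first_byte_of_entry=False):
    """Encode a non-negative value as SXdddddd bytes, most significant chunk first."""
    chunks = [value & 0x3F]
    value >>= 6
    while value:
        chunks.append(value & 0x3F)
        value >>= 6
    chunks.reverse()
    encoded = bytearray()
    for i, chunk in enumerate(chunks):
        byte = chunk
        if i == 0 and first_byte_of_entry:
            byte |= 0x80          # S: start-of-entry marker
        if i != len(chunks) - 1:
            byte |= 0x40          # X: extend bit, another byte follows
        encoded.append(byte)
    return bytes(encoded)

def encode_entry(start, size, target, depth, lasti):
    """Encode one exception table entry: start, size, target, (depth<<1)+lasti."""
    parts = [_encode_varint(start, first_byte_of_entry=True)]
    for value in (size, target, (depth << 1) + lasti):
        parts.append(_encode_varint(value))
    return b"".join(parts)
```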
diff --git a/Lib/_pydecimal.py b/Lib/_pydecimal.py
index 46fa9ffcb1e..781b38ec26b 100644
--- a/Lib/_pydecimal.py
+++ b/Lib/_pydecimal.py
@@ -6120,9 +6120,9 @@ _parse_format_specifier_regex = re.compile(r"""\A
(?P<no_neg_0>z)?
(?P<alt>\#)?
(?P<zeropad>0)?
-(?P<minimumwidth>(?!0)\d+)?
+(?P<minimumwidth>\d+)?
(?P<thousands_sep>[,_])?
-(?:\.(?P<precision>0|(?!0)\d+))?
+(?:\.(?P<precision>\d+))?
(?P<type>[eEfFgGn%])?
\z
""", re.VERBOSE|re.DOTALL)
diff --git a/Lib/_pyrepl/_module_completer.py b/Lib/_pyrepl/_module_completer.py
index 494a501101a..1e9462a4215 100644
--- a/Lib/_pyrepl/_module_completer.py
+++ b/Lib/_pyrepl/_module_completer.py
@@ -42,11 +42,11 @@ class ModuleCompleter:
self._global_cache: list[pkgutil.ModuleInfo] = []
self._curr_sys_path: list[str] = sys.path[:]
- def get_completions(self, line: str) -> list[str]:
+ def get_completions(self, line: str) -> list[str] | None:
"""Return the next possible import completions for 'line'."""
result = ImportParser(line).parse()
if not result:
- return []
+ return None
try:
return self.complete(*result)
except Exception:
diff --git a/Lib/_pyrepl/readline.py b/Lib/_pyrepl/readline.py
index 572eee520e5..9560ae779ab 100644
--- a/Lib/_pyrepl/readline.py
+++ b/Lib/_pyrepl/readline.py
@@ -134,7 +134,8 @@ class ReadlineAlikeReader(historical_reader.HistoricalReader, CompletingReader):
return "".join(b[p + 1 : self.pos])
def get_completions(self, stem: str) -> list[str]:
- if module_completions := self.get_module_completions():
+ module_completions = self.get_module_completions()
+ if module_completions is not None:
return module_completions
if len(stem) == 0 and self.more_lines is not None:
b = self.buffer
@@ -165,7 +166,7 @@ class ReadlineAlikeReader(historical_reader.HistoricalReader, CompletingReader):
result.sort()
return result
- def get_module_completions(self) -> list[str]:
+ def get_module_completions(self) -> list[str] | None:
line = self.get_line()
return self.config.module_completer.get_completions(line)
diff --git a/Lib/argparse.py b/Lib/argparse.py
index d1a6350c3fd..83258cf3e0f 100644
--- a/Lib/argparse.py
+++ b/Lib/argparse.py
@@ -1534,7 +1534,7 @@ class _ActionsContainer(object):
action_name = kwargs.get('action')
action_class = self._pop_action_class(kwargs)
if not callable(action_class):
- raise ValueError('unknown action {action_class!r}')
+ raise ValueError(f'unknown action {action_class!r}')
action = action_class(**kwargs)
# raise an error if action for positional argument does not
diff --git a/Lib/ast.py b/Lib/ast.py
index b9791bf52d3..6d3daf64f5c 100644
--- a/Lib/ast.py
+++ b/Lib/ast.py
@@ -147,18 +147,22 @@ def dump(
if value is None and getattr(cls, name, ...) is None:
keywords = True
continue
- if (
- not show_empty
- and (value is None or value == [])
- # Special cases:
- # `Constant(value=None)` and `MatchSingleton(value=None)`
- and not isinstance(node, (Constant, MatchSingleton))
- ):
- args_buffer.append(repr(value))
- continue
- elif not keywords:
- args.extend(args_buffer)
- args_buffer = []
+ if not show_empty:
+ if value == []:
+ field_type = cls._field_types.get(name, object)
+ if getattr(field_type, '__origin__', ...) is list:
+ if not keywords:
+ args_buffer.append(repr(value))
+ continue
+ elif isinstance(value, Load):
+ field_type = cls._field_types.get(name, object)
+ if field_type is expr_context:
+ if not keywords:
+ args_buffer.append(repr(value))
+ continue
+ if not keywords:
+ args.extend(args_buffer)
+ args_buffer = []
value, simple = _format(value, level)
allsimple = allsimple and simple
if keywords:
diff --git a/Lib/code.py b/Lib/code.py
index b134886dc26..f7e275d8801 100644
--- a/Lib/code.py
+++ b/Lib/code.py
@@ -224,7 +224,7 @@ class InteractiveConsole(InteractiveInterpreter):
sys.ps1 = ">>> "
delete_ps1_after = True
try:
- _ps2 = sys.ps2
+ sys.ps2
delete_ps2_after = False
except AttributeError:
sys.ps2 = "... "
diff --git a/Lib/compression/zstd/_zstdfile.py b/Lib/compression/zstd/_zstdfile.py
index 8770e576f50..d709f5efc65 100644
--- a/Lib/compression/zstd/_zstdfile.py
+++ b/Lib/compression/zstd/_zstdfile.py
@@ -1,7 +1,6 @@
import io
from os import PathLike
-from _zstd import (ZstdCompressor, ZstdDecompressor, ZstdError,
- ZSTD_DStreamOutSize)
+from _zstd import ZstdCompressor, ZstdDecompressor, ZSTD_DStreamOutSize
from compression._common import _streams
__all__ = ('ZstdFile', 'open')
diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py
index 823a3692fd1..d6d07a13f75 100644
--- a/Lib/ctypes/__init__.py
+++ b/Lib/ctypes/__init__.py
@@ -379,12 +379,6 @@ def create_unicode_buffer(init, size=None):
return buf
raise TypeError(init)
-
-def SetPointerType(pointer, cls):
- import warnings
- warnings._deprecated("ctypes.SetPointerType", remove=(3, 15))
- pointer.set_type(cls)
-
def ARRAY(typ, len):
return typ * len
diff --git a/Lib/dbm/dumb.py b/Lib/dbm/dumb.py
index def120ffc37..1bc239a84ff 100644
--- a/Lib/dbm/dumb.py
+++ b/Lib/dbm/dumb.py
@@ -9,7 +9,7 @@ XXX TO DO:
- seems to contain a bug when updating...
- reclaim free space (currently, space once occupied by deleted or expanded
-items is never reused)
+items is not reused unless .reorganize() is called)
- support concurrent access (currently, if two processes take turns making
updates, they can mess up the index)
@@ -17,8 +17,6 @@ updates, they can mess up the index)
- support efficient access to large databases (currently, the whole index
is read when the database is opened, and some updates rewrite the whole index)
-- support opening for read-only (flag = 'm')
-
"""
import ast as _ast
@@ -289,6 +287,34 @@ class _Database(collections.abc.MutableMapping):
def __exit__(self, *args):
self.close()
+ def reorganize(self):
+ if self._readonly:
+ raise error('The database is opened for reading only')
+ self._verify_open()
+ # Ensure all changes are committed before reorganizing.
+ self._commit()
+ # Open file in r+ to allow changing in-place.
+ with _io.open(self._datfile, 'rb+') as f:
+ reorganize_pos = 0
+
+ # Iterate over existing keys, sorted by starting byte.
+ for key in sorted(self._index, key = lambda k: self._index[k][0]):
+ pos, siz = self._index[key]
+ f.seek(pos)
+ val = f.read(siz)
+
+ f.seek(reorganize_pos)
+ f.write(val)
+ self._index[key] = (reorganize_pos, siz)
+
+ blocks_occupied = (siz + _BLOCKSIZE - 1) // _BLOCKSIZE
+ reorganize_pos += blocks_occupied * _BLOCKSIZE
+
+ f.truncate(reorganize_pos)
+ # Commit changes to index, which were not in-place.
+ self._commit()
+
+
def open(file, flag='c', mode=0o666):
"""Open the database file, filename, and return corresponding object.
diff --git a/Lib/dbm/sqlite3.py b/Lib/dbm/sqlite3.py
index 7e0ae2a29e3..b296a1bcd1b 100644
--- a/Lib/dbm/sqlite3.py
+++ b/Lib/dbm/sqlite3.py
@@ -15,6 +15,7 @@ LOOKUP_KEY = "SELECT value FROM Dict WHERE key = CAST(? AS BLOB)"
STORE_KV = "REPLACE INTO Dict (key, value) VALUES (CAST(? AS BLOB), CAST(? AS BLOB))"
DELETE_KEY = "DELETE FROM Dict WHERE key = CAST(? AS BLOB)"
ITER_KEYS = "SELECT key FROM Dict"
+REORGANIZE = "VACUUM"
class error(OSError):
@@ -122,6 +123,9 @@ class _Database(MutableMapping):
def __exit__(self, *args):
self.close()
+ def reorganize(self):
+ self._execute(REORGANIZE)
+
def open(filename, /, flag="r", mode=0o666):
"""Open a dbm.sqlite3 database and return the dbm object.
diff --git a/Lib/difflib.py b/Lib/difflib.py
index f1f4e62514a..18801a9b19e 100644
--- a/Lib/difflib.py
+++ b/Lib/difflib.py
@@ -1615,16 +1615,13 @@ def _mdiff(fromlines, tolines, context=None, linejunk=None,
_file_template = """
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
- "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
-
-<html>
-
+<!DOCTYPE html>
+<html lang="en">
<head>
- <meta http-equiv="Content-Type"
- content="text/html; charset=%(charset)s" />
- <title></title>
- <style type="text/css">%(styles)s
+ <meta charset="%(charset)s">
+ <meta name="viewport" content="width=device-width, initial-scale=1">
+ <title>Diff comparison</title>
+ <style>%(styles)s
</style>
</head>
@@ -1636,13 +1633,36 @@ _file_template = """
_styles = """
:root {color-scheme: light dark}
- table.diff {font-family: Menlo, Consolas, Monaco, Liberation Mono, Lucida Console, monospace; border:medium}
- .diff_header {background-color:#e0e0e0}
- td.diff_header {text-align:right}
- .diff_next {background-color:#c0c0c0}
+ table.diff {
+ font-family: Menlo, Consolas, Monaco, Liberation Mono, Lucida Console, monospace;
+ border: medium;
+ }
+ .diff_header {
+ background-color: #e0e0e0;
+ font-weight: bold;
+ }
+ td.diff_header {
+ text-align: right;
+ padding: 0 8px;
+ }
+ .diff_next {
+ background-color: #c0c0c0;
+ padding: 4px 0;
+ }
.diff_add {background-color:palegreen}
.diff_chg {background-color:#ffff77}
.diff_sub {background-color:#ffaaaa}
+ table.diff[summary="Legends"] {
+ margin-top: 20px;
+ border: 1px solid #ccc;
+ }
+ table.diff[summary="Legends"] th {
+ background-color: #e0e0e0;
+ padding: 4px 8px;
+ }
+ table.diff[summary="Legends"] td {
+ padding: 4px 8px;
+ }
@media (prefers-color-scheme: dark) {
.diff_header {background-color:#666}
@@ -1650,6 +1670,8 @@ _styles = """
.diff_add {background-color:darkgreen}
.diff_chg {background-color:#847415}
.diff_sub {background-color:darkred}
+ table.diff[summary="Legends"] {border-color:#555}
+ table.diff[summary="Legends"] th{background-color:#666}
}"""
_table_template = """
@@ -1692,7 +1714,7 @@ class HtmlDiff(object):
make_table -- generates HTML for a single side by side table
make_file -- generates complete HTML file with a single side by side table
- See tools/scripts/diff.py for an example usage of this class.
+ See Doc/includes/diff.py for an example usage of this class.
"""
_file_template = _file_template
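
The template change is purely presentational; existing make_file() calls keep working and now produce an HTML5 page with the styles above. For instance:

import difflib

html = difflib.HtmlDiff().make_file(
    ["one\n", "two\n"],
    ["one\n", "three\n"],
    fromdesc="before",
    todesc="after",
)
print(html[:60])   # the output now begins with the HTML5 doctype
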
diff --git a/Lib/doctest.py b/Lib/doctest.py
index 2acb6cb79f3..c8c95ecbb27 100644
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -101,6 +101,7 @@ import pdb
import re
import sys
import traceback
+import types
import unittest
from io import StringIO, IncrementalNewlineDecoder
from collections import namedtuple
@@ -385,7 +386,7 @@ class _OutputRedirectingPdb(pdb.Pdb):
self.__out = out
self.__debugger_used = False
# do not play signal games in the pdb
- pdb.Pdb.__init__(self, stdout=out, nosigint=True)
+ super().__init__(stdout=out, nosigint=True)
# still use input() to get user input
self.use_rawinput = 1
@@ -1278,6 +1279,11 @@ class DocTestRunner:
# Reporting methods
#/////////////////////////////////////////////////////////////////
+ def report_skip(self, out, test, example):
+ """
+ Report that the given example was skipped.
+ """
+
def report_start(self, out, test, example):
"""
Report that the test runner is about to process the given
@@ -1375,6 +1381,8 @@ class DocTestRunner:
# If 'SKIP' is set, then skip this example.
if self.optionflags & SKIP:
+ if not quiet:
+ self.report_skip(out, test, example)
skips += 1
continue
@@ -1395,11 +1403,11 @@ class DocTestRunner:
exec(compile(example.source, filename, "single",
compileflags, True), test.globs)
self.debugger.set_continue() # ==== Example Finished ====
- exception = None
+ exc_info = None
except KeyboardInterrupt:
raise
- except:
- exception = sys.exc_info()
+ except BaseException as exc:
+ exc_info = type(exc), exc, exc.__traceback__.tb_next
self.debugger.set_continue() # ==== Example Finished ====
got = self._fakeout.getvalue() # the actual output
@@ -1408,21 +1416,21 @@ class DocTestRunner:
# If the example executed without raising any exceptions,
# verify its output.
- if exception is None:
+ if exc_info is None:
if check(example.want, got, self.optionflags):
outcome = SUCCESS
# The example raised an exception: check if it was expected.
else:
- formatted_ex = traceback.format_exception_only(*exception[:2])
- if issubclass(exception[0], SyntaxError):
+ formatted_ex = traceback.format_exception_only(*exc_info[:2])
+ if issubclass(exc_info[0], SyntaxError):
# SyntaxError / IndentationError is special:
# we don't care about the carets / suggestions / etc
# We only care about the error message and notes.
# They start with `SyntaxError:` (or any other class name)
exception_line_prefixes = (
- f"{exception[0].__qualname__}:",
- f"{exception[0].__module__}.{exception[0].__qualname__}:",
+ f"{exc_info[0].__qualname__}:",
+ f"{exc_info[0].__module__}.{exc_info[0].__qualname__}:",
)
exc_msg_index = next(
index
@@ -1433,7 +1441,7 @@ class DocTestRunner:
exc_msg = "".join(formatted_ex)
if not quiet:
- got += _exception_traceback(exception)
+ got += _exception_traceback(exc_info)
# If `example.exc_msg` is None, then we weren't expecting
# an exception.
@@ -1462,7 +1470,7 @@ class DocTestRunner:
elif outcome is BOOM:
if not quiet:
self.report_unexpected_exception(out, test, example,
- exception)
+ exc_info)
failures += 1
else:
assert False, ("unknown outcome", outcome)
@@ -2272,12 +2280,63 @@ def set_unittest_reportflags(flags):
return old
+class _DocTestCaseRunner(DocTestRunner):
+
+ def __init__(self, *args, test_case, test_result, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._test_case = test_case
+ self._test_result = test_result
+ self._examplenum = 0
+
+ def _subTest(self):
+ subtest = unittest.case._SubTest(self._test_case, str(self._examplenum), {})
+ self._examplenum += 1
+ return subtest
+
+ def report_skip(self, out, test, example):
+ unittest.case._addSkip(self._test_result, self._subTest(), '')
+
+ def report_success(self, out, test, example, got):
+ self._test_result.addSubTest(self._test_case, self._subTest(), None)
+
+ def report_unexpected_exception(self, out, test, example, exc_info):
+ tb = self._add_traceback(exc_info[2], test, example)
+ exc_info = (*exc_info[:2], tb)
+ self._test_result.addSubTest(self._test_case, self._subTest(), exc_info)
+
+ def report_failure(self, out, test, example, got):
+ msg = ('Failed example:\n' + _indent(example.source) +
+ self._checker.output_difference(example, got, self.optionflags).rstrip('\n'))
+ exc = self._test_case.failureException(msg)
+ tb = self._add_traceback(None, test, example)
+ exc_info = (type(exc), exc, tb)
+ self._test_result.addSubTest(self._test_case, self._subTest(), exc_info)
+
+ def _add_traceback(self, traceback, test, example):
+ if test.lineno is None or example.lineno is None:
+ lineno = None
+ else:
+ lineno = test.lineno + example.lineno + 1
+ return types.SimpleNamespace(
+ tb_frame = types.SimpleNamespace(
+ f_globals=test.globs,
+ f_code=types.SimpleNamespace(
+ co_filename=test.filename,
+ co_name=test.name,
+ ),
+ ),
+ tb_next = traceback,
+ tb_lasti = -1,
+ tb_lineno = lineno,
+ )
+
+
class DocTestCase(unittest.TestCase):
def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
checker=None):
- unittest.TestCase.__init__(self)
+ super().__init__()
self._dt_optionflags = optionflags
self._dt_checker = checker
self._dt_test = test
@@ -2301,30 +2360,28 @@ class DocTestCase(unittest.TestCase):
test.globs.clear()
test.globs.update(self._dt_globs)
+ def run(self, result=None):
+ self._test_result = result
+ return super().run(result)
+
def runTest(self):
test = self._dt_test
- old = sys.stdout
- new = StringIO()
optionflags = self._dt_optionflags
+ result = self._test_result
if not (optionflags & REPORTING_FLAGS):
# The option flags don't include any reporting flags,
# so add the default reporting flags
optionflags |= _unittest_reportflags
+ if getattr(result, 'failfast', False):
+ optionflags |= FAIL_FAST
- runner = DocTestRunner(optionflags=optionflags,
- checker=self._dt_checker, verbose=False)
-
- try:
- runner.DIVIDER = "-"*70
- results = runner.run(test, out=new.write, clear_globs=False)
- if results.skipped == results.attempted:
- raise unittest.SkipTest("all examples were skipped")
- finally:
- sys.stdout = old
-
- if results.failed:
- raise self.failureException(self.format_failure(new.getvalue()))
+ runner = _DocTestCaseRunner(optionflags=optionflags,
+ checker=self._dt_checker, verbose=False,
+ test_case=self, test_result=result)
+ results = runner.run(test, clear_globs=False)
+ if results.skipped == results.attempted:
+ raise unittest.SkipTest("all examples were skipped")
def format_failure(self, err):
test = self._dt_test
@@ -2439,7 +2496,7 @@ class DocTestCase(unittest.TestCase):
class SkipDocTestCase(DocTestCase):
def __init__(self, module):
self.module = module
- DocTestCase.__init__(self, None)
+ super().__init__(None)
def setUp(self):
self.skipTest("DocTestSuite will not work with -O2 and above")
diff --git a/Lib/fractions.py b/Lib/fractions.py
index 063f28478c7..cb05ae7c200 100644
--- a/Lib/fractions.py
+++ b/Lib/fractions.py
@@ -168,9 +168,9 @@ _FLOAT_FORMAT_SPECIFICATION_MATCHER = re.compile(r"""
# A '0' that's *not* followed by another digit is parsed as a minimum width
# rather than a zeropad flag.
(?P<zeropad>0(?=[0-9]))?
- (?P<minimumwidth>0|[1-9][0-9]*)?
+ (?P<minimumwidth>[0-9]+)?
(?P<thousands_sep>[,_])?
- (?:\.(?P<precision>0|[1-9][0-9]*))?
+ (?:\.(?P<precision>[0-9]+))?
(?P<presentation_type>[eEfFgG%])
""", re.DOTALL | re.VERBOSE).fullmatch
diff --git a/Lib/genericpath.py b/Lib/genericpath.py
index ba7b0a13c7f..9363f564aab 100644
--- a/Lib/genericpath.py
+++ b/Lib/genericpath.py
@@ -8,7 +8,7 @@ import stat
__all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime',
'getsize', 'isdevdrive', 'isdir', 'isfile', 'isjunction', 'islink',
- 'lexists', 'samefile', 'sameopenfile', 'samestat']
+ 'lexists', 'samefile', 'sameopenfile', 'samestat', 'ALLOW_MISSING']
# Does a path exist?
@@ -189,3 +189,12 @@ def _check_arg_types(funcname, *args):
f'os.PathLike object, not {s.__class__.__name__!r}') from None
if hasstr and hasbytes:
raise TypeError("Can't mix strings and bytes in path components") from None
+
+# A singleton with a true boolean value.
+@object.__new__
+class ALLOW_MISSING:
+ """Special value for use in realpath()."""
+ def __repr__(self):
+ return 'os.path.ALLOW_MISSING'
+ def __reduce__(self):
+ return self.__class__.__name__
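
ALLOW_MISSING is a plain truthy sentinel; os.path re-exports it and realpath() treats it as a third strictness level (see the posixpath and ntpath hunks below). A small sketch, assuming a build that includes this change:

import os.path

print(os.path.ALLOW_MISSING)         # os.path.ALLOW_MISSING
print(bool(os.path.ALLOW_MISSING))   # True
path = os.path.realpath("no/such/file", strict=os.path.ALLOW_MISSING)
print(path)                          # absolute path; the FileNotFoundError was swallowed
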
diff --git a/Lib/hashlib.py b/Lib/hashlib.py
index abacac22ea0..0e9bd98aa1f 100644
--- a/Lib/hashlib.py
+++ b/Lib/hashlib.py
@@ -141,29 +141,29 @@ def __get_openssl_constructor(name):
return __get_builtin_constructor(name)
-def __py_new(name, data=b'', **kwargs):
+def __py_new(name, *args, **kwargs):
"""new(name, data=b'', **kwargs) - Return a new hashing object using the
named algorithm; optionally initialized with data (which must be
a bytes-like object).
"""
- return __get_builtin_constructor(name)(data, **kwargs)
+ return __get_builtin_constructor(name)(*args, **kwargs)
-def __hash_new(name, data=b'', **kwargs):
+def __hash_new(name, *args, **kwargs):
"""new(name, data=b'') - Return a new hashing object using the named algorithm;
optionally initialized with data (which must be a bytes-like object).
"""
if name in __block_openssl_constructor:
# Prefer our builtin blake2 implementation.
- return __get_builtin_constructor(name)(data, **kwargs)
+ return __get_builtin_constructor(name)(*args, **kwargs)
try:
- return _hashlib.new(name, data, **kwargs)
+ return _hashlib.new(name, *args, **kwargs)
except ValueError:
# If the _hashlib module (OpenSSL) doesn't support the named
# hash, try using our builtin implementations.
# This allows for SHA224/256 and SHA384/512 support even though
# the OpenSSL library prior to 0.9.8 doesn't provide them.
- return __get_builtin_constructor(name)(data)
+ return __get_builtin_constructor(name)(*args, **kwargs)
try:
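
The constructors above now forward whatever positional and keyword arguments they receive, so the same call shape works whether OpenSSL or the built-in implementation ends up handling the name. A hedged sketch (usedforsecurity is an existing keyword, used here only to exercise the forwarding):

import hashlib

h = hashlib.new("md5", b"payload", usedforsecurity=False)
print(h.hexdigest())
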
diff --git a/Lib/idlelib/News3.txt b/Lib/idlelib/News3.txt
index 74d84b38931..30784578cc6 100644
--- a/Lib/idlelib/News3.txt
+++ b/Lib/idlelib/News3.txt
@@ -4,6 +4,13 @@ Released on 2025-10-07
=========================
+gh-112936: IDLE - Include Shell menu in single-process mode,
+though with Restart Shell and View Last Restart disabled.
+Patch by Zhikang Yan.
+
+gh-112938: IDLE - Fix uninterruptible hang when Shell gets
+rapid continuous output.
+
gh-127060: Set TERM environment variable to 'dumb' to not add ANSI escape
sequences for text color in tracebacks. IDLE does not understand them.
Patch by Victor Stinner.
diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py
index 69e933a6541..8b60b9d5c9c 100644
--- a/Lib/ipaddress.py
+++ b/Lib/ipaddress.py
@@ -1660,10 +1660,12 @@ class _BaseV6:
"""
if not ip_str:
raise AddressValueError('Address cannot be empty')
- if len(ip_str) > 39:
- msg = ("At most 39 characters expected in "
- f"{ip_str[:14]!r}({len(ip_str)-28} chars elided){ip_str[-14:]!r}")
- raise AddressValueError(msg)
+ if len(ip_str) > 45:
+ shorten = ip_str
+ if len(shorten) > 100:
+ shorten = f'{ip_str[:45]}({len(ip_str)-90} chars elided){ip_str[-45:]}'
+ raise AddressValueError(f"At most 45 characters expected in "
+ f"{shorten!r}")
# We want to allow more parts than the max to be 'split'
# to preserve the correct error message when there are
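
45 characters is the longest legitimate textual IPv6 form (a fully written-out IPv4-mapped address), which the previous 39-character cutoff rejected before parsing. For example:

import ipaddress

addr = ipaddress.ip_address("0000:0000:0000:0000:0000:ffff:255.255.255.255")  # 45 chars
print(addr)   # ::ffff:255.255.255.255
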
diff --git a/Lib/json/encoder.py b/Lib/json/encoder.py
index 016638549aa..bc446e0f377 100644
--- a/Lib/json/encoder.py
+++ b/Lib/json/encoder.py
@@ -348,7 +348,6 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
_current_indent_level += 1
newline_indent = '\n' + _indent * _current_indent_level
item_separator = _item_separator + newline_indent
- yield newline_indent
else:
newline_indent = None
item_separator = _item_separator
@@ -381,6 +380,8 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
f'not {key.__class__.__name__}')
if first:
first = False
+ if newline_indent is not None:
+ yield newline_indent
else:
yield item_separator
yield _encoder(key)
@@ -413,7 +414,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
except BaseException as exc:
exc.add_note(f'when serializing {type(dct).__name__} item {key!r}')
raise
- if newline_indent is not None:
+ if not first and newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
yield '}'
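
The reordered yield means the opening newline is only emitted once a key is actually written, so an indented dict whose items are all skipped collapses to "{}" just like the compact form. For example:

import json

print(json.dumps({(1, 2): "skipped"}, skipkeys=True, indent=2))   # {}
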
diff --git a/Lib/ntpath.py b/Lib/ntpath.py
index 52ff2af743a..9cdc16480f9 100644
--- a/Lib/ntpath.py
+++ b/Lib/ntpath.py
@@ -29,7 +29,7 @@ __all__ = ["normcase","isabs","join","splitdrive","splitroot","split","splitext"
"abspath","curdir","pardir","sep","pathsep","defpath","altsep",
"extsep","devnull","realpath","supports_unicode_filenames","relpath",
"samefile", "sameopenfile", "samestat", "commonpath", "isjunction",
- "isdevdrive"]
+ "isdevdrive", "ALLOW_MISSING"]
def _get_bothseps(path):
if isinstance(path, bytes):
@@ -601,9 +601,10 @@ try:
from nt import _findfirstfile, _getfinalpathname, readlink as _nt_readlink
except ImportError:
# realpath is a no-op on systems without _getfinalpathname support.
- realpath = abspath
+ def realpath(path, *, strict=False):
+ return abspath(path)
else:
- def _readlink_deep(path):
+ def _readlink_deep(path, ignored_error=OSError):
# These error codes indicate that we should stop reading links and
# return the path we currently have.
# 1: ERROR_INVALID_FUNCTION
@@ -636,7 +637,7 @@ else:
path = old_path
break
path = normpath(join(dirname(old_path), path))
- except OSError as ex:
+ except ignored_error as ex:
if ex.winerror in allowed_winerror:
break
raise
@@ -645,7 +646,7 @@ else:
break
return path
- def _getfinalpathname_nonstrict(path):
+ def _getfinalpathname_nonstrict(path, ignored_error=OSError):
# These error codes indicate that we should stop resolving the path
# and return the value we currently have.
# 1: ERROR_INVALID_FUNCTION
@@ -673,17 +674,18 @@ else:
try:
path = _getfinalpathname(path)
return join(path, tail) if tail else path
- except OSError as ex:
+ except ignored_error as ex:
if ex.winerror not in allowed_winerror:
raise
try:
# The OS could not resolve this path fully, so we attempt
# to follow the link ourselves. If we succeed, join the tail
# and return.
- new_path = _readlink_deep(path)
+ new_path = _readlink_deep(path,
+ ignored_error=ignored_error)
if new_path != path:
return join(new_path, tail) if tail else new_path
- except OSError:
+ except ignored_error:
# If we fail to readlink(), let's keep traversing
pass
# If we get these errors, try to get the real name of the file without accessing it.
@@ -691,7 +693,7 @@ else:
try:
name = _findfirstfile(path)
path, _ = split(path)
- except OSError:
+ except ignored_error:
path, name = split(path)
else:
path, name = split(path)
@@ -721,6 +723,15 @@ else:
if normcase(path) == devnull:
return '\\\\.\\NUL'
had_prefix = path.startswith(prefix)
+
+ if strict is ALLOW_MISSING:
+ ignored_error = FileNotFoundError
+ strict = True
+ elif strict:
+ ignored_error = ()
+ else:
+ ignored_error = OSError
+
if not had_prefix and not isabs(path):
path = join(cwd, path)
try:
@@ -728,17 +739,16 @@ else:
initial_winerror = 0
except ValueError as ex:
# gh-106242: Raised for embedded null characters
- # In strict mode, we convert into an OSError.
+ # In strict modes, we convert into an OSError.
# Non-strict mode returns the path as-is, since we've already
# made it absolute.
if strict:
raise OSError(str(ex)) from None
path = normpath(path)
- except OSError as ex:
- if strict:
- raise
+ except ignored_error as ex:
initial_winerror = ex.winerror
- path = _getfinalpathname_nonstrict(path)
+ path = _getfinalpathname_nonstrict(path,
+ ignored_error=ignored_error)
# The path returned by _getfinalpathname will always start with \\?\ -
# strip off that prefix unless it was already provided on the original
# path.
diff --git a/Lib/posixpath.py b/Lib/posixpath.py
index db72ded8826..d38f3bd5872 100644
--- a/Lib/posixpath.py
+++ b/Lib/posixpath.py
@@ -36,7 +36,7 @@ __all__ = ["normcase","isabs","join","splitdrive","splitroot","split","splitext"
"samefile","sameopenfile","samestat",
"curdir","pardir","sep","pathsep","defpath","altsep","extsep",
"devnull","realpath","supports_unicode_filenames","relpath",
- "commonpath", "isjunction","isdevdrive"]
+ "commonpath", "isjunction","isdevdrive","ALLOW_MISSING"]
def _get_sep(path):
@@ -402,10 +402,18 @@ symbolic links encountered in the path."""
curdir = '.'
pardir = '..'
getcwd = os.getcwd
- return _realpath(filename, strict, sep, curdir, pardir, getcwd)
+ if strict is ALLOW_MISSING:
+ ignored_error = FileNotFoundError
+ strict = True
+ elif strict:
+ ignored_error = ()
+ else:
+ ignored_error = OSError
+
+ lstat = os.lstat
+ readlink = os.readlink
+ maxlinks = None
-def _realpath(filename, strict=False, sep=sep, curdir=curdir, pardir=pardir,
- getcwd=os.getcwd, lstat=os.lstat, readlink=os.readlink, maxlinks=None):
# The stack of unresolved path parts. When popped, a special value of None
# indicates that a symlink target has been resolved, and that the original
# symlink path can be retrieved by popping again. The [::-1] slice is a
@@ -477,27 +485,28 @@ def _realpath(filename, strict=False, sep=sep, curdir=curdir, pardir=pardir,
path = newpath
continue
target = readlink(newpath)
- except OSError:
- if strict:
- raise
- path = newpath
+ except ignored_error:
+ pass
+ else:
+ # Resolve the symbolic link
+ if target.startswith(sep):
+ # Symlink target is absolute; reset resolved path.
+ path = sep
+ if maxlinks is None:
+ # Mark this symlink as seen but not fully resolved.
+ seen[newpath] = None
+ # Push the symlink path onto the stack, and signal its specialness
+ # by also pushing None. When these entries are popped, we'll
+ # record the fully-resolved symlink target in the 'seen' mapping.
+ rest.append(newpath)
+ rest.append(None)
+ # Push the unresolved symlink target parts onto the stack.
+ target_parts = target.split(sep)[::-1]
+ rest.extend(target_parts)
+ part_count += len(target_parts)
continue
- # Resolve the symbolic link
- if target.startswith(sep):
- # Symlink target is absolute; reset resolved path.
- path = sep
- if maxlinks is None:
- # Mark this symlink as seen but not fully resolved.
- seen[newpath] = None
- # Push the symlink path onto the stack, and signal its specialness
- # by also pushing None. When these entries are popped, we'll
- # record the fully-resolved symlink target in the 'seen' mapping.
- rest.append(newpath)
- rest.append(None)
- # Push the unresolved symlink target parts onto the stack.
- target_parts = target.split(sep)[::-1]
- rest.extend(target_parts)
- part_count += len(target_parts)
+ # An error occurred and was ignored.
+ path = newpath
return path
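
After the rewrite, the ignored_error tuple encodes the three strictness levels in one place: strict=False swallows any OSError, ALLOW_MISSING swallows only FileNotFoundError, and strict=True swallows nothing. Sketch, with an illustrative missing path:

import os

missing = "/no/such/dir/file"
print(os.path.realpath(missing, strict=False))
print(os.path.realpath(missing, strict=os.path.ALLOW_MISSING))
try:
    os.path.realpath(missing, strict=True)
except FileNotFoundError as exc:
    print("strict=True raised:", exc.__class__.__name__)
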
diff --git a/Lib/shelve.py b/Lib/shelve.py
index 50584716e9e..b53dc8b7a8e 100644
--- a/Lib/shelve.py
+++ b/Lib/shelve.py
@@ -171,6 +171,11 @@ class Shelf(collections.abc.MutableMapping):
if hasattr(self.dict, 'sync'):
self.dict.sync()
+ def reorganize(self):
+ self.sync()
+ if hasattr(self.dict, 'reorganize'):
+ self.dict.reorganize()
+
class BsdDbShelf(Shelf):
"""Shelf implementation using the "BSD" db interface.
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index 212b71f6509..068aa13ed70 100644
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -67,7 +67,7 @@ __all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
"DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter",
"tar_filter", "FilterError", "AbsoluteLinkError",
"OutsideDestinationError", "SpecialFileError", "AbsolutePathError",
- "LinkOutsideDestinationError"]
+ "LinkOutsideDestinationError", "LinkFallbackError"]
#---------------------------------------------------------
@@ -766,10 +766,22 @@ class LinkOutsideDestinationError(FilterError):
super().__init__(f'{tarinfo.name!r} would link to {path!r}, '
+ 'which is outside the destination')
+class LinkFallbackError(FilterError):
+ def __init__(self, tarinfo, path):
+ self.tarinfo = tarinfo
+ self._path = path
+ super().__init__(f'link {tarinfo.name!r} would be extracted as a '
+ + f'copy of {path!r}, which was rejected')
+
+# Errors caused by filters -- both "fatal" and "non-fatal" -- that
+# we consider to be issues with the argument, rather than a bug in the
+# filter function
+_FILTER_ERRORS = (FilterError, OSError, ExtractError)
+
def _get_filtered_attrs(member, dest_path, for_data=True):
new_attrs = {}
name = member.name
- dest_path = os.path.realpath(dest_path)
+ dest_path = os.path.realpath(dest_path, strict=os.path.ALLOW_MISSING)
# Strip leading / (tar's directory separator) from filenames.
# Include os.sep (target OS directory separator) as well.
if name.startswith(('/', os.sep)):
@@ -779,7 +791,8 @@ def _get_filtered_attrs(member, dest_path, for_data=True):
# For example, 'C:/foo' on Windows.
raise AbsolutePathError(member)
# Ensure we stay in the destination
- target_path = os.path.realpath(os.path.join(dest_path, name))
+ target_path = os.path.realpath(os.path.join(dest_path, name),
+ strict=os.path.ALLOW_MISSING)
if os.path.commonpath([target_path, dest_path]) != dest_path:
raise OutsideDestinationError(member, target_path)
# Limit permissions (no high bits, and go-w)
@@ -817,6 +830,9 @@ def _get_filtered_attrs(member, dest_path, for_data=True):
if member.islnk() or member.issym():
if os.path.isabs(member.linkname):
raise AbsoluteLinkError(member)
+ normalized = os.path.normpath(member.linkname)
+ if normalized != member.linkname:
+ new_attrs['linkname'] = normalized
if member.issym():
target_path = os.path.join(dest_path,
os.path.dirname(name),
@@ -824,7 +840,8 @@ def _get_filtered_attrs(member, dest_path, for_data=True):
else:
target_path = os.path.join(dest_path,
member.linkname)
- target_path = os.path.realpath(target_path)
+ target_path = os.path.realpath(target_path,
+ strict=os.path.ALLOW_MISSING)
if os.path.commonpath([target_path, dest_path]) != dest_path:
raise LinkOutsideDestinationError(member, target_path)
return new_attrs
@@ -2386,30 +2403,58 @@ class TarFile(object):
members = self
for member in members:
- tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+ tarinfo, unfiltered = self._get_extract_tarinfo(
+ member, filter_function, path)
if tarinfo is None:
continue
if tarinfo.isdir():
# For directories, delay setting attributes until later,
# since permissions can interfere with extraction and
# extracting contents can reset mtime.
- directories.append(tarinfo)
+ directories.append(unfiltered)
self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(),
- numeric_owner=numeric_owner)
+ numeric_owner=numeric_owner,
+ filter_function=filter_function)
# Reverse sort directories.
directories.sort(key=lambda a: a.name, reverse=True)
+
# Set correct owner, mtime and filemode on directories.
- for tarinfo in directories:
- dirpath = os.path.join(path, tarinfo.name)
+ for unfiltered in directories:
try:
+ # Need to re-apply any filter, to take the *current* filesystem
+ # state into account.
+ try:
+ tarinfo = filter_function(unfiltered, path)
+ except _FILTER_ERRORS as exc:
+ self._log_no_directory_fixup(unfiltered, repr(exc))
+ continue
+ if tarinfo is None:
+ self._log_no_directory_fixup(unfiltered,
+ 'excluded by filter')
+ continue
+ dirpath = os.path.join(path, tarinfo.name)
+ try:
+ lstat = os.lstat(dirpath)
+ except FileNotFoundError:
+ self._log_no_directory_fixup(tarinfo, 'missing')
+ continue
+ if not stat.S_ISDIR(lstat.st_mode):
+ # This is no longer a directory; presumably a later
+ # member overwrote the entry.
+ self._log_no_directory_fixup(tarinfo, 'not a directory')
+ continue
self.chown(tarinfo, dirpath, numeric_owner=numeric_owner)
self.utime(tarinfo, dirpath)
self.chmod(tarinfo, dirpath)
except ExtractError as e:
self._handle_nonfatal_error(e)
+ def _log_no_directory_fixup(self, member, reason):
+ self._dbg(2, "tarfile: Not fixing up directory %r (%s)" %
+ (member.name, reason))
+
def extract(self, member, path="", set_attrs=True, *, numeric_owner=False,
filter=None):
"""Extract a member from the archive to the current working directory,
@@ -2425,41 +2470,56 @@ class TarFile(object):
String names of common filters are accepted.
"""
filter_function = self._get_filter_function(filter)
- tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+ tarinfo, unfiltered = self._get_extract_tarinfo(
+ member, filter_function, path)
if tarinfo is not None:
self._extract_one(tarinfo, path, set_attrs, numeric_owner)
def _get_extract_tarinfo(self, member, filter_function, path):
- """Get filtered TarInfo (or None) from member, which might be a str"""
+ """Get (filtered, unfiltered) TarInfos from *member*
+
+ *member* might be a string.
+
+ Return (None, None) if not found.
+ """
+
if isinstance(member, str):
- tarinfo = self.getmember(member)
+ unfiltered = self.getmember(member)
else:
- tarinfo = member
+ unfiltered = member
- unfiltered = tarinfo
+ filtered = None
try:
- tarinfo = filter_function(tarinfo, path)
+ filtered = filter_function(unfiltered, path)
except (OSError, UnicodeEncodeError, FilterError) as e:
self._handle_fatal_error(e)
except ExtractError as e:
self._handle_nonfatal_error(e)
- if tarinfo is None:
+ if filtered is None:
self._dbg(2, "tarfile: Excluded %r" % unfiltered.name)
- return None
+ return None, None
+
# Prepare the link target for makelink().
- if tarinfo.islnk():
- tarinfo = copy.copy(tarinfo)
- tarinfo._link_target = os.path.join(path, tarinfo.linkname)
- return tarinfo
+ if filtered.islnk():
+ filtered = copy.copy(filtered)
+ filtered._link_target = os.path.join(path, filtered.linkname)
+ return filtered, unfiltered
+
+ def _extract_one(self, tarinfo, path, set_attrs, numeric_owner,
+ filter_function=None):
+ """Extract from filtered tarinfo to disk.
- def _extract_one(self, tarinfo, path, set_attrs, numeric_owner):
- """Extract from filtered tarinfo to disk"""
+ filter_function is only used when extracting a *different*
+ member (e.g. as fallback to creating a symlink)
+ """
self._check("r")
try:
self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
set_attrs=set_attrs,
- numeric_owner=numeric_owner)
+ numeric_owner=numeric_owner,
+ filter_function=filter_function,
+ extraction_root=path)
except (OSError, UnicodeEncodeError) as e:
self._handle_fatal_error(e)
except ExtractError as e:
@@ -2517,9 +2577,13 @@ class TarFile(object):
return None
def _extract_member(self, tarinfo, targetpath, set_attrs=True,
- numeric_owner=False):
- """Extract the TarInfo object tarinfo to a physical
+ numeric_owner=False, *, filter_function=None,
+ extraction_root=None):
+ """Extract the filtered TarInfo object tarinfo to a physical
file called targetpath.
+
+ filter_function is only used when extracting a *different*
+ member (e.g. as fallback to creating a symlink)
"""
# Fetch the TarInfo object for the given name
# and build the destination pathname, replacing
@@ -2548,7 +2612,10 @@ class TarFile(object):
elif tarinfo.ischr() or tarinfo.isblk():
self.makedev(tarinfo, targetpath)
elif tarinfo.islnk() or tarinfo.issym():
- self.makelink(tarinfo, targetpath)
+ self.makelink_with_filter(
+ tarinfo, targetpath,
+ filter_function=filter_function,
+ extraction_root=extraction_root)
elif tarinfo.type not in SUPPORTED_TYPES:
self.makeunknown(tarinfo, targetpath)
else:
@@ -2631,10 +2698,18 @@ class TarFile(object):
os.makedev(tarinfo.devmajor, tarinfo.devminor))
def makelink(self, tarinfo, targetpath):
+ return self.makelink_with_filter(tarinfo, targetpath, None, None)
+
+ def makelink_with_filter(self, tarinfo, targetpath,
+ filter_function, extraction_root):
"""Make a (symbolic) link called targetpath. If it cannot be created
(platform limitation), we try to make a copy of the referenced file
instead of a link.
+
+ filter_function is only used when extracting a *different*
+ member (e.g. as fallback to creating a link).
"""
+ keyerror_to_extracterror = False
try:
# For systems that support symbolic and hard links.
if tarinfo.issym():
@@ -2642,18 +2717,38 @@ class TarFile(object):
# Avoid FileExistsError on following os.symlink.
os.unlink(targetpath)
os.symlink(tarinfo.linkname, targetpath)
+ return
else:
if os.path.exists(tarinfo._link_target):
os.link(tarinfo._link_target, targetpath)
- else:
- self._extract_member(self._find_link_target(tarinfo),
- targetpath)
+ return
except symlink_exception:
+ keyerror_to_extracterror = True
+
+ try:
+ unfiltered = self._find_link_target(tarinfo)
+ except KeyError:
+ if keyerror_to_extracterror:
+ raise ExtractError(
+ "unable to resolve link inside archive") from None
+ else:
+ raise
+
+ if filter_function is None:
+ filtered = unfiltered
+ else:
+ if extraction_root is None:
+ raise ExtractError(
+ "makelink_with_filter: if filter_function is not None, "
+ + "extraction_root must also not be None")
try:
- self._extract_member(self._find_link_target(tarinfo),
- targetpath)
- except KeyError:
- raise ExtractError("unable to resolve link inside archive") from None
+ filtered = filter_function(unfiltered, extraction_root)
+ except _FILTER_ERRORS as cause:
+ raise LinkFallbackError(tarinfo, unfiltered.name) from cause
+ if filtered is not None:
+ self._extract_member(filtered, targetpath,
+ filter_function=filter_function,
+ extraction_root=extraction_root)
def chown(self, tarinfo, targetpath, numeric_owner):
"""Set owner of targetpath according to tarinfo. If numeric_owner
diff --git a/Lib/test/_code_definitions.py b/Lib/test/_code_definitions.py
index 733a15b25f6..274beb65a6d 100644
--- a/Lib/test/_code_definitions.py
+++ b/Lib/test/_code_definitions.py
@@ -57,6 +57,15 @@ def spam_with_globals_and_builtins():
print(res)
+def spam_full_args(a, b, /, c, d, *args, e, f, **kwargs):
+ return (a, b, c, d, e, f, args, kwargs)
+
+
+def spam_full_args_with_defaults(a=-1, b=-2, /, c=-3, d=-4, *args,
+ e=-5, f=-6, **kwargs):
+ return (a, b, c, d, e, f, args, kwargs)
+
+
def spam_args_attrs_and_builtins(a, b, /, c, d, *args, e, f, **kwargs):
if args.__len__() > 2:
return None
@@ -67,6 +76,10 @@ def spam_returns_arg(x):
return x
+def spam_raises():
+ raise Exception('spam!')
+
+
def spam_with_inner_not_closure():
def eggs():
pass
@@ -177,8 +190,11 @@ TOP_FUNCTIONS = [
spam_minimal,
spam_with_builtins,
spam_with_globals_and_builtins,
+ spam_full_args,
+ spam_full_args_with_defaults,
spam_args_attrs_and_builtins,
spam_returns_arg,
+ spam_raises,
spam_with_inner_not_closure,
spam_with_inner_closure,
spam_annotated,
@@ -219,8 +235,10 @@ STATELESS_FUNCTIONS = [
spam,
spam_minimal,
spam_with_builtins,
+ spam_full_args,
spam_args_attrs_and_builtins,
spam_returns_arg,
+ spam_raises,
spam_annotated,
spam_with_inner_not_closure,
spam_with_inner_closure,
@@ -238,6 +256,7 @@ STATELESS_FUNCTIONS = [
STATELESS_CODE = [
*STATELESS_FUNCTIONS,
script_with_globals,
+ spam_full_args_with_defaults,
spam_with_globals_and_builtins,
spam_full,
]
@@ -248,6 +267,7 @@ PURE_SCRIPT_FUNCTIONS = [
script_with_explicit_empty_return,
spam_minimal,
spam_with_builtins,
+ spam_raises,
spam_with_inner_not_closure,
spam_with_inner_closure,
]
diff --git a/Lib/test/pythoninfo.py b/Lib/test/pythoninfo.py
index e1830f2e6eb..80a262c18a5 100644
--- a/Lib/test/pythoninfo.py
+++ b/Lib/test/pythoninfo.py
@@ -920,10 +920,17 @@ def collect_windows(info_add):
try:
import _winapi
- dll_path = _winapi.GetModuleFileName(sys.dllhandle)
- info_add('windows.dll_path', dll_path)
- except (ImportError, AttributeError):
+ except ImportError:
pass
+ else:
+ try:
+ dll_path = _winapi.GetModuleFileName(sys.dllhandle)
+ info_add('windows.dll_path', dll_path)
+ except AttributeError:
+ pass
+
+ call_func(info_add, 'windows.ansi_code_page', _winapi, 'GetACP')
+ call_func(info_add, 'windows.oem_code_page', _winapi, 'GetOEMCP')
# windows.version_caption: "wmic os get Caption,Version /value" command
import subprocess
diff --git a/Lib/test/subprocessdata/fd_status.py b/Lib/test/subprocessdata/fd_status.py
index d12bd95abee..90e785981ae 100644
--- a/Lib/test/subprocessdata/fd_status.py
+++ b/Lib/test/subprocessdata/fd_status.py
@@ -2,7 +2,7 @@
file descriptors on stdout.
Usage:
-fd_stats.py: check all file descriptors
+fd_status.py: check all file descriptors (up to 255)
fd_status.py fd1 fd2 ...: check only specified file descriptors
"""
@@ -18,7 +18,7 @@ if __name__ == "__main__":
_MAXFD = os.sysconf("SC_OPEN_MAX")
except:
_MAXFD = 256
- test_fds = range(0, _MAXFD)
+ test_fds = range(0, min(_MAXFD, 256))
else:
test_fds = map(int, sys.argv[1:])
for fd in test_fds:
diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py
index 351d832a26d..f9b66b88d3d 100644
--- a/Lib/test/support/__init__.py
+++ b/Lib/test/support/__init__.py
@@ -1084,7 +1084,7 @@ def set_memlimit(limit: str) -> None:
global real_max_memuse
memlimit = _parse_memlimit(limit)
if memlimit < _2G - 1:
- raise ValueError('Memory limit {limit!r} too low to be useful')
+ raise ValueError(f'Memory limit {limit!r} too low to be useful')
real_max_memuse = memlimit
memlimit = min(memlimit, MAX_Py_ssize_t)
@@ -2358,7 +2358,7 @@ def infinite_recursion(max_depth=None):
# very deep recursion.
max_depth = 20_000
elif max_depth < 3:
- raise ValueError("max_depth must be at least 3, got {max_depth}")
+ raise ValueError(f"max_depth must be at least 3, got {max_depth}")
depth = get_recursion_depth()
depth = max(depth - 1, 1) # Ignore infinite_recursion() frame.
limit = depth + max_depth
diff --git a/Lib/test/support/interpreters/__init__.py b/Lib/test/support/interpreters/__init__.py
index e067f259364..6d1b0690805 100644
--- a/Lib/test/support/interpreters/__init__.py
+++ b/Lib/test/support/interpreters/__init__.py
@@ -226,33 +226,32 @@ class Interpreter:
if excinfo is not None:
raise ExecutionFailed(excinfo)
- def call(self, callable, /):
- """Call the object in the interpreter with given args/kwargs.
+ def _call(self, callable, args, kwargs):
+ res, excinfo = _interpreters.call(self._id, callable, args, kwargs, restrict=True)
+ if excinfo is not None:
+ raise ExecutionFailed(excinfo)
+ return res
- Only functions that take no arguments and have no closure
- are supported.
+ def call(self, callable, /, *args, **kwargs):
+ """Call the object in the interpreter with given args/kwargs.
- The return value is discarded.
+ Nearly all callables, args, kwargs, and return values are
+ supported. All "shareable" objects are supported, as are
+ "stateless" functions (meaning non-closures that do not use
+ any globals). This method will fall back to pickle.
If the callable raises an exception then the error display
- (including full traceback) is send back between the interpreters
+ (including full traceback) is sent back between the interpreters
and an ExecutionFailed exception is raised, much like what
happens with Interpreter.exec().
"""
- # XXX Support args and kwargs.
- # XXX Support arbitrary callables.
- # XXX Support returning the return value (e.g. via pickle).
- excinfo = _interpreters.call(self._id, callable, restrict=True)
- if excinfo is not None:
- raise ExecutionFailed(excinfo)
+ return self._call(callable, args, kwargs)
- def call_in_thread(self, callable, /):
+ def call_in_thread(self, callable, /, *args, **kwargs):
"""Return a new thread that calls the object in the interpreter.
The return value and any raised exception are discarded.
"""
- def task():
- self.call(callable)
- t = threading.Thread(target=task)
+ t = threading.Thread(target=self._call, args=(callable, args, kwargs))
t.start()
return t
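
Interpreter.call() now forwards positional and keyword arguments and returns the result; call_in_thread() reuses the same helper on a worker thread. A sketch, assuming a build that ships test.support.interpreters and subject to the shareable/stateless/pickle rules described in the docstring:

from test.support import interpreters

def add(a, b=0, *, scale=1):
    # stateless: no globals and no closure, so it can be sent to the subinterpreter
    return (a + b) * scale

interp = interpreters.create()
print(interp.call(add, 2, 3, scale=10))   # 50
t = interp.call_in_thread(add, 1, 1)      # return value is discarded
t.join()
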
diff --git a/Lib/test/support/interpreters/channels.py b/Lib/test/support/interpreters/channels.py
index b25a17b1aab..1724759b75a 100644
--- a/Lib/test/support/interpreters/channels.py
+++ b/Lib/test/support/interpreters/channels.py
@@ -69,7 +69,7 @@ def list_all():
if not hasattr(send, '_unboundop'):
send._set_unbound(unboundop)
else:
- assert send._unbound[0] == op
+ assert send._unbound[0] == unboundop
channels.append(chan)
return channels
diff --git a/Lib/test/test__interpreters.py b/Lib/test/test__interpreters.py
index 63fdaad8de7..ad3ebbfdff6 100644
--- a/Lib/test/test__interpreters.py
+++ b/Lib/test/test__interpreters.py
@@ -474,13 +474,15 @@ class CommonTests(TestBase):
def test_signatures(self):
# See https://github.com/python/cpython/issues/126654
- msg = "expected 'shared' to be a dict"
+ msg = r'_interpreters.exec\(\) argument 3 must be dict, not int'
with self.assertRaisesRegex(TypeError, msg):
_interpreters.exec(self.id, 'a', 1)
with self.assertRaisesRegex(TypeError, msg):
_interpreters.exec(self.id, 'a', shared=1)
+ msg = r'_interpreters.run_string\(\) argument 3 must be dict, not int'
with self.assertRaisesRegex(TypeError, msg):
_interpreters.run_string(self.id, 'a', shared=1)
+ msg = r'_interpreters.run_func\(\) argument 3 must be dict, not int'
with self.assertRaisesRegex(TypeError, msg):
_interpreters.run_func(self.id, lambda: None, shared=1)
@@ -952,7 +954,8 @@ class RunFailedTests(TestBase):
""")
with self.subTest('script'):
- self.assert_run_failed(SyntaxError, script)
+ with self.assertRaises(SyntaxError):
+ _interpreters.run_string(self.id, script)
with self.subTest('module'):
modname = 'spam_spam_spam'
@@ -1019,12 +1022,19 @@ class RunFuncTests(TestBase):
with open(w, 'w', encoding="utf-8") as spipe:
with contextlib.redirect_stdout(spipe):
print('it worked!', end='')
+ failed = None
def f():
- _interpreters.set___main___attrs(self.id, dict(w=w))
- _interpreters.run_func(self.id, script)
+ nonlocal failed
+ try:
+ _interpreters.set___main___attrs(self.id, dict(w=w))
+ _interpreters.run_func(self.id, script)
+ except Exception as exc:
+ failed = exc
t = threading.Thread(target=f)
t.start()
t.join()
+ if failed:
+ raise Exception from failed
with open(r, encoding="utf-8") as outfile:
out = outfile.read()
@@ -1053,19 +1063,16 @@ class RunFuncTests(TestBase):
spam = True
def script():
assert spam
-
- with self.assertRaises(TypeError):
+ with self.assertRaises(ValueError):
_interpreters.run_func(self.id, script)
- # XXX This hasn't been fixed yet.
- @unittest.expectedFailure
def test_return_value(self):
def script():
return 'spam'
with self.assertRaises(ValueError):
_interpreters.run_func(self.id, script)
- @unittest.skip("we're not quite there yet")
+# @unittest.skip("we're not quite there yet")
def test_args(self):
with self.subTest('args'):
def script(a, b=0):
diff --git a/Lib/test/test_ast/test_ast.py b/Lib/test/test_ast/test_ast.py
index 46745cfa8f8..cc46529c0ef 100644
--- a/Lib/test/test_ast/test_ast.py
+++ b/Lib/test/test_ast/test_ast.py
@@ -1372,17 +1372,17 @@ class ASTHelpers_Test(unittest.TestCase):
def test_dump(self):
node = ast.parse('spam(eggs, "and cheese")')
self.assertEqual(ast.dump(node),
- "Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load()), "
- "args=[Name(id='eggs', ctx=Load()), Constant(value='and cheese')]))])"
+ "Module(body=[Expr(value=Call(func=Name(id='spam'), "
+ "args=[Name(id='eggs'), Constant(value='and cheese')]))])"
)
self.assertEqual(ast.dump(node, annotate_fields=False),
- "Module([Expr(Call(Name('spam', Load()), [Name('eggs', Load()), "
+ "Module([Expr(Call(Name('spam'), [Name('eggs'), "
"Constant('and cheese')]))])"
)
self.assertEqual(ast.dump(node, include_attributes=True),
- "Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load(), "
+ "Module(body=[Expr(value=Call(func=Name(id='spam', "
"lineno=1, col_offset=0, end_lineno=1, end_col_offset=4), "
- "args=[Name(id='eggs', ctx=Load(), lineno=1, col_offset=5, "
+ "args=[Name(id='eggs', lineno=1, col_offset=5, "
"end_lineno=1, end_col_offset=9), Constant(value='and cheese', "
"lineno=1, col_offset=11, end_lineno=1, end_col_offset=23)], "
"lineno=1, col_offset=0, end_lineno=1, end_col_offset=24), "
@@ -1396,18 +1396,18 @@ Module(
body=[
Expr(
value=Call(
- func=Name(id='spam', ctx=Load()),
+ func=Name(id='spam'),
args=[
- Name(id='eggs', ctx=Load()),
+ Name(id='eggs'),
Constant(value='and cheese')]))])""")
self.assertEqual(ast.dump(node, annotate_fields=False, indent='\t'), """\
Module(
\t[
\t\tExpr(
\t\t\tCall(
-\t\t\t\tName('spam', Load()),
+\t\t\t\tName('spam'),
\t\t\t\t[
-\t\t\t\t\tName('eggs', Load()),
+\t\t\t\t\tName('eggs'),
\t\t\t\t\tConstant('and cheese')]))])""")
self.assertEqual(ast.dump(node, include_attributes=True, indent=3), """\
Module(
@@ -1416,7 +1416,6 @@ Module(
value=Call(
func=Name(
id='spam',
- ctx=Load(),
lineno=1,
col_offset=0,
end_lineno=1,
@@ -1424,7 +1423,6 @@ Module(
args=[
Name(
id='eggs',
- ctx=Load(),
lineno=1,
col_offset=5,
end_lineno=1,
@@ -1454,23 +1452,23 @@ Module(
)
node = ast.Raise(exc=ast.Name(id='e', ctx=ast.Load()), lineno=3, col_offset=4)
self.assertEqual(ast.dump(node),
- "Raise(exc=Name(id='e', ctx=Load()))"
+ "Raise(exc=Name(id='e'))"
)
self.assertEqual(ast.dump(node, annotate_fields=False),
- "Raise(Name('e', Load()))"
+ "Raise(Name('e'))"
)
self.assertEqual(ast.dump(node, include_attributes=True),
- "Raise(exc=Name(id='e', ctx=Load()), lineno=3, col_offset=4)"
+ "Raise(exc=Name(id='e'), lineno=3, col_offset=4)"
)
self.assertEqual(ast.dump(node, annotate_fields=False, include_attributes=True),
- "Raise(Name('e', Load()), lineno=3, col_offset=4)"
+ "Raise(Name('e'), lineno=3, col_offset=4)"
)
node = ast.Raise(cause=ast.Name(id='e', ctx=ast.Load()))
self.assertEqual(ast.dump(node),
- "Raise(cause=Name(id='e', ctx=Load()))"
+ "Raise(cause=Name(id='e'))"
)
self.assertEqual(ast.dump(node, annotate_fields=False),
- "Raise(cause=Name('e', Load()))"
+ "Raise(cause=Name('e'))"
)
# Arguments:
node = ast.arguments(args=[ast.arg("x")])
@@ -1502,10 +1500,10 @@ Module(
[ast.Name('dataclass', ctx=ast.Load())],
)
self.assertEqual(ast.dump(node),
- "ClassDef(name='T', keywords=[keyword(arg='a', value=Constant(value=None))], decorator_list=[Name(id='dataclass', ctx=Load())])",
+ "ClassDef(name='T', keywords=[keyword(arg='a', value=Constant(value=None))], decorator_list=[Name(id='dataclass')])",
)
self.assertEqual(ast.dump(node, annotate_fields=False),
- "ClassDef('T', [], [keyword('a', Constant(None))], [], [Name('dataclass', Load())])",
+ "ClassDef('T', [], [keyword('a', Constant(None))], [], [Name('dataclass')])",
)
def test_dump_show_empty(self):
@@ -1533,7 +1531,7 @@ Module(
check_node(
# Corner case: there are no real `Name` instances with `id=''`:
ast.Name(id='', ctx=ast.Load()),
- empty="Name(id='', ctx=Load())",
+ empty="Name(id='')",
full="Name(id='', ctx=Load())",
)
@@ -1544,39 +1542,63 @@ Module(
)
check_node(
+ ast.MatchSingleton(value=[]),
+ empty="MatchSingleton(value=[])",
+ full="MatchSingleton(value=[])",
+ )
+
+ check_node(
ast.Constant(value=None),
empty="Constant(value=None)",
full="Constant(value=None)",
)
check_node(
+ ast.Constant(value=[]),
+ empty="Constant(value=[])",
+ full="Constant(value=[])",
+ )
+
+ check_node(
ast.Constant(value=''),
empty="Constant(value='')",
full="Constant(value='')",
)
+ check_node(
+ ast.Interpolation(value=ast.Constant(42), str=None, conversion=-1),
+ empty="Interpolation(value=Constant(value=42), str=None, conversion=-1)",
+ full="Interpolation(value=Constant(value=42), str=None, conversion=-1)",
+ )
+
+ check_node(
+ ast.Interpolation(value=ast.Constant(42), str=[], conversion=-1),
+ empty="Interpolation(value=Constant(value=42), str=[], conversion=-1)",
+ full="Interpolation(value=Constant(value=42), str=[], conversion=-1)",
+ )
+
check_text(
"def a(b: int = 0, *, c): ...",
- empty="Module(body=[FunctionDef(name='a', args=arguments(args=[arg(arg='b', annotation=Name(id='int', ctx=Load()))], kwonlyargs=[arg(arg='c')], kw_defaults=[None], defaults=[Constant(value=0)]), body=[Expr(value=Constant(value=Ellipsis))])])",
+ empty="Module(body=[FunctionDef(name='a', args=arguments(args=[arg(arg='b', annotation=Name(id='int'))], kwonlyargs=[arg(arg='c')], kw_defaults=[None], defaults=[Constant(value=0)]), body=[Expr(value=Constant(value=Ellipsis))])])",
full="Module(body=[FunctionDef(name='a', args=arguments(posonlyargs=[], args=[arg(arg='b', annotation=Name(id='int', ctx=Load()))], kwonlyargs=[arg(arg='c')], kw_defaults=[None], defaults=[Constant(value=0)]), body=[Expr(value=Constant(value=Ellipsis))], decorator_list=[], type_params=[])], type_ignores=[])",
)
check_text(
"def a(b: int = 0, *, c): ...",
- empty="Module(body=[FunctionDef(name='a', args=arguments(args=[arg(arg='b', annotation=Name(id='int', ctx=Load(), lineno=1, col_offset=9, end_lineno=1, end_col_offset=12), lineno=1, col_offset=6, end_lineno=1, end_col_offset=12)], kwonlyargs=[arg(arg='c', lineno=1, col_offset=21, end_lineno=1, end_col_offset=22)], kw_defaults=[None], defaults=[Constant(value=0, lineno=1, col_offset=15, end_lineno=1, end_col_offset=16)]), body=[Expr(value=Constant(value=Ellipsis, lineno=1, col_offset=25, end_lineno=1, end_col_offset=28), lineno=1, col_offset=25, end_lineno=1, end_col_offset=28)], lineno=1, col_offset=0, end_lineno=1, end_col_offset=28)])",
+ empty="Module(body=[FunctionDef(name='a', args=arguments(args=[arg(arg='b', annotation=Name(id='int', lineno=1, col_offset=9, end_lineno=1, end_col_offset=12), lineno=1, col_offset=6, end_lineno=1, end_col_offset=12)], kwonlyargs=[arg(arg='c', lineno=1, col_offset=21, end_lineno=1, end_col_offset=22)], kw_defaults=[None], defaults=[Constant(value=0, lineno=1, col_offset=15, end_lineno=1, end_col_offset=16)]), body=[Expr(value=Constant(value=Ellipsis, lineno=1, col_offset=25, end_lineno=1, end_col_offset=28), lineno=1, col_offset=25, end_lineno=1, end_col_offset=28)], lineno=1, col_offset=0, end_lineno=1, end_col_offset=28)])",
full="Module(body=[FunctionDef(name='a', args=arguments(posonlyargs=[], args=[arg(arg='b', annotation=Name(id='int', ctx=Load(), lineno=1, col_offset=9, end_lineno=1, end_col_offset=12), lineno=1, col_offset=6, end_lineno=1, end_col_offset=12)], kwonlyargs=[arg(arg='c', lineno=1, col_offset=21, end_lineno=1, end_col_offset=22)], kw_defaults=[None], defaults=[Constant(value=0, lineno=1, col_offset=15, end_lineno=1, end_col_offset=16)]), body=[Expr(value=Constant(value=Ellipsis, lineno=1, col_offset=25, end_lineno=1, end_col_offset=28), lineno=1, col_offset=25, end_lineno=1, end_col_offset=28)], decorator_list=[], type_params=[], lineno=1, col_offset=0, end_lineno=1, end_col_offset=28)], type_ignores=[])",
include_attributes=True,
)
check_text(
'spam(eggs, "and cheese")',
- empty="Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load()), args=[Name(id='eggs', ctx=Load()), Constant(value='and cheese')]))])",
+ empty="Module(body=[Expr(value=Call(func=Name(id='spam'), args=[Name(id='eggs'), Constant(value='and cheese')]))])",
full="Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load()), args=[Name(id='eggs', ctx=Load()), Constant(value='and cheese')], keywords=[]))], type_ignores=[])",
)
check_text(
'spam(eggs, text="and cheese")',
- empty="Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load()), args=[Name(id='eggs', ctx=Load())], keywords=[keyword(arg='text', value=Constant(value='and cheese'))]))])",
+ empty="Module(body=[Expr(value=Call(func=Name(id='spam'), args=[Name(id='eggs')], keywords=[keyword(arg='text', value=Constant(value='and cheese'))]))])",
full="Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load()), args=[Name(id='eggs', ctx=Load())], keywords=[keyword(arg='text', value=Constant(value='and cheese'))]))], type_ignores=[])",
)
@@ -1610,12 +1632,12 @@ Module(
self.assertEqual(src, ast.fix_missing_locations(src))
self.maxDiff = None
self.assertEqual(ast.dump(src, include_attributes=True),
- "Module(body=[Expr(value=Call(func=Name(id='write', ctx=Load(), "
+ "Module(body=[Expr(value=Call(func=Name(id='write', "
"lineno=1, col_offset=0, end_lineno=1, end_col_offset=5), "
"args=[Constant(value='spam', lineno=1, col_offset=6, end_lineno=1, "
"end_col_offset=12)], lineno=1, col_offset=0, end_lineno=1, "
"end_col_offset=13), lineno=1, col_offset=0, end_lineno=1, "
- "end_col_offset=13), Expr(value=Call(func=Name(id='spam', ctx=Load(), "
+ "end_col_offset=13), Expr(value=Call(func=Name(id='spam', "
"lineno=1, col_offset=0, end_lineno=1, end_col_offset=0), "
"args=[Constant(value='eggs', lineno=1, col_offset=0, end_lineno=1, "
"end_col_offset=0)], lineno=1, col_offset=0, end_lineno=1, "
@@ -3335,7 +3357,7 @@ class CommandLineTests(unittest.TestCase):
body=[
AnnAssign(
target=Name(id='x', ctx=Store()),
- annotation=Name(id='bool', ctx=Load()),
+ annotation=Name(id='bool'),
value=Constant(value=1),
simple=1)],
type_ignores=[
@@ -3363,7 +3385,7 @@ class CommandLineTests(unittest.TestCase):
expect = '''
Expression(
body=Call(
- func=Name(id='print', ctx=Load()),
+ func=Name(id='print'),
args=[
Constant(value=1),
Constant(value=2),
@@ -3379,12 +3401,11 @@ class CommandLineTests(unittest.TestCase):
expect = '''
FunctionType(
argtypes=[
- Name(id='int', ctx=Load()),
- Name(id='str', ctx=Load())],
+ Name(id='int'),
+ Name(id='str')],
returns=Subscript(
- value=Name(id='list', ctx=Load()),
- slice=Name(id='int', ctx=Load()),
- ctx=Load()))
+ value=Name(id='list'),
+ slice=Name(id='int')))
'''
for flag in ('-m=func_type', '--mode=func_type'):
with self.subTest(flag=flag):
@@ -3398,7 +3419,7 @@ class CommandLineTests(unittest.TestCase):
body=[
AnnAssign(
target=Name(id='x', ctx=Store()),
- annotation=Name(id='bool', ctx=Load()),
+ annotation=Name(id='bool'),
value=Constant(value=1),
simple=1)])
'''
@@ -3443,7 +3464,7 @@ class CommandLineTests(unittest.TestCase):
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchValue(
@@ -3466,7 +3487,7 @@ class CommandLineTests(unittest.TestCase):
Module(
body=[
Match(
- subject=Name(id='a', ctx=Load()),
+ subject=Name(id='a'),
cases=[
match_case(
pattern=MatchValue(
@@ -3492,7 +3513,7 @@ class CommandLineTests(unittest.TestCase):
Module(
body=[
Match(
- subject=Name(id='a', ctx=Load()),
+ subject=Name(id='a'),
cases=[
match_case(
pattern=MatchValue(
diff --git a/Lib/test/test_asyncgen.py b/Lib/test/test_asyncgen.py
index 2c44647bf3e..636cb33dd98 100644
--- a/Lib/test/test_asyncgen.py
+++ b/Lib/test/test_asyncgen.py
@@ -2021,6 +2021,15 @@ class TestUnawaitedWarnings(unittest.TestCase):
g.athrow(RuntimeError)
gc_collect()
+ def test_athrow_throws_immediately(self):
+ async def gen():
+ yield 1
+
+ g = gen()
+ msg = "athrow expected at least 1 argument, got 0"
+ with self.assertRaisesRegex(TypeError, msg):
+ g.athrow()
+
def test_aclose(self):
async def gen():
yield 1
diff --git a/Lib/test/test_capi/test_sys.py b/Lib/test/test_capi/test_sys.py
index d3a9b378e77..3793ce2461e 100644
--- a/Lib/test/test_capi/test_sys.py
+++ b/Lib/test/test_capi/test_sys.py
@@ -19,6 +19,68 @@ class CAPITest(unittest.TestCase):
maxDiff = None
+ @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module')
+ def test_sys_getattr(self):
+ # Test PySys_GetAttr()
+ sys_getattr = _testlimitedcapi.sys_getattr
+
+ self.assertIs(sys_getattr('stdout'), sys.stdout)
+ with support.swap_attr(sys, '\U0001f40d', 42):
+ self.assertEqual(sys_getattr('\U0001f40d'), 42)
+
+ with self.assertRaisesRegex(RuntimeError, r'lost sys\.nonexistent'):
+ sys_getattr('nonexistent')
+ with self.assertRaisesRegex(RuntimeError, r'lost sys\.\U0001f40d'):
+ sys_getattr('\U0001f40d')
+ self.assertRaises(TypeError, sys_getattr, 1)
+ self.assertRaises(TypeError, sys_getattr, [])
+ # CRASHES sys_getattr(NULL)
+
+ @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module')
+ def test_sys_getattrstring(self):
+ # Test PySys_GetAttrString()
+ getattrstring = _testlimitedcapi.sys_getattrstring
+
+ self.assertIs(getattrstring(b'stdout'), sys.stdout)
+ with support.swap_attr(sys, '\U0001f40d', 42):
+ self.assertEqual(getattrstring('\U0001f40d'.encode()), 42)
+
+ with self.assertRaisesRegex(RuntimeError, r'lost sys\.nonexistent'):
+ getattrstring(b'nonexistent')
+ with self.assertRaisesRegex(RuntimeError, r'lost sys\.\U0001f40d'):
+ getattrstring('\U0001f40d'.encode())
+ self.assertRaises(UnicodeDecodeError, getattrstring, b'\xff')
+ # CRASHES getattrstring(NULL)
+
+ @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module')
+ def test_sys_getoptionalattr(self):
+ # Test PySys_GetOptionalAttr()
+ getoptionalattr = _testlimitedcapi.sys_getoptionalattr
+
+ self.assertIs(getoptionalattr('stdout'), sys.stdout)
+ with support.swap_attr(sys, '\U0001f40d', 42):
+ self.assertEqual(getoptionalattr('\U0001f40d'), 42)
+
+ self.assertIs(getoptionalattr('nonexistent'), AttributeError)
+ self.assertIs(getoptionalattr('\U0001f40d'), AttributeError)
+ self.assertRaises(TypeError, getoptionalattr, 1)
+ self.assertRaises(TypeError, getoptionalattr, [])
+ # CRASHES getoptionalattr(NULL)
+
+ @unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module')
+ def test_sys_getoptionalattrstring(self):
+ # Test PySys_GetOptionalAttrString()
+ getoptionalattrstring = _testlimitedcapi.sys_getoptionalattrstring
+
+ self.assertIs(getoptionalattrstring(b'stdout'), sys.stdout)
+ with support.swap_attr(sys, '\U0001f40d', 42):
+ self.assertEqual(getoptionalattrstring('\U0001f40d'.encode()), 42)
+
+ self.assertIs(getoptionalattrstring(b'nonexistent'), AttributeError)
+ self.assertIs(getoptionalattrstring('\U0001f40d'.encode()), AttributeError)
+ self.assertRaises(UnicodeDecodeError, getoptionalattrstring, b'\xff')
+ # CRASHES getoptionalattrstring(NULL)
+
@support.cpython_only
@unittest.skipIf(_testlimitedcapi is None, 'need _testlimitedcapi module')
def test_sys_getobject(self):
@@ -29,7 +91,7 @@ class CAPITest(unittest.TestCase):
with support.swap_attr(sys, '\U0001f40d', 42):
self.assertEqual(getobject('\U0001f40d'.encode()), 42)
- self.assertIs(getobject(b'nonexisting'), AttributeError)
+ self.assertIs(getobject(b'nonexistent'), AttributeError)
with support.catch_unraisable_exception() as cm:
self.assertIs(getobject(b'\xff'), AttributeError)
self.assertEqual(cm.unraisable.exc_type, UnicodeDecodeError)
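
The four new helpers come from _testlimitedcapi and exercise PySys_GetAttr(), PySys_GetAttrString(), PySys_GetOptionalAttr() and PySys_GetOptionalAttrString(). As a rough Python-level analogue of the semantics the tests check (a required lookup reports "lost sys.<name>", an optional lookup reports absence without raising), one might sketch the following; the helper names are illustrative only and not the real C signatures:

import sys

def get_required(name):
    # PySys_GetAttr()-style: a missing attribute is an error.
    try:
        return getattr(sys, name)
    except AttributeError:
        raise RuntimeError(f"lost sys.{name}") from None

def get_optional(name):
    # PySys_GetOptionalAttr()-style: absence is reported, not raised.
    return getattr(sys, name, None)

print(get_required("stdout") is sys.stdout)   # True
print(get_optional("nonexistent") is None)    # True
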
diff --git a/Lib/test/test_capi/test_type.py b/Lib/test/test_capi/test_type.py
index 3c9974c7387..15fb4a93e2a 100644
--- a/Lib/test/test_capi/test_type.py
+++ b/Lib/test/test_capi/test_type.py
@@ -264,3 +264,13 @@ class TypeTests(unittest.TestCase):
ManualHeapType = _testcapi.ManualHeapType
for i in range(100):
self.assertIsInstance(ManualHeapType(), ManualHeapType)
+
+ def test_extension_managed_dict_type(self):
+ ManagedDictType = _testcapi.ManagedDictType
+ obj = ManagedDictType()
+ obj.foo = 42
+ self.assertEqual(obj.foo, 42)
+ self.assertEqual(obj.__dict__, {'foo': 42})
+ obj.__dict__ = {'bar': 3}
+ self.assertEqual(obj.__dict__, {'bar': 3})
+ self.assertEqual(obj.bar, 3)
diff --git a/Lib/test/test_capi/test_unicode.py b/Lib/test/test_capi/test_unicode.py
index 3408c10f426..c8be4f3faa9 100644
--- a/Lib/test/test_capi/test_unicode.py
+++ b/Lib/test/test_capi/test_unicode.py
@@ -1776,6 +1776,13 @@ class PyUnicodeWriterTest(unittest.TestCase):
self.assertEqual(writer.finish(),
"ascii-latin1=\xE9-euro=\u20AC.")
+ def test_ascii(self):
+ writer = self.create_writer(0)
+ writer.write_ascii(b"Hello ", -1)
+ writer.write_ascii(b"", 0)
+ writer.write_ascii(b"Python! <truncated>", 6)
+ self.assertEqual(writer.finish(), "Hello Python")
+
def test_invalid_utf8(self):
writer = self.create_writer(0)
with self.assertRaises(UnicodeDecodeError):
diff --git a/Lib/test/test_class.py b/Lib/test/test_class.py
index 4c12d43556f..8c7a62a74ba 100644
--- a/Lib/test/test_class.py
+++ b/Lib/test/test_class.py
@@ -652,6 +652,7 @@ class ClassTests(unittest.TestCase):
a = A(hash(A.f)^(-1))
hash(a.f)
+ @cpython_only
def testSetattrWrapperNameIntern(self):
# Issue #25794: __setattr__ should intern the attribute name
class A:
diff --git a/Lib/test/test_code.py b/Lib/test/test_code.py
index 32cf8aacaf6..9fc2b047bef 100644
--- a/Lib/test/test_code.py
+++ b/Lib/test/test_code.py
@@ -701,6 +701,26 @@ class CodeTest(unittest.TestCase):
'checks': CO_FAST_LOCAL,
'res': CO_FAST_LOCAL,
},
+ defs.spam_full_args: {
+ 'a': POSONLY,
+ 'b': POSONLY,
+ 'c': POSORKW,
+ 'd': POSORKW,
+ 'e': KWONLY,
+ 'f': KWONLY,
+ 'args': VARARGS,
+ 'kwargs': VARKWARGS,
+ },
+ defs.spam_full_args_with_defaults: {
+ 'a': POSONLY,
+ 'b': POSONLY,
+ 'c': POSORKW,
+ 'd': POSORKW,
+ 'e': KWONLY,
+ 'f': KWONLY,
+ 'args': VARARGS,
+ 'kwargs': VARKWARGS,
+ },
defs.spam_args_attrs_and_builtins: {
'a': POSONLY,
'b': POSONLY,
@@ -714,6 +734,7 @@ class CodeTest(unittest.TestCase):
defs.spam_returns_arg: {
'x': POSORKW,
},
+ defs.spam_raises: {},
defs.spam_with_inner_not_closure: {
'eggs': CO_FAST_LOCAL,
},
@@ -934,6 +955,20 @@ class CodeTest(unittest.TestCase):
purelocals=5,
globalvars=6,
),
+ defs.spam_full_args: new_var_counts(
+ posonly=2,
+ posorkw=2,
+ kwonly=2,
+ varargs=1,
+ varkwargs=1,
+ ),
+ defs.spam_full_args_with_defaults: new_var_counts(
+ posonly=2,
+ posorkw=2,
+ kwonly=2,
+ varargs=1,
+ varkwargs=1,
+ ),
defs.spam_args_attrs_and_builtins: new_var_counts(
posonly=2,
posorkw=2,
@@ -945,6 +980,9 @@ class CodeTest(unittest.TestCase):
defs.spam_returns_arg: new_var_counts(
posorkw=1,
),
+ defs.spam_raises: new_var_counts(
+ globalvars=1,
+ ),
defs.spam_with_inner_not_closure: new_var_counts(
purelocals=1,
),
@@ -1097,10 +1135,16 @@ class CodeTest(unittest.TestCase):
def test_stateless(self):
self.maxDiff = None
+ STATELESS_FUNCTIONS = [
+ *defs.STATELESS_FUNCTIONS,
+ # stateless with defaults
+ defs.spam_full_args_with_defaults,
+ ]
+
for func in defs.STATELESS_CODE:
with self.subTest((func, '(code)')):
_testinternalcapi.verify_stateless_code(func.__code__)
- for func in defs.STATELESS_FUNCTIONS:
+ for func in STATELESS_FUNCTIONS:
with self.subTest((func, '(func)')):
_testinternalcapi.verify_stateless_code(func)
@@ -1110,7 +1154,7 @@ class CodeTest(unittest.TestCase):
with self.assertRaises(Exception):
_testinternalcapi.verify_stateless_code(func.__code__)
- if func not in defs.STATELESS_FUNCTIONS:
+ if func not in STATELESS_FUNCTIONS:
with self.subTest((func, '(func)')):
with self.assertRaises(Exception):
_testinternalcapi.verify_stateless_code(func)
diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py
index 8c9a0972492..d8666f7290e 100644
--- a/Lib/test/test_codecs.py
+++ b/Lib/test/test_codecs.py
@@ -1,6 +1,7 @@
import codecs
import contextlib
import copy
+import importlib
import io
import pickle
import os
@@ -3111,9 +3112,9 @@ class TransformCodecTest(unittest.TestCase):
def test_alias_modules_exist(self):
encodings_dir = os.path.dirname(encodings.__file__)
for value in encodings.aliases.aliases.values():
- codec_file = os.path.join(encodings_dir, value + ".py")
- self.assertTrue(os.path.isfile(codec_file),
- "Codec file not found: " + codec_file)
+ codec_mod = f"encodings.{value}"
+ self.assertIsNotNone(importlib.util.find_spec(codec_mod),
+ f"Codec module not found: {codec_mod}")
def test_quopri_stateless(self):
# Should encode with quotetabs=True
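
The rewritten test_alias_modules_exist no longer assumes every codec lives as a .py file inside the encodings package directory; importlib.util.find_spec() answers "is this module importable?" without importing it. A small standalone illustration (the module names are just examples):

import importlib.util

# find_spec() returns a ModuleSpec if the module can be located, else None.
print(importlib.util.find_spec("encodings.utf_8") is not None)   # True
print(importlib.util.find_spec("encodings.no_such_codec"))       # None
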
diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py
index 9aace57633b..60feab225a1 100644
--- a/Lib/test/test_csv.py
+++ b/Lib/test/test_csv.py
@@ -1122,19 +1122,22 @@ class TestDialectValidity(unittest.TestCase):
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"quotechar" must be a 1-character string')
+ '"quotechar" must be a unicode character or None, '
+ 'not a string of length 0')
mydialect.quotechar = "''"
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"quotechar" must be a 1-character string')
+ '"quotechar" must be a unicode character or None, '
+ 'not a string of length 2')
mydialect.quotechar = 4
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"quotechar" must be string or None, not int')
+ '"quotechar" must be a unicode character or None, '
+ 'not int')
def test_delimiter(self):
class mydialect(csv.Dialect):
@@ -1151,31 +1154,32 @@ class TestDialectValidity(unittest.TestCase):
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"delimiter" must be a 1-character string')
+ '"delimiter" must be a unicode character, '
+ 'not a string of length 3')
mydialect.delimiter = ""
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"delimiter" must be a 1-character string')
+ '"delimiter" must be a unicode character, not a string of length 0')
mydialect.delimiter = b","
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"delimiter" must be string, not bytes')
+ '"delimiter" must be a unicode character, not bytes')
mydialect.delimiter = 4
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"delimiter" must be string, not int')
+ '"delimiter" must be a unicode character, not int')
mydialect.delimiter = None
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"delimiter" must be string, not NoneType')
+ '"delimiter" must be a unicode character, not NoneType')
def test_escapechar(self):
class mydialect(csv.Dialect):
@@ -1189,20 +1193,32 @@ class TestDialectValidity(unittest.TestCase):
self.assertEqual(d.escapechar, "\\")
mydialect.escapechar = ""
- with self.assertRaisesRegex(csv.Error, '"escapechar" must be a 1-character string'):
+ with self.assertRaises(csv.Error) as cm:
mydialect()
+ self.assertEqual(str(cm.exception),
+ '"escapechar" must be a unicode character or None, '
+ 'not a string of length 0')
mydialect.escapechar = "**"
- with self.assertRaisesRegex(csv.Error, '"escapechar" must be a 1-character string'):
+ with self.assertRaises(csv.Error) as cm:
mydialect()
+ self.assertEqual(str(cm.exception),
+ '"escapechar" must be a unicode character or None, '
+ 'not a string of length 2')
mydialect.escapechar = b"*"
- with self.assertRaisesRegex(csv.Error, '"escapechar" must be string or None, not bytes'):
+ with self.assertRaises(csv.Error) as cm:
mydialect()
+ self.assertEqual(str(cm.exception),
+ '"escapechar" must be a unicode character or None, '
+ 'not bytes')
mydialect.escapechar = 4
- with self.assertRaisesRegex(csv.Error, '"escapechar" must be string or None, not int'):
+ with self.assertRaises(csv.Error) as cm:
mydialect()
+ self.assertEqual(str(cm.exception),
+ '"escapechar" must be a unicode character or None, '
+ 'not int')
def test_lineterminator(self):
class mydialect(csv.Dialect):
@@ -1223,7 +1239,13 @@ class TestDialectValidity(unittest.TestCase):
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"lineterminator" must be a string')
+ '"lineterminator" must be a string, not int')
+
+ mydialect.lineterminator = None
+ with self.assertRaises(csv.Error) as cm:
+ mydialect()
+ self.assertEqual(str(cm.exception),
+ '"lineterminator" must be a string, not NoneType')
def test_invalid_chars(self):
def create_invalid(field_name, value, **kwargs):
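
The updated messages name the offending type or length instead of the generic "1-character string" wording. Independently of the exact text, dialect validation still surfaces as csv.Error at instantiation time; a minimal sketch (the class name is made up):

import csv

class BadDialect(csv.Dialect):
    delimiter = ":::"            # must be a single character
    quotechar = '"'
    doublequote = True
    skipinitialspace = False
    lineterminator = "\r\n"
    quoting = csv.QUOTE_MINIMAL

try:
    BadDialect()
except csv.Error as exc:
    print("rejected:", exc)      # exact wording depends on the Python version
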
diff --git a/Lib/test/test_ctypes/test_incomplete.py b/Lib/test/test_ctypes/test_incomplete.py
index fefdfe9102e..3189fcd1bd1 100644
--- a/Lib/test/test_ctypes/test_incomplete.py
+++ b/Lib/test/test_ctypes/test_incomplete.py
@@ -1,6 +1,5 @@
import ctypes
import unittest
-import warnings
from ctypes import Structure, POINTER, pointer, c_char_p
# String-based "incomplete pointers" were implemented in ctypes 0.6.3 (2003, when
@@ -21,9 +20,7 @@ class TestSetPointerType(unittest.TestCase):
_fields_ = [("name", c_char_p),
("next", lpcell)]
- with warnings.catch_warnings():
- warnings.simplefilter('ignore', DeprecationWarning)
- ctypes.SetPointerType(lpcell, cell)
+ lpcell.set_type(cell)
self.assertIs(POINTER(cell), lpcell)
@@ -50,10 +47,9 @@ class TestSetPointerType(unittest.TestCase):
_fields_ = [("name", c_char_p),
("next", lpcell)]
- with self.assertWarns(DeprecationWarning):
- ctypes.SetPointerType(lpcell, cell)
-
+ lpcell.set_type(cell)
self.assertIs(POINTER(cell), lpcell)
+
if __name__ == '__main__':
unittest.main()
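
The test now calls lpcell.set_type(cell) directly instead of the deprecated ctypes.SetPointerType(). Outside of this legacy string-based mechanism, the usual way to build a self-referential structure is to assign _fields_ after the class statement; a hedged sketch of that standard pattern:

import ctypes

class Cell(ctypes.Structure):
    pass

# Assigning _fields_ after the class exists lets "next" point at Cell itself.
Cell._fields_ = [("name", ctypes.c_char_p),
                 ("next", ctypes.POINTER(Cell))]

head, tail = Cell(b"head"), Cell(b"tail")
head.next = ctypes.pointer(tail)
print(head.next.contents.name)   # b'tail'
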
diff --git a/Lib/test/test_dbm.py b/Lib/test/test_dbm.py
index a10922a403e..7e8d78b8940 100644
--- a/Lib/test/test_dbm.py
+++ b/Lib/test/test_dbm.py
@@ -135,6 +135,67 @@ class AnyDBMTestCase:
assert(f[key] == b"Python:")
f.close()
+ def test_anydbm_readonly_reorganize(self):
+ self.init_db()
+ with dbm.open(_fname, 'r') as d:
+ # Early stopping.
+ if not hasattr(d, 'reorganize'):
+ self.skipTest("method reorganize not available this dbm submodule")
+
+ self.assertRaises(dbm.error, lambda: d.reorganize())
+
+ def test_anydbm_reorganize_not_changed_content(self):
+ self.init_db()
+ with dbm.open(_fname, 'c') as d:
+ # Early stopping.
+ if not hasattr(d, 'reorganize'):
+ self.skipTest("method reorganize not available this dbm submodule")
+
+ keys_before = sorted(d.keys())
+ values_before = [d[k] for k in keys_before]
+ d.reorganize()
+ keys_after = sorted(d.keys())
+ values_after = [d[k] for k in keys_before]
+ self.assertEqual(keys_before, keys_after)
+ self.assertEqual(values_before, values_after)
+
+ def test_anydbm_reorganize_decreased_size(self):
+
+ def _calculate_db_size(db_path):
+ if os.path.isfile(db_path):
+ return os.path.getsize(db_path)
+ total_size = 0
+ for root, _, filenames in os.walk(db_path):
+ for filename in filenames:
+ file_path = os.path.join(root, filename)
+ total_size += os.path.getsize(file_path)
+ return total_size
+
+ # This test requires relatively large databases to reliably show a difference in size before and after reorganizing.
+ with dbm.open(_fname, 'n') as f:
+ # Early stopping.
+ if not hasattr(f, 'reorganize'):
+ self.skipTest("method reorganize not available this dbm submodule")
+
+ for k in self._dict:
+ f[k.encode('ascii')] = self._dict[k] * 100000
+ db_keys = list(f.keys())
+
+ # Make sure to calculate the size of the database only after the file is closed, to ensure its contents are flushed to disk.
+ size_before = _calculate_db_size(os.path.dirname(_fname))
+
+ # Delete some elements from the start of the database.
+ keys_to_delete = db_keys[:len(db_keys) // 2]
+ with dbm.open(_fname, 'c') as f:
+ for k in keys_to_delete:
+ del f[k]
+ f.reorganize()
+
+ # Make sure to calculate the size of the database only after the file is closed, to ensure its contents are flushed to disk.
+ size_after = _calculate_db_size(os.path.dirname(_fname))
+
+ self.assertLess(size_after, size_before)
+
def test_open_with_bytes(self):
dbm.open(os.fsencode(_fname), "c").close()
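
The new tests treat reorganize() as optional, probing with hasattr() because not every dbm backend provides it. A minimal usage sketch along the same lines (the file name is arbitrary):

import dbm

with dbm.open("example_db", "c") as db:
    for i in range(1000):
        db[f"key{i}".encode()] = b"x" * 1000
    for i in range(500):
        del db[f"key{i}".encode()]
    # Only some backends can compact the file after deletions.
    if hasattr(db, "reorganize"):
        db.reorganize()
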
diff --git a/Lib/test/test_dbm_gnu.py b/Lib/test/test_dbm_gnu.py
index 66268c42a30..e0b988b7b95 100644
--- a/Lib/test/test_dbm_gnu.py
+++ b/Lib/test/test_dbm_gnu.py
@@ -74,12 +74,12 @@ class TestGdbm(unittest.TestCase):
# Test the flag parameter open() by trying all supported flag modes.
all = set(gdbm.open_flags)
# Test standard flags (presumably "crwn").
- modes = all - set('fsu')
+ modes = all - set('fsum')
for mode in sorted(modes): # put "c" mode first
self.g = gdbm.open(filename, mode)
self.g.close()
- # Test additional flags (presumably "fsu").
+ # Test additional flags (presumably "fsum").
flags = all - set('crwn')
for mode in modes:
for flag in flags:
@@ -217,6 +217,29 @@ class TestGdbm(unittest.TestCase):
create_empty_file(os.path.join(d, 'test'))
self.assertRaises(gdbm.error, gdbm.open, filename, 'r')
+ @unittest.skipUnless('m' in gdbm.open_flags, "requires 'm' in open_flags")
+ def test_nommap_no_crash(self):
+ self.g = g = gdbm.open(filename, 'nm')
+ os.truncate(filename, 0)
+
+ g.get(b'a', b'c')
+ g.keys()
+ g.firstkey()
+ g.nextkey(b'a')
+ with self.assertRaises(KeyError):
+ g[b'a']
+ with self.assertRaises(gdbm.error):
+ len(g)
+
+ with self.assertRaises(gdbm.error):
+ g[b'a'] = b'c'
+ with self.assertRaises(gdbm.error):
+ del g[b'a']
+ with self.assertRaises(gdbm.error):
+ g.setdefault(b'a', b'c')
+ with self.assertRaises(gdbm.error):
+ g.reorganize()
+
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py
index c0a1e378583..ef64b878805 100644
--- a/Lib/test/test_decimal.py
+++ b/Lib/test/test_decimal.py
@@ -981,6 +981,7 @@ class FormatTest:
('.0f', '0e-2', '0'),
('.0f', '3.14159265', '3'),
('.1f', '3.14159265', '3.1'),
+ ('.01f', '3.14159265', '3.1'), # leading zero in precision
('.4f', '3.14159265', '3.1416'),
('.6f', '3.14159265', '3.141593'),
('.7f', '3.14159265', '3.1415926'), # round-half-even!
@@ -1066,6 +1067,7 @@ class FormatTest:
('8,', '123456', ' 123,456'),
('08,', '123456', '0,123,456'), # special case: extra 0 needed
('+08,', '123456', '+123,456'), # but not if there's a sign
+ ('008,', '123456', '0,123,456'), # leading zero in width
(' 08,', '123456', ' 123,456'),
('08,', '-123456', '-123,456'),
('+09,', '123456', '+0,123,456'),
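
The two added rows pin down that a leading zero in the precision or width field of a format spec is accepted for Decimal. Read as plain format() calls, the new expectations are (behaviour of the patched interpreter, shown here only for illustration):

from decimal import Decimal

print(format(Decimal("3.14159265"), ".01f"))   # 3.1       (leading zero in precision)
print(format(Decimal("123456"), "008,"))       # 0,123,456 (leading zero in width)
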
diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py
index ea076ba4fef..f6ec2cf5ce8 100644
--- a/Lib/test/test_descr.py
+++ b/Lib/test/test_descr.py
@@ -4114,6 +4114,34 @@ class ClassPropertiesAndMethods(unittest.TestCase):
else:
self.fail("shouldn't be able to create inheritance cycles")
+ def test_assign_bases_many_subclasses(self):
+ # This is intended to check that typeobject.c:queue_slot_update() can
+ # handle updating many subclasses when a slot method is re-assigned.
+ class A:
+ x = 'hello'
+ def __call__(self):
+ return 123
+ def __getitem__(self, index):
+ return None
+
+ class X:
+ x = 'bye'
+
+ class B(A):
+ pass
+
+ subclasses = []
+ for i in range(1000):
+ sc = type(f'Sub{i}', (B,), {})
+ subclasses.append(sc)
+
+ self.assertEqual(subclasses[0]()(), 123)
+ self.assertEqual(subclasses[0]().x, 'hello')
+ B.__bases__ = (X,)
+ with self.assertRaises(TypeError):
+ subclasses[0]()()
+ self.assertEqual(subclasses[0]().x, 'bye')
+
def test_builtin_bases(self):
# Make sure all the builtin types can have their base queried without
# segfaulting. See issue #5787.
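
The new test stresses typeobject.c's queue_slot_update() by re-assigning __bases__ while a thousand subclasses are alive. The user-visible behaviour it relies on is ordinary __bases__ reassignment; stripped to its core (illustrative only):

class A:
    x = 'hello'

class X:
    x = 'bye'

class B(A):
    pass

print(B().x)        # hello
B.__bases__ = (X,)  # re-parent B at runtime
print(B().x)        # bye
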
diff --git a/Lib/test/test_difflib.py b/Lib/test/test_difflib.py
index 9e217249be7..6ac584a08d1 100644
--- a/Lib/test/test_difflib.py
+++ b/Lib/test/test_difflib.py
@@ -255,21 +255,21 @@ class TestSFpatches(unittest.TestCase):
html_diff = difflib.HtmlDiff()
output = html_diff.make_file(patch914575_from1.splitlines(),
patch914575_to1.splitlines())
- self.assertIn('content="text/html; charset=utf-8"', output)
+ self.assertIn('charset="utf-8"', output)
def test_make_file_iso88591_charset(self):
html_diff = difflib.HtmlDiff()
output = html_diff.make_file(patch914575_from1.splitlines(),
patch914575_to1.splitlines(),
charset='iso-8859-1')
- self.assertIn('content="text/html; charset=iso-8859-1"', output)
+ self.assertIn('charset="iso-8859-1"', output)
def test_make_file_usascii_charset_with_nonascii_input(self):
html_diff = difflib.HtmlDiff()
output = html_diff.make_file(patch914575_nonascii_from1.splitlines(),
patch914575_nonascii_to1.splitlines(),
charset='us-ascii')
- self.assertIn('content="text/html; charset=us-ascii"', output)
+ self.assertIn('charset="us-ascii"', output)
self.assertIn('&#305;mpl&#305;c&#305;t', output)
class TestDiffer(unittest.TestCase):
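
The assertions now look for the HTML5-style charset attribute that HtmlDiff.make_file() emits after this change. Generating such a page is unchanged from the caller's point of view; a short sketch:

import difflib

before = "one\ntwo\nthree\n".splitlines(keepends=True)
after = "one\n2\nthree\n".splitlines(keepends=True)

html = difflib.HtmlDiff().make_file(before, after,
                                    fromdesc="before", todesc="after")
with open("diff.html", "w", encoding="utf-8") as fh:
    fh.write(html)
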
diff --git a/Lib/test/test_difflib_expect.html b/Lib/test/test_difflib_expect.html
index 9f33a9e9c9c..2346a6f9f8d 100644
--- a/Lib/test/test_difflib_expect.html
+++ b/Lib/test/test_difflib_expect.html
@@ -1,22 +1,42 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
- "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
-
-<html>
-
+<!DOCTYPE html>
+<html lang="en">
<head>
- <meta http-equiv="Content-Type"
- content="text/html; charset=utf-8" />
- <title></title>
- <style type="text/css">
+ <meta charset="utf-8">
+ <meta name="viewport" content="width=device-width, initial-scale=1">
+ <title>Diff comparison</title>
+ <style>
:root {color-scheme: light dark}
- table.diff {font-family: Menlo, Consolas, Monaco, Liberation Mono, Lucida Console, monospace; border:medium}
- .diff_header {background-color:#e0e0e0}
- td.diff_header {text-align:right}
- .diff_next {background-color:#c0c0c0}
+ table.diff {
+ font-family: Menlo, Consolas, Monaco, Liberation Mono, Lucida Console, monospace;
+ border: medium;
+ }
+ .diff_header {
+ background-color: #e0e0e0;
+ font-weight: bold;
+ }
+ td.diff_header {
+ text-align: right;
+ padding: 0 8px;
+ }
+ .diff_next {
+ background-color: #c0c0c0;
+ padding: 4px 0;
+ }
.diff_add {background-color:palegreen}
.diff_chg {background-color:#ffff77}
.diff_sub {background-color:#ffaaaa}
+ table.diff[summary="Legends"] {
+ margin-top: 20px;
+ border: 1px solid #ccc;
+ }
+ table.diff[summary="Legends"] th {
+ background-color: #e0e0e0;
+ padding: 4px 8px;
+ }
+ table.diff[summary="Legends"] td {
+ padding: 4px 8px;
+ }
@media (prefers-color-scheme: dark) {
.diff_header {background-color:#666}
@@ -24,6 +44,8 @@
.diff_add {background-color:darkgreen}
.diff_chg {background-color:#847415}
.diff_sub {background-color:darkred}
+ table.diff[summary="Legends"] {border-color:#555}
+ table.diff[summary="Legends"] th{background-color:#666}
}
</style>
</head>
diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py
index 547f6de5f5a..ec930a728aa 100644
--- a/Lib/test/test_dis.py
+++ b/Lib/test/test_dis.py
@@ -851,7 +851,7 @@ Disassembly of <code object <genexpr> at 0x..., file "%s", line %d>:
%4d RETURN_GENERATOR
POP_TOP
L1: RESUME 0
- LOAD_FAST_BORROW 0 (.0)
+ LOAD_FAST 0 (.0)
GET_ITER
L2: FOR_ITER 14 (to L3)
STORE_FAST 1 (z)
@@ -1821,7 +1821,7 @@ expected_opinfo_jumpy = [
make_inst(opname='LOAD_SMALL_INT', arg=10, argval=10, argrepr='', offset=12, start_offset=12, starts_line=False, line_number=3),
make_inst(opname='CALL', arg=1, argval=1, argrepr='', offset=14, start_offset=14, starts_line=False, line_number=3, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
make_inst(opname='GET_ITER', arg=None, argval=None, argrepr='', offset=22, start_offset=22, starts_line=False, line_number=3),
- make_inst(opname='FOR_ITER', arg=32, argval=92, argrepr='to L4', offset=24, start_offset=24, starts_line=False, line_number=3, label=1, cache_info=[('counter', 1, b'\x00\x00')]),
+ make_inst(opname='FOR_ITER', arg=33, argval=94, argrepr='to L4', offset=24, start_offset=24, starts_line=False, line_number=3, label=1, cache_info=[('counter', 1, b'\x00\x00')]),
make_inst(opname='STORE_FAST', arg=0, argval='i', argrepr='i', offset=28, start_offset=28, starts_line=False, line_number=3),
make_inst(opname='LOAD_GLOBAL', arg=3, argval='print', argrepr='print + NULL', offset=30, start_offset=30, starts_line=True, line_number=4, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
make_inst(opname='LOAD_FAST_BORROW', arg=0, argval='i', argrepr='i', offset=40, start_offset=40, starts_line=False, line_number=4),
@@ -1840,110 +1840,111 @@ expected_opinfo_jumpy = [
make_inst(opname='NOT_TAKEN', arg=None, argval=None, argrepr='', offset=82, start_offset=82, starts_line=False, line_number=7),
make_inst(opname='JUMP_BACKWARD', arg=32, argval=24, argrepr='to L1', offset=84, start_offset=84, starts_line=False, line_number=7, cache_info=[('counter', 1, b'\x00\x00')]),
make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=88, start_offset=88, starts_line=True, line_number=8, label=3),
- make_inst(opname='JUMP_FORWARD', arg=13, argval=118, argrepr='to L5', offset=90, start_offset=90, starts_line=False, line_number=8),
- make_inst(opname='END_FOR', arg=None, argval=None, argrepr='', offset=92, start_offset=92, starts_line=True, line_number=3, label=4),
- make_inst(opname='POP_ITER', arg=None, argval=None, argrepr='', offset=94, start_offset=94, starts_line=False, line_number=3),
- make_inst(opname='LOAD_GLOBAL', arg=3, argval='print', argrepr='print + NULL', offset=96, start_offset=96, starts_line=True, line_number=10, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- make_inst(opname='LOAD_CONST', arg=1, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=106, start_offset=106, starts_line=False, line_number=10),
- make_inst(opname='CALL', arg=1, argval=1, argrepr='', offset=108, start_offset=108, starts_line=False, line_number=10, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
- make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=116, start_offset=116, starts_line=False, line_number=10),
- make_inst(opname='LOAD_FAST_CHECK', arg=0, argval='i', argrepr='i', offset=118, start_offset=118, starts_line=True, line_number=11, label=5),
- make_inst(opname='TO_BOOL', arg=None, argval=None, argrepr='', offset=120, start_offset=120, starts_line=False, line_number=11, cache_info=[('counter', 1, b'\x00\x00'), ('version', 2, b'\x00\x00\x00\x00')]),
- make_inst(opname='POP_JUMP_IF_FALSE', arg=40, argval=212, argrepr='to L8', offset=128, start_offset=128, starts_line=False, line_number=11, cache_info=[('counter', 1, b'\x00\x00')]),
- make_inst(opname='NOT_TAKEN', arg=None, argval=None, argrepr='', offset=132, start_offset=132, starts_line=False, line_number=11),
- make_inst(opname='LOAD_GLOBAL', arg=3, argval='print', argrepr='print + NULL', offset=134, start_offset=134, starts_line=True, line_number=12, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- make_inst(opname='LOAD_FAST_BORROW', arg=0, argval='i', argrepr='i', offset=144, start_offset=144, starts_line=False, line_number=12),
- make_inst(opname='CALL', arg=1, argval=1, argrepr='', offset=146, start_offset=146, starts_line=False, line_number=12, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
- make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=154, start_offset=154, starts_line=False, line_number=12),
- make_inst(opname='LOAD_FAST_BORROW', arg=0, argval='i', argrepr='i', offset=156, start_offset=156, starts_line=True, line_number=13),
- make_inst(opname='LOAD_SMALL_INT', arg=1, argval=1, argrepr='', offset=158, start_offset=158, starts_line=False, line_number=13),
- make_inst(opname='BINARY_OP', arg=23, argval=23, argrepr='-=', offset=160, start_offset=160, starts_line=False, line_number=13, cache_info=[('counter', 1, b'\x00\x00'), ('descr', 4, b'\x00\x00\x00\x00\x00\x00\x00\x00')]),
- make_inst(opname='STORE_FAST', arg=0, argval='i', argrepr='i', offset=172, start_offset=172, starts_line=False, line_number=13),
- make_inst(opname='LOAD_FAST_BORROW', arg=0, argval='i', argrepr='i', offset=174, start_offset=174, starts_line=True, line_number=14),
- make_inst(opname='LOAD_SMALL_INT', arg=6, argval=6, argrepr='', offset=176, start_offset=176, starts_line=False, line_number=14),
- make_inst(opname='COMPARE_OP', arg=148, argval='>', argrepr='bool(>)', offset=178, start_offset=178, starts_line=False, line_number=14, cache_info=[('counter', 1, b'\x00\x00')]),
- make_inst(opname='POP_JUMP_IF_FALSE', arg=3, argval=192, argrepr='to L6', offset=182, start_offset=182, starts_line=False, line_number=14, cache_info=[('counter', 1, b'\x00\x00')]),
- make_inst(opname='NOT_TAKEN', arg=None, argval=None, argrepr='', offset=186, start_offset=186, starts_line=False, line_number=14),
- make_inst(opname='JUMP_BACKWARD', arg=37, argval=118, argrepr='to L5', offset=188, start_offset=188, starts_line=True, line_number=15, cache_info=[('counter', 1, b'\x00\x00')]),
- make_inst(opname='LOAD_FAST_BORROW', arg=0, argval='i', argrepr='i', offset=192, start_offset=192, starts_line=True, line_number=16, label=6),
- make_inst(opname='LOAD_SMALL_INT', arg=4, argval=4, argrepr='', offset=194, start_offset=194, starts_line=False, line_number=16),
- make_inst(opname='COMPARE_OP', arg=18, argval='<', argrepr='bool(<)', offset=196, start_offset=196, starts_line=False, line_number=16, cache_info=[('counter', 1, b'\x00\x00')]),
- make_inst(opname='POP_JUMP_IF_TRUE', arg=3, argval=210, argrepr='to L7', offset=200, start_offset=200, starts_line=False, line_number=16, cache_info=[('counter', 1, b'\x00\x00')]),
- make_inst(opname='NOT_TAKEN', arg=None, argval=None, argrepr='', offset=204, start_offset=204, starts_line=False, line_number=16),
- make_inst(opname='JUMP_BACKWARD', arg=46, argval=118, argrepr='to L5', offset=206, start_offset=206, starts_line=False, line_number=16, cache_info=[('counter', 1, b'\x00\x00')]),
- make_inst(opname='JUMP_FORWARD', arg=11, argval=234, argrepr='to L9', offset=210, start_offset=210, starts_line=True, line_number=17, label=7),
- make_inst(opname='LOAD_GLOBAL', arg=3, argval='print', argrepr='print + NULL', offset=212, start_offset=212, starts_line=True, line_number=19, label=8, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- make_inst(opname='LOAD_CONST', arg=2, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=222, start_offset=222, starts_line=False, line_number=19),
- make_inst(opname='CALL', arg=1, argval=1, argrepr='', offset=224, start_offset=224, starts_line=False, line_number=19, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
- make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=232, start_offset=232, starts_line=False, line_number=19),
- make_inst(opname='NOP', arg=None, argval=None, argrepr='', offset=234, start_offset=234, starts_line=True, line_number=20, label=9),
- make_inst(opname='LOAD_SMALL_INT', arg=1, argval=1, argrepr='', offset=236, start_offset=236, starts_line=True, line_number=21),
- make_inst(opname='LOAD_SMALL_INT', arg=0, argval=0, argrepr='', offset=238, start_offset=238, starts_line=False, line_number=21),
- make_inst(opname='BINARY_OP', arg=11, argval=11, argrepr='/', offset=240, start_offset=240, starts_line=False, line_number=21, cache_info=[('counter', 1, b'\x00\x00'), ('descr', 4, b'\x00\x00\x00\x00\x00\x00\x00\x00')]),
- make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=252, start_offset=252, starts_line=False, line_number=21),
- make_inst(opname='LOAD_FAST_BORROW', arg=0, argval='i', argrepr='i', offset=254, start_offset=254, starts_line=True, line_number=25),
- make_inst(opname='COPY', arg=1, argval=1, argrepr='', offset=256, start_offset=256, starts_line=False, line_number=25),
- make_inst(opname='LOAD_SPECIAL', arg=1, argval=1, argrepr='__exit__', offset=258, start_offset=258, starts_line=False, line_number=25),
- make_inst(opname='SWAP', arg=2, argval=2, argrepr='', offset=260, start_offset=260, starts_line=False, line_number=25),
- make_inst(opname='SWAP', arg=3, argval=3, argrepr='', offset=262, start_offset=262, starts_line=False, line_number=25),
- make_inst(opname='LOAD_SPECIAL', arg=0, argval=0, argrepr='__enter__', offset=264, start_offset=264, starts_line=False, line_number=25),
- make_inst(opname='CALL', arg=0, argval=0, argrepr='', offset=266, start_offset=266, starts_line=False, line_number=25, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
- make_inst(opname='STORE_FAST', arg=1, argval='dodgy', argrepr='dodgy', offset=274, start_offset=274, starts_line=False, line_number=25),
- make_inst(opname='LOAD_GLOBAL', arg=3, argval='print', argrepr='print + NULL', offset=276, start_offset=276, starts_line=True, line_number=26, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- make_inst(opname='LOAD_CONST', arg=3, argval='Never reach this', argrepr="'Never reach this'", offset=286, start_offset=286, starts_line=False, line_number=26),
- make_inst(opname='CALL', arg=1, argval=1, argrepr='', offset=288, start_offset=288, starts_line=False, line_number=26, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
- make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=296, start_offset=296, starts_line=False, line_number=26),
- make_inst(opname='LOAD_CONST', arg=4, argval=None, argrepr='None', offset=298, start_offset=298, starts_line=True, line_number=25),
- make_inst(opname='LOAD_CONST', arg=4, argval=None, argrepr='None', offset=300, start_offset=300, starts_line=False, line_number=25),
+ make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=90, start_offset=90, starts_line=False, line_number=8),
+ make_inst(opname='JUMP_FORWARD', arg=13, argval=120, argrepr='to L5', offset=92, start_offset=92, starts_line=False, line_number=8),
+ make_inst(opname='END_FOR', arg=None, argval=None, argrepr='', offset=94, start_offset=94, starts_line=True, line_number=3, label=4),
+ make_inst(opname='POP_ITER', arg=None, argval=None, argrepr='', offset=96, start_offset=96, starts_line=False, line_number=3),
+ make_inst(opname='LOAD_GLOBAL', arg=3, argval='print', argrepr='print + NULL', offset=98, start_offset=98, starts_line=True, line_number=10, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ make_inst(opname='LOAD_CONST', arg=1, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=108, start_offset=108, starts_line=False, line_number=10),
+ make_inst(opname='CALL', arg=1, argval=1, argrepr='', offset=110, start_offset=110, starts_line=False, line_number=10, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=118, start_offset=118, starts_line=False, line_number=10),
+ make_inst(opname='LOAD_FAST_CHECK', arg=0, argval='i', argrepr='i', offset=120, start_offset=120, starts_line=True, line_number=11, label=5),
+ make_inst(opname='TO_BOOL', arg=None, argval=None, argrepr='', offset=122, start_offset=122, starts_line=False, line_number=11, cache_info=[('counter', 1, b'\x00\x00'), ('version', 2, b'\x00\x00\x00\x00')]),
+ make_inst(opname='POP_JUMP_IF_FALSE', arg=40, argval=214, argrepr='to L8', offset=130, start_offset=130, starts_line=False, line_number=11, cache_info=[('counter', 1, b'\x00\x00')]),
+ make_inst(opname='NOT_TAKEN', arg=None, argval=None, argrepr='', offset=134, start_offset=134, starts_line=False, line_number=11),
+ make_inst(opname='LOAD_GLOBAL', arg=3, argval='print', argrepr='print + NULL', offset=136, start_offset=136, starts_line=True, line_number=12, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ make_inst(opname='LOAD_FAST_BORROW', arg=0, argval='i', argrepr='i', offset=146, start_offset=146, starts_line=False, line_number=12),
+ make_inst(opname='CALL', arg=1, argval=1, argrepr='', offset=148, start_offset=148, starts_line=False, line_number=12, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=156, start_offset=156, starts_line=False, line_number=12),
+ make_inst(opname='LOAD_FAST_BORROW', arg=0, argval='i', argrepr='i', offset=158, start_offset=158, starts_line=True, line_number=13),
+ make_inst(opname='LOAD_SMALL_INT', arg=1, argval=1, argrepr='', offset=160, start_offset=160, starts_line=False, line_number=13),
+ make_inst(opname='BINARY_OP', arg=23, argval=23, argrepr='-=', offset=162, start_offset=162, starts_line=False, line_number=13, cache_info=[('counter', 1, b'\x00\x00'), ('descr', 4, b'\x00\x00\x00\x00\x00\x00\x00\x00')]),
+ make_inst(opname='STORE_FAST', arg=0, argval='i', argrepr='i', offset=174, start_offset=174, starts_line=False, line_number=13),
+ make_inst(opname='LOAD_FAST_BORROW', arg=0, argval='i', argrepr='i', offset=176, start_offset=176, starts_line=True, line_number=14),
+ make_inst(opname='LOAD_SMALL_INT', arg=6, argval=6, argrepr='', offset=178, start_offset=178, starts_line=False, line_number=14),
+ make_inst(opname='COMPARE_OP', arg=148, argval='>', argrepr='bool(>)', offset=180, start_offset=180, starts_line=False, line_number=14, cache_info=[('counter', 1, b'\x00\x00')]),
+ make_inst(opname='POP_JUMP_IF_FALSE', arg=3, argval=194, argrepr='to L6', offset=184, start_offset=184, starts_line=False, line_number=14, cache_info=[('counter', 1, b'\x00\x00')]),
+ make_inst(opname='NOT_TAKEN', arg=None, argval=None, argrepr='', offset=188, start_offset=188, starts_line=False, line_number=14),
+ make_inst(opname='JUMP_BACKWARD', arg=37, argval=120, argrepr='to L5', offset=190, start_offset=190, starts_line=True, line_number=15, cache_info=[('counter', 1, b'\x00\x00')]),
+ make_inst(opname='LOAD_FAST_BORROW', arg=0, argval='i', argrepr='i', offset=194, start_offset=194, starts_line=True, line_number=16, label=6),
+ make_inst(opname='LOAD_SMALL_INT', arg=4, argval=4, argrepr='', offset=196, start_offset=196, starts_line=False, line_number=16),
+ make_inst(opname='COMPARE_OP', arg=18, argval='<', argrepr='bool(<)', offset=198, start_offset=198, starts_line=False, line_number=16, cache_info=[('counter', 1, b'\x00\x00')]),
+ make_inst(opname='POP_JUMP_IF_TRUE', arg=3, argval=212, argrepr='to L7', offset=202, start_offset=202, starts_line=False, line_number=16, cache_info=[('counter', 1, b'\x00\x00')]),
+ make_inst(opname='NOT_TAKEN', arg=None, argval=None, argrepr='', offset=206, start_offset=206, starts_line=False, line_number=16),
+ make_inst(opname='JUMP_BACKWARD', arg=46, argval=120, argrepr='to L5', offset=208, start_offset=208, starts_line=False, line_number=16, cache_info=[('counter', 1, b'\x00\x00')]),
+ make_inst(opname='JUMP_FORWARD', arg=11, argval=236, argrepr='to L9', offset=212, start_offset=212, starts_line=True, line_number=17, label=7),
+ make_inst(opname='LOAD_GLOBAL', arg=3, argval='print', argrepr='print + NULL', offset=214, start_offset=214, starts_line=True, line_number=19, label=8, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ make_inst(opname='LOAD_CONST', arg=2, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=224, start_offset=224, starts_line=False, line_number=19),
+ make_inst(opname='CALL', arg=1, argval=1, argrepr='', offset=226, start_offset=226, starts_line=False, line_number=19, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=234, start_offset=234, starts_line=False, line_number=19),
+ make_inst(opname='NOP', arg=None, argval=None, argrepr='', offset=236, start_offset=236, starts_line=True, line_number=20, label=9),
+ make_inst(opname='LOAD_SMALL_INT', arg=1, argval=1, argrepr='', offset=238, start_offset=238, starts_line=True, line_number=21),
+ make_inst(opname='LOAD_SMALL_INT', arg=0, argval=0, argrepr='', offset=240, start_offset=240, starts_line=False, line_number=21),
+ make_inst(opname='BINARY_OP', arg=11, argval=11, argrepr='/', offset=242, start_offset=242, starts_line=False, line_number=21, cache_info=[('counter', 1, b'\x00\x00'), ('descr', 4, b'\x00\x00\x00\x00\x00\x00\x00\x00')]),
+ make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=254, start_offset=254, starts_line=False, line_number=21),
+ make_inst(opname='LOAD_FAST_BORROW', arg=0, argval='i', argrepr='i', offset=256, start_offset=256, starts_line=True, line_number=25),
+ make_inst(opname='COPY', arg=1, argval=1, argrepr='', offset=258, start_offset=258, starts_line=False, line_number=25),
+ make_inst(opname='LOAD_SPECIAL', arg=1, argval=1, argrepr='__exit__', offset=260, start_offset=260, starts_line=False, line_number=25),
+ make_inst(opname='SWAP', arg=2, argval=2, argrepr='', offset=262, start_offset=262, starts_line=False, line_number=25),
+ make_inst(opname='SWAP', arg=3, argval=3, argrepr='', offset=264, start_offset=264, starts_line=False, line_number=25),
+ make_inst(opname='LOAD_SPECIAL', arg=0, argval=0, argrepr='__enter__', offset=266, start_offset=266, starts_line=False, line_number=25),
+ make_inst(opname='CALL', arg=0, argval=0, argrepr='', offset=268, start_offset=268, starts_line=False, line_number=25, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ make_inst(opname='STORE_FAST', arg=1, argval='dodgy', argrepr='dodgy', offset=276, start_offset=276, starts_line=False, line_number=25),
+ make_inst(opname='LOAD_GLOBAL', arg=3, argval='print', argrepr='print + NULL', offset=278, start_offset=278, starts_line=True, line_number=26, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ make_inst(opname='LOAD_CONST', arg=3, argval='Never reach this', argrepr="'Never reach this'", offset=288, start_offset=288, starts_line=False, line_number=26),
+ make_inst(opname='CALL', arg=1, argval=1, argrepr='', offset=290, start_offset=290, starts_line=False, line_number=26, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=298, start_offset=298, starts_line=False, line_number=26),
+ make_inst(opname='LOAD_CONST', arg=4, argval=None, argrepr='None', offset=300, start_offset=300, starts_line=True, line_number=25),
make_inst(opname='LOAD_CONST', arg=4, argval=None, argrepr='None', offset=302, start_offset=302, starts_line=False, line_number=25),
- make_inst(opname='CALL', arg=3, argval=3, argrepr='', offset=304, start_offset=304, starts_line=False, line_number=25, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
- make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=312, start_offset=312, starts_line=False, line_number=25),
- make_inst(opname='LOAD_GLOBAL', arg=3, argval='print', argrepr='print + NULL', offset=314, start_offset=314, starts_line=True, line_number=28, label=10, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- make_inst(opname='LOAD_CONST', arg=6, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=324, start_offset=324, starts_line=False, line_number=28),
- make_inst(opname='CALL', arg=1, argval=1, argrepr='', offset=326, start_offset=326, starts_line=False, line_number=28, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
- make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=334, start_offset=334, starts_line=False, line_number=28),
- make_inst(opname='LOAD_CONST', arg=4, argval=None, argrepr='None', offset=336, start_offset=336, starts_line=False, line_number=28),
- make_inst(opname='RETURN_VALUE', arg=None, argval=None, argrepr='', offset=338, start_offset=338, starts_line=False, line_number=28),
- make_inst(opname='PUSH_EXC_INFO', arg=None, argval=None, argrepr='', offset=340, start_offset=340, starts_line=True, line_number=25),
- make_inst(opname='WITH_EXCEPT_START', arg=None, argval=None, argrepr='', offset=342, start_offset=342, starts_line=False, line_number=25),
- make_inst(opname='TO_BOOL', arg=None, argval=None, argrepr='', offset=344, start_offset=344, starts_line=False, line_number=25, cache_info=[('counter', 1, b'\x00\x00'), ('version', 2, b'\x00\x00\x00\x00')]),
- make_inst(opname='POP_JUMP_IF_TRUE', arg=2, argval=360, argrepr='to L11', offset=352, start_offset=352, starts_line=False, line_number=25, cache_info=[('counter', 1, b'\x00\x00')]),
- make_inst(opname='NOT_TAKEN', arg=None, argval=None, argrepr='', offset=356, start_offset=356, starts_line=False, line_number=25),
- make_inst(opname='RERAISE', arg=2, argval=2, argrepr='', offset=358, start_offset=358, starts_line=False, line_number=25),
- make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=360, start_offset=360, starts_line=False, line_number=25, label=11),
- make_inst(opname='POP_EXCEPT', arg=None, argval=None, argrepr='', offset=362, start_offset=362, starts_line=False, line_number=25),
- make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=364, start_offset=364, starts_line=False, line_number=25),
+ make_inst(opname='LOAD_CONST', arg=4, argval=None, argrepr='None', offset=304, start_offset=304, starts_line=False, line_number=25),
+ make_inst(opname='CALL', arg=3, argval=3, argrepr='', offset=306, start_offset=306, starts_line=False, line_number=25, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=314, start_offset=314, starts_line=False, line_number=25),
+ make_inst(opname='LOAD_GLOBAL', arg=3, argval='print', argrepr='print + NULL', offset=316, start_offset=316, starts_line=True, line_number=28, label=10, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ make_inst(opname='LOAD_CONST', arg=6, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=326, start_offset=326, starts_line=False, line_number=28),
+ make_inst(opname='CALL', arg=1, argval=1, argrepr='', offset=328, start_offset=328, starts_line=False, line_number=28, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=336, start_offset=336, starts_line=False, line_number=28),
+ make_inst(opname='LOAD_CONST', arg=4, argval=None, argrepr='None', offset=338, start_offset=338, starts_line=False, line_number=28),
+ make_inst(opname='RETURN_VALUE', arg=None, argval=None, argrepr='', offset=340, start_offset=340, starts_line=False, line_number=28),
+ make_inst(opname='PUSH_EXC_INFO', arg=None, argval=None, argrepr='', offset=342, start_offset=342, starts_line=True, line_number=25),
+ make_inst(opname='WITH_EXCEPT_START', arg=None, argval=None, argrepr='', offset=344, start_offset=344, starts_line=False, line_number=25),
+ make_inst(opname='TO_BOOL', arg=None, argval=None, argrepr='', offset=346, start_offset=346, starts_line=False, line_number=25, cache_info=[('counter', 1, b'\x00\x00'), ('version', 2, b'\x00\x00\x00\x00')]),
+ make_inst(opname='POP_JUMP_IF_TRUE', arg=2, argval=362, argrepr='to L11', offset=354, start_offset=354, starts_line=False, line_number=25, cache_info=[('counter', 1, b'\x00\x00')]),
+ make_inst(opname='NOT_TAKEN', arg=None, argval=None, argrepr='', offset=358, start_offset=358, starts_line=False, line_number=25),
+ make_inst(opname='RERAISE', arg=2, argval=2, argrepr='', offset=360, start_offset=360, starts_line=False, line_number=25),
+ make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=362, start_offset=362, starts_line=False, line_number=25, label=11),
+ make_inst(opname='POP_EXCEPT', arg=None, argval=None, argrepr='', offset=364, start_offset=364, starts_line=False, line_number=25),
make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=366, start_offset=366, starts_line=False, line_number=25),
make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=368, start_offset=368, starts_line=False, line_number=25),
- make_inst(opname='JUMP_BACKWARD_NO_INTERRUPT', arg=29, argval=314, argrepr='to L10', offset=370, start_offset=370, starts_line=False, line_number=25),
- make_inst(opname='COPY', arg=3, argval=3, argrepr='', offset=372, start_offset=372, starts_line=True, line_number=None),
- make_inst(opname='POP_EXCEPT', arg=None, argval=None, argrepr='', offset=374, start_offset=374, starts_line=False, line_number=None),
- make_inst(opname='RERAISE', arg=1, argval=1, argrepr='', offset=376, start_offset=376, starts_line=False, line_number=None),
- make_inst(opname='PUSH_EXC_INFO', arg=None, argval=None, argrepr='', offset=378, start_offset=378, starts_line=False, line_number=None),
- make_inst(opname='LOAD_GLOBAL', arg=4, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=380, start_offset=380, starts_line=True, line_number=22, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- make_inst(opname='CHECK_EXC_MATCH', arg=None, argval=None, argrepr='', offset=390, start_offset=390, starts_line=False, line_number=22),
- make_inst(opname='POP_JUMP_IF_FALSE', arg=15, argval=426, argrepr='to L12', offset=392, start_offset=392, starts_line=False, line_number=22, cache_info=[('counter', 1, b'\x00\x00')]),
- make_inst(opname='NOT_TAKEN', arg=None, argval=None, argrepr='', offset=396, start_offset=396, starts_line=False, line_number=22),
- make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=398, start_offset=398, starts_line=False, line_number=22),
- make_inst(opname='LOAD_GLOBAL', arg=3, argval='print', argrepr='print + NULL', offset=400, start_offset=400, starts_line=True, line_number=23, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- make_inst(opname='LOAD_CONST', arg=5, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=410, start_offset=410, starts_line=False, line_number=23),
- make_inst(opname='CALL', arg=1, argval=1, argrepr='', offset=412, start_offset=412, starts_line=False, line_number=23, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
- make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=420, start_offset=420, starts_line=False, line_number=23),
- make_inst(opname='POP_EXCEPT', arg=None, argval=None, argrepr='', offset=422, start_offset=422, starts_line=False, line_number=23),
- make_inst(opname='JUMP_BACKWARD_NO_INTERRUPT', arg=56, argval=314, argrepr='to L10', offset=424, start_offset=424, starts_line=False, line_number=23),
- make_inst(opname='RERAISE', arg=0, argval=0, argrepr='', offset=426, start_offset=426, starts_line=True, line_number=22, label=12),
- make_inst(opname='COPY', arg=3, argval=3, argrepr='', offset=428, start_offset=428, starts_line=True, line_number=None),
- make_inst(opname='POP_EXCEPT', arg=None, argval=None, argrepr='', offset=430, start_offset=430, starts_line=False, line_number=None),
- make_inst(opname='RERAISE', arg=1, argval=1, argrepr='', offset=432, start_offset=432, starts_line=False, line_number=None),
- make_inst(opname='PUSH_EXC_INFO', arg=None, argval=None, argrepr='', offset=434, start_offset=434, starts_line=False, line_number=None),
- make_inst(opname='LOAD_GLOBAL', arg=3, argval='print', argrepr='print + NULL', offset=436, start_offset=436, starts_line=True, line_number=28, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
- make_inst(opname='LOAD_CONST', arg=6, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=446, start_offset=446, starts_line=False, line_number=28),
- make_inst(opname='CALL', arg=1, argval=1, argrepr='', offset=448, start_offset=448, starts_line=False, line_number=28, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
- make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=456, start_offset=456, starts_line=False, line_number=28),
- make_inst(opname='RERAISE', arg=0, argval=0, argrepr='', offset=458, start_offset=458, starts_line=False, line_number=28),
- make_inst(opname='COPY', arg=3, argval=3, argrepr='', offset=460, start_offset=460, starts_line=True, line_number=None),
- make_inst(opname='POP_EXCEPT', arg=None, argval=None, argrepr='', offset=462, start_offset=462, starts_line=False, line_number=None),
- make_inst(opname='RERAISE', arg=1, argval=1, argrepr='', offset=464, start_offset=464, starts_line=False, line_number=None),
+ make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=370, start_offset=370, starts_line=False, line_number=25),
+ make_inst(opname='JUMP_BACKWARD_NO_INTERRUPT', arg=29, argval=316, argrepr='to L10', offset=372, start_offset=372, starts_line=False, line_number=25),
+ make_inst(opname='COPY', arg=3, argval=3, argrepr='', offset=374, start_offset=374, starts_line=True, line_number=None),
+ make_inst(opname='POP_EXCEPT', arg=None, argval=None, argrepr='', offset=376, start_offset=376, starts_line=False, line_number=None),
+ make_inst(opname='RERAISE', arg=1, argval=1, argrepr='', offset=378, start_offset=378, starts_line=False, line_number=None),
+ make_inst(opname='PUSH_EXC_INFO', arg=None, argval=None, argrepr='', offset=380, start_offset=380, starts_line=False, line_number=None),
+ make_inst(opname='LOAD_GLOBAL', arg=4, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=382, start_offset=382, starts_line=True, line_number=22, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ make_inst(opname='CHECK_EXC_MATCH', arg=None, argval=None, argrepr='', offset=392, start_offset=392, starts_line=False, line_number=22),
+ make_inst(opname='POP_JUMP_IF_FALSE', arg=15, argval=428, argrepr='to L12', offset=394, start_offset=394, starts_line=False, line_number=22, cache_info=[('counter', 1, b'\x00\x00')]),
+ make_inst(opname='NOT_TAKEN', arg=None, argval=None, argrepr='', offset=398, start_offset=398, starts_line=False, line_number=22),
+ make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=400, start_offset=400, starts_line=False, line_number=22),
+ make_inst(opname='LOAD_GLOBAL', arg=3, argval='print', argrepr='print + NULL', offset=402, start_offset=402, starts_line=True, line_number=23, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ make_inst(opname='LOAD_CONST', arg=5, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=412, start_offset=412, starts_line=False, line_number=23),
+ make_inst(opname='CALL', arg=1, argval=1, argrepr='', offset=414, start_offset=414, starts_line=False, line_number=23, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=422, start_offset=422, starts_line=False, line_number=23),
+ make_inst(opname='POP_EXCEPT', arg=None, argval=None, argrepr='', offset=424, start_offset=424, starts_line=False, line_number=23),
+ make_inst(opname='JUMP_BACKWARD_NO_INTERRUPT', arg=56, argval=316, argrepr='to L10', offset=426, start_offset=426, starts_line=False, line_number=23),
+ make_inst(opname='RERAISE', arg=0, argval=0, argrepr='', offset=428, start_offset=428, starts_line=True, line_number=22, label=12),
+ make_inst(opname='COPY', arg=3, argval=3, argrepr='', offset=430, start_offset=430, starts_line=True, line_number=None),
+ make_inst(opname='POP_EXCEPT', arg=None, argval=None, argrepr='', offset=432, start_offset=432, starts_line=False, line_number=None),
+ make_inst(opname='RERAISE', arg=1, argval=1, argrepr='', offset=434, start_offset=434, starts_line=False, line_number=None),
+ make_inst(opname='PUSH_EXC_INFO', arg=None, argval=None, argrepr='', offset=436, start_offset=436, starts_line=False, line_number=None),
+ make_inst(opname='LOAD_GLOBAL', arg=3, argval='print', argrepr='print + NULL', offset=438, start_offset=438, starts_line=True, line_number=28, cache_info=[('counter', 1, b'\x00\x00'), ('index', 1, b'\x00\x00'), ('module_keys_version', 1, b'\x00\x00'), ('builtin_keys_version', 1, b'\x00\x00')]),
+ make_inst(opname='LOAD_CONST', arg=6, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=448, start_offset=448, starts_line=False, line_number=28),
+ make_inst(opname='CALL', arg=1, argval=1, argrepr='', offset=450, start_offset=450, starts_line=False, line_number=28, cache_info=[('counter', 1, b'\x00\x00'), ('func_version', 2, b'\x00\x00\x00\x00')]),
+ make_inst(opname='POP_TOP', arg=None, argval=None, argrepr='', offset=458, start_offset=458, starts_line=False, line_number=28),
+ make_inst(opname='RERAISE', arg=0, argval=0, argrepr='', offset=460, start_offset=460, starts_line=False, line_number=28),
+ make_inst(opname='COPY', arg=3, argval=3, argrepr='', offset=462, start_offset=462, starts_line=True, line_number=None),
+ make_inst(opname='POP_EXCEPT', arg=None, argval=None, argrepr='', offset=464, start_offset=464, starts_line=False, line_number=None),
+ make_inst(opname='RERAISE', arg=1, argval=1, argrepr='', offset=466, start_offset=466, starts_line=False, line_number=None),
]
# One last piece of inspect fodder to check the default line number handling
diff --git a/Lib/test/test_doctest/sample_doctest_errors.py b/Lib/test/test_doctest/sample_doctest_errors.py
new file mode 100644
index 00000000000..4a6f07af2d4
--- /dev/null
+++ b/Lib/test/test_doctest/sample_doctest_errors.py
@@ -0,0 +1,46 @@
+"""This is a sample module used for testing doctest.
+
+This module includes various scenarios involving errors.
+
+>>> 2 + 2
+5
+>>> 1/0
+1
+"""
+
+def g():
+ [][0] # line 12
+
+def errors():
+ """
+ >>> 2 + 2
+ 5
+ >>> 1/0
+ 1
+ >>> def f():
+ ... 2 + '2'
+ ...
+ >>> f()
+ 1
+ >>> g()
+ 1
+ """
+
+def syntax_error():
+ """
+ >>> 2+*3
+ 5
+ """
+
+__test__ = {
+ 'bad': """
+ >>> 2 + 2
+ 5
+ >>> 1/0
+ 1
+ """,
+}
+
+def test_suite():
+ import doctest
+ return doctest.DocTestSuite()
diff --git a/Lib/test/test_doctest/test_doctest.py b/Lib/test/test_doctest/test_doctest.py
index a4a49298bab..72763d4a013 100644
--- a/Lib/test/test_doctest/test_doctest.py
+++ b/Lib/test/test_doctest/test_doctest.py
@@ -2267,14 +2267,24 @@ def test_DocTestSuite():
>>> import unittest
>>> import test.test_doctest.sample_doctest
>>> suite = doctest.DocTestSuite(test.test_doctest.sample_doctest)
- >>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=4>
+ >>> result = suite.run(unittest.TestResult())
+ >>> result
+ <unittest.result.TestResult run=9 errors=2 failures=2>
+ >>> for tst, _ in result.failures:
+ ... print(tst)
+ bad (test.test_doctest.sample_doctest.__test__) [0]
+ foo (test.test_doctest.sample_doctest) [0]
+ >>> for tst, _ in result.errors:
+ ... print(tst)
+ test_silly_setup (test.test_doctest.sample_doctest) [1]
+ y_is_one (test.test_doctest.sample_doctest) [0]
We can also supply the module by name:
>>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest')
- >>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=4>
+ >>> result = suite.run(unittest.TestResult())
+ >>> result
+ <unittest.result.TestResult run=9 errors=2 failures=2>
The module need not contain any doctest examples:
@@ -2296,13 +2306,26 @@ def test_DocTestSuite():
>>> result
<unittest.result.TestResult run=6 errors=0 failures=2>
>>> len(result.skipped)
- 2
+ 7
+ >>> for tst, _ in result.skipped:
+ ... print(tst)
+ double_skip (test.test_doctest.sample_doctest_skip) [0]
+ double_skip (test.test_doctest.sample_doctest_skip) [1]
+ double_skip (test.test_doctest.sample_doctest_skip)
+ partial_skip_fail (test.test_doctest.sample_doctest_skip) [0]
+ partial_skip_pass (test.test_doctest.sample_doctest_skip) [0]
+ single_skip (test.test_doctest.sample_doctest_skip) [0]
+ single_skip (test.test_doctest.sample_doctest_skip)
+ >>> for tst, _ in result.failures:
+ ... print(tst)
+ no_skip_fail (test.test_doctest.sample_doctest_skip) [0]
+ partial_skip_fail (test.test_doctest.sample_doctest_skip) [1]
We can use the current module:
>>> suite = test.test_doctest.sample_doctest.test_suite()
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=4>
+ <unittest.result.TestResult run=9 errors=2 failures=2>
We can also provide a DocTestFinder:
@@ -2310,7 +2333,7 @@ def test_DocTestSuite():
>>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest',
... test_finder=finder)
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=4>
+ <unittest.result.TestResult run=9 errors=2 failures=2>
The DocTestFinder need not return any tests:
@@ -2326,7 +2349,7 @@ def test_DocTestSuite():
>>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest', globs={})
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=5>
+ <unittest.result.TestResult run=9 errors=3 failures=2>
Alternatively, we can provide extra globals. Here we'll make an
error go away by providing an extra global variable:
@@ -2334,7 +2357,7 @@ def test_DocTestSuite():
>>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest',
... extraglobs={'y': 1})
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=3>
+ <unittest.result.TestResult run=9 errors=1 failures=2>
You can pass option flags. Here we'll cause an extra error
by disabling the blank-line feature:
@@ -2342,7 +2365,7 @@ def test_DocTestSuite():
>>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest',
... optionflags=doctest.DONT_ACCEPT_BLANKLINE)
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=5>
+ <unittest.result.TestResult run=9 errors=2 failures=3>
You can supply setUp and tearDown functions:
@@ -2359,7 +2382,7 @@ def test_DocTestSuite():
>>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest',
... setUp=setUp, tearDown=tearDown)
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=3>
+ <unittest.result.TestResult run=9 errors=1 failures=2>
But the tearDown restores sanity:
@@ -2377,13 +2400,115 @@ def test_DocTestSuite():
>>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest', setUp=setUp)
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=3>
+ <unittest.result.TestResult run=9 errors=1 failures=2>
Here, we didn't need to use a tearDown function because we
modified the test globals, which are a copy of the
sample_doctest module dictionary. The test globals are
automatically cleared for us after a test.
- """
+ """
+
+def test_DocTestSuite_errors():
+ """Tests for error reporting in DocTestSuite.
+
+ >>> import unittest
+ >>> import test.test_doctest.sample_doctest_errors as mod
+ >>> suite = doctest.DocTestSuite(mod)
+ >>> result = suite.run(unittest.TestResult())
+ >>> result
+ <unittest.result.TestResult run=4 errors=6 failures=3>
+ >>> print(result.failures[0][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...sample_doctest_errors.py", line 5, in test.test_doctest.sample_doctest_errors
+ >...>> 2 + 2
+ AssertionError: Failed example:
+ 2 + 2
+ Expected:
+ 5
+ Got:
+ 4
+ <BLANKLINE>
+ >>> print(result.failures[1][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...sample_doctest_errors.py", line None, in test.test_doctest.sample_doctest_errors.__test__.bad
+ AssertionError: Failed example:
+ 2 + 2
+ Expected:
+ 5
+ Got:
+ 4
+ <BLANKLINE>
+ >>> print(result.failures[2][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...sample_doctest_errors.py", line 16, in test.test_doctest.sample_doctest_errors.errors
+ >...>> 2 + 2
+ AssertionError: Failed example:
+ 2 + 2
+ Expected:
+ 5
+ Got:
+ 4
+ <BLANKLINE>
+ >>> print(result.errors[0][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...sample_doctest_errors.py", line 7, in test.test_doctest.sample_doctest_errors
+ >...>> 1/0
+ File "<doctest test.test_doctest.sample_doctest_errors[1]>", line 1, in <module>
+ 1/0
+ ~^~
+ ZeroDivisionError: division by zero
+ <BLANKLINE>
+ >>> print(result.errors[1][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...sample_doctest_errors.py", line None, in test.test_doctest.sample_doctest_errors.__test__.bad
+ File "<doctest test.test_doctest.sample_doctest_errors.__test__.bad[1]>", line 1, in <module>
+ 1/0
+ ~^~
+ ZeroDivisionError: division by zero
+ <BLANKLINE>
+ >>> print(result.errors[2][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...sample_doctest_errors.py", line 18, in test.test_doctest.sample_doctest_errors.errors
+ >...>> 1/0
+ File "<doctest test.test_doctest.sample_doctest_errors.errors[1]>", line 1, in <module>
+ 1/0
+ ~^~
+ ZeroDivisionError: division by zero
+ <BLANKLINE>
+ >>> print(result.errors[3][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...sample_doctest_errors.py", line 23, in test.test_doctest.sample_doctest_errors.errors
+ >...>> f()
+ File "<doctest test.test_doctest.sample_doctest_errors.errors[3]>", line 1, in <module>
+ f()
+ ~^^
+ File "<doctest test.test_doctest.sample_doctest_errors.errors[2]>", line 2, in f
+ 2 + '2'
+ ~~^~~~~
+ TypeError: ...
+ <BLANKLINE>
+ >>> print(result.errors[4][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...sample_doctest_errors.py", line 25, in test.test_doctest.sample_doctest_errors.errors
+ >...>> g()
+ File "<doctest test.test_doctest.sample_doctest_errors.errors[4]>", line 1, in <module>
+ g()
+ ~^^
+ File "...sample_doctest_errors.py", line 12, in g
+ [][0] # line 12
+ ~~^^^
+ IndexError: list index out of range
+ <BLANKLINE>
+ >>> print(result.errors[5][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...sample_doctest_errors.py", line 31, in test.test_doctest.sample_doctest_errors.syntax_error
+ >...>> 2+*3
+ File "<doctest test.test_doctest.sample_doctest_errors.syntax_error[0]>", line 1
+ 2+*3
+ ^
+ SyntaxError: invalid syntax
+ <BLANKLINE>
+ """
def test_DocFileSuite():
"""We can test tests found in text files using a DocFileSuite.
@@ -2396,7 +2521,7 @@ def test_DocFileSuite():
... 'test_doctest2.txt',
... 'test_doctest4.txt')
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=3 errors=0 failures=2>
+ <unittest.result.TestResult run=3 errors=2 failures=0>
The test files are looked for in the directory containing the
calling module. A package keyword argument can be provided to
@@ -2408,14 +2533,14 @@ def test_DocFileSuite():
... 'test_doctest4.txt',
... package='test.test_doctest')
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=3 errors=0 failures=2>
+ <unittest.result.TestResult run=3 errors=2 failures=0>
'/' should be used as a path separator. It will be converted
to a native separator at run time:
>>> suite = doctest.DocFileSuite('../test_doctest/test_doctest.txt')
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=1 errors=0 failures=1>
+ <unittest.result.TestResult run=1 errors=1 failures=0>
If DocFileSuite is used from an interactive session, then files
are resolved relative to the directory of sys.argv[0]:
@@ -2441,7 +2566,7 @@ def test_DocFileSuite():
>>> suite = doctest.DocFileSuite(test_file, module_relative=False)
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=1 errors=0 failures=1>
+ <unittest.result.TestResult run=1 errors=1 failures=0>
It is an error to specify `package` when `module_relative=False`:
@@ -2455,12 +2580,19 @@ def test_DocFileSuite():
>>> suite = doctest.DocFileSuite('test_doctest.txt',
... 'test_doctest4.txt',
- ... 'test_doctest_skip.txt')
+ ... 'test_doctest_skip.txt',
+ ... 'test_doctest_skip2.txt')
>>> result = suite.run(unittest.TestResult())
>>> result
- <unittest.result.TestResult run=3 errors=0 failures=1>
- >>> len(result.skipped)
- 1
+ <unittest.result.TestResult run=4 errors=1 failures=0>
+ >>> len(result.skipped)
+ 4
+ >>> for tst, _ in result.skipped: # doctest: +ELLIPSIS
+ ... print('=', tst)
+ = ...test_doctest_skip.txt [0]
+ = ...test_doctest_skip.txt [1]
+ = ...test_doctest_skip.txt
+ = ...test_doctest_skip2.txt [0]
You can specify initial global variables:
@@ -2469,7 +2601,7 @@ def test_DocFileSuite():
... 'test_doctest4.txt',
... globs={'favorite_color': 'blue'})
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=3 errors=0 failures=1>
+ <unittest.result.TestResult run=3 errors=1 failures=0>
In this case, we supplied a missing favorite color. You can
provide doctest options:
@@ -2480,7 +2612,7 @@ def test_DocFileSuite():
... optionflags=doctest.DONT_ACCEPT_BLANKLINE,
... globs={'favorite_color': 'blue'})
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=3 errors=0 failures=2>
+ <unittest.result.TestResult run=3 errors=1 failures=1>
And, you can provide setUp and tearDown functions:
@@ -2499,7 +2631,7 @@ def test_DocFileSuite():
... 'test_doctest4.txt',
... setUp=setUp, tearDown=tearDown)
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=3 errors=0 failures=1>
+ <unittest.result.TestResult run=3 errors=1 failures=0>
But the tearDown restores sanity:
@@ -2541,9 +2673,60 @@ def test_DocFileSuite():
... 'test_doctest4.txt',
... encoding='utf-8')
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=3 errors=0 failures=2>
+ <unittest.result.TestResult run=3 errors=2 failures=0>
+ """
- """
+def test_DocFileSuite_errors():
+ """Tests for error reporting in DocTestSuite.
+
+ >>> import unittest
+ >>> suite = doctest.DocFileSuite('test_doctest_errors.txt')
+ >>> result = suite.run(unittest.TestResult())
+ >>> result
+ <unittest.result.TestResult run=1 errors=3 failures=1>
+ >>> print(result.failures[0][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...test_doctest_errors.txt", line 4, in test_doctest_errors.txt
+ >...>> 2 + 2
+ AssertionError: Failed example:
+ 2 + 2
+ Expected:
+ 5
+ Got:
+ 4
+ <BLANKLINE>
+ >>> print(result.errors[0][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...test_doctest_errors.txt", line 6, in test_doctest_errors.txt
+ >...>> 1/0
+ File "<doctest test_doctest_errors.txt[1]>", line 1, in <module>
+ 1/0
+ ~^~
+ ZeroDivisionError: division by zero
+ <BLANKLINE>
+ >>> print(result.errors[1][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...test_doctest_errors.txt", line 11, in test_doctest_errors.txt
+ >...>> f()
+ File "<doctest test_doctest_errors.txt[3]>", line 1, in <module>
+ f()
+ ~^^
+ File "<doctest test_doctest_errors.txt[2]>", line 2, in f
+ 2 + '2'
+ ~~^~~~~
+ TypeError: ...
+ <BLANKLINE>
+ >>> print(result.errors[2][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...test_doctest_errors.txt", line 13, in test_doctest_errors.txt
+ >...>> 2+*3
+ File "<doctest test_doctest_errors.txt[4]>", line 1
+ 2+*3
+ ^
+ SyntaxError: invalid syntax
+ <BLANKLINE>
+
+ """
def test_trailing_space_in_test():
"""
@@ -2612,14 +2795,26 @@ def test_unittest_reportflags():
... optionflags=doctest.DONT_ACCEPT_BLANKLINE)
>>> import unittest
>>> result = suite.run(unittest.TestResult())
+ >>> result
+ <unittest.result.TestResult run=1 errors=1 failures=1>
>>> print(result.failures[0][1]) # doctest: +ELLIPSIS
- Traceback ...
- Failed example:
- favorite_color
- ...
- Failed example:
+ Traceback (most recent call last):
+ File ...
+ >...>> if 1:
+ AssertionError: Failed example:
if 1:
- ...
+ print('a')
+ print()
+ print('b')
+ Expected:
+ a
+ <BLANKLINE>
+ b
+ Got:
+ a
+ <BLANKLINE>
+ b
+ <BLANKLINE>
Note that we see both failures displayed.
@@ -2628,16 +2823,8 @@ def test_unittest_reportflags():
Now, when we run the test:
- >>> result = suite.run(unittest.TestResult())
- >>> print(result.failures[0][1]) # doctest: +ELLIPSIS
- Traceback ...
- Failed example:
- favorite_color
- Exception raised:
- ...
- NameError: name 'favorite_color' is not defined
- <BLANKLINE>
- <BLANKLINE>
+ >>> suite.run(unittest.TestResult())
+ <unittest.result.TestResult run=1 errors=1 failures=0>
We get only the first failure.
@@ -2647,19 +2834,20 @@ def test_unittest_reportflags():
>>> suite = doctest.DocFileSuite('test_doctest.txt',
... optionflags=doctest.DONT_ACCEPT_BLANKLINE | doctest.REPORT_NDIFF)
- Then the default eporting options are ignored:
+ Then the default reporting options are ignored:
>>> result = suite.run(unittest.TestResult())
+ >>> result
+ <unittest.result.TestResult run=1 errors=1 failures=1>
    *NOTE*: These doctests are intentionally not placed in a raw string to depict
the trailing whitespace using `\x20` in the diff below.
>>> print(result.failures[0][1]) # doctest: +ELLIPSIS
Traceback ...
- Failed example:
- favorite_color
- ...
- Failed example:
+ File ...
+ >...>> if 1:
+ AssertionError: Failed example:
if 1:
print('a')
print()
@@ -2670,7 +2858,6 @@ def test_unittest_reportflags():
+\x20
b
<BLANKLINE>
- <BLANKLINE>
Test runners can restore the formatting flags after they run:
@@ -2860,6 +3047,57 @@ Test the verbose output:
>>> _colorize.COLORIZE = save_colorize
"""
+def test_testfile_errors(): r"""
+Tests for error reporting in the testfile() function.
+
+ >>> doctest.testfile('test_doctest_errors.txt', verbose=False) # doctest: +ELLIPSIS
+ **********************************************************************
+ File "...test_doctest_errors.txt", line 4, in test_doctest_errors.txt
+ Failed example:
+ 2 + 2
+ Expected:
+ 5
+ Got:
+ 4
+ **********************************************************************
+ File "...test_doctest_errors.txt", line 6, in test_doctest_errors.txt
+ Failed example:
+ 1/0
+ Exception raised:
+ Traceback (most recent call last):
+ File "<doctest test_doctest_errors.txt[1]>", line 1, in <module>
+ 1/0
+ ~^~
+ ZeroDivisionError: division by zero
+ **********************************************************************
+ File "...test_doctest_errors.txt", line 11, in test_doctest_errors.txt
+ Failed example:
+ f()
+ Exception raised:
+ Traceback (most recent call last):
+ File "<doctest test_doctest_errors.txt[3]>", line 1, in <module>
+ f()
+ ~^^
+ File "<doctest test_doctest_errors.txt[2]>", line 2, in f
+ 2 + '2'
+ ~~^~~~~
+ TypeError: ...
+ **********************************************************************
+ File "...test_doctest_errors.txt", line 13, in test_doctest_errors.txt
+ Failed example:
+ 2+*3
+ Exception raised:
+ File "<doctest test_doctest_errors.txt[4]>", line 1
+ 2+*3
+ ^
+ SyntaxError: invalid syntax
+ **********************************************************************
+ 1 item had failures:
+ 4 of 5 in test_doctest_errors.txt
+ ***Test Failed*** 4 failures.
+ TestResults(failed=4, attempted=5)
+"""
+
class TestImporter(importlib.abc.MetaPathFinder):
def find_spec(self, fullname, path, target=None):
@@ -2990,6 +3228,110 @@ out of the binary module.
TestResults(failed=0, attempted=0)
"""
+def test_testmod_errors(): r"""
+Tests for error reporting in the testmod() function.
+
+ >>> import test.test_doctest.sample_doctest_errors as mod
+ >>> doctest.testmod(mod, verbose=False) # doctest: +ELLIPSIS
+ **********************************************************************
+ File "...sample_doctest_errors.py", line 5, in test.test_doctest.sample_doctest_errors
+ Failed example:
+ 2 + 2
+ Expected:
+ 5
+ Got:
+ 4
+ **********************************************************************
+ File "...sample_doctest_errors.py", line 7, in test.test_doctest.sample_doctest_errors
+ Failed example:
+ 1/0
+ Exception raised:
+ Traceback (most recent call last):
+ File "<doctest test.test_doctest.sample_doctest_errors[1]>", line 1, in <module>
+ 1/0
+ ~^~
+ ZeroDivisionError: division by zero
+ **********************************************************************
+ File "...sample_doctest_errors.py", line ?, in test.test_doctest.sample_doctest_errors.__test__.bad
+ Failed example:
+ 2 + 2
+ Expected:
+ 5
+ Got:
+ 4
+ **********************************************************************
+ File "...sample_doctest_errors.py", line ?, in test.test_doctest.sample_doctest_errors.__test__.bad
+ Failed example:
+ 1/0
+ Exception raised:
+ Traceback (most recent call last):
+ File "<doctest test.test_doctest.sample_doctest_errors.__test__.bad[1]>", line 1, in <module>
+ 1/0
+ ~^~
+ ZeroDivisionError: division by zero
+ **********************************************************************
+ File "...sample_doctest_errors.py", line 16, in test.test_doctest.sample_doctest_errors.errors
+ Failed example:
+ 2 + 2
+ Expected:
+ 5
+ Got:
+ 4
+ **********************************************************************
+ File "...sample_doctest_errors.py", line 18, in test.test_doctest.sample_doctest_errors.errors
+ Failed example:
+ 1/0
+ Exception raised:
+ Traceback (most recent call last):
+ File "<doctest test.test_doctest.sample_doctest_errors.errors[1]>", line 1, in <module>
+ 1/0
+ ~^~
+ ZeroDivisionError: division by zero
+ **********************************************************************
+ File "...sample_doctest_errors.py", line 23, in test.test_doctest.sample_doctest_errors.errors
+ Failed example:
+ f()
+ Exception raised:
+ Traceback (most recent call last):
+ File "<doctest test.test_doctest.sample_doctest_errors.errors[3]>", line 1, in <module>
+ f()
+ ~^^
+ File "<doctest test.test_doctest.sample_doctest_errors.errors[2]>", line 2, in f
+ 2 + '2'
+ ~~^~~~~
+ TypeError: ...
+ **********************************************************************
+ File "...sample_doctest_errors.py", line 25, in test.test_doctest.sample_doctest_errors.errors
+ Failed example:
+ g()
+ Exception raised:
+ Traceback (most recent call last):
+ File "<doctest test.test_doctest.sample_doctest_errors.errors[4]>", line 1, in <module>
+ g()
+ ~^^
+ File "...sample_doctest_errors.py", line 12, in g
+ [][0] # line 12
+ ~~^^^
+ IndexError: list index out of range
+ **********************************************************************
+ File "...sample_doctest_errors.py", line 31, in test.test_doctest.sample_doctest_errors.syntax_error
+ Failed example:
+ 2+*3
+ Exception raised:
+ File "<doctest test.test_doctest.sample_doctest_errors.syntax_error[0]>", line 1
+ 2+*3
+ ^
+ SyntaxError: invalid syntax
+ **********************************************************************
+ 4 items had failures:
+ 2 of 2 in test.test_doctest.sample_doctest_errors
+ 2 of 2 in test.test_doctest.sample_doctest_errors.__test__.bad
+ 4 of 5 in test.test_doctest.sample_doctest_errors.errors
+ 1 of 1 in test.test_doctest.sample_doctest_errors.syntax_error
+ ***Test Failed*** 9 failures.
+ TestResults(failed=9, attempted=10)
+"""
+
try:
os.fsencode("foo-bär@baz.py")
supports_unicode = True
@@ -3021,11 +3363,6 @@ Check doctest with a non-ascii filename:
raise Exception('clé')
Exception raised:
Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<doctest foo-bär@baz[0]>", line 1, in <module>
raise Exception('clé')
Exception: clé
@@ -3318,9 +3655,9 @@ def test_run_doctestsuite_multiple_times():
>>> import test.test_doctest.sample_doctest
>>> suite = doctest.DocTestSuite(test.test_doctest.sample_doctest)
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=4>
+ <unittest.result.TestResult run=9 errors=2 failures=2>
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=4>
+ <unittest.result.TestResult run=9 errors=2 failures=2>
"""
diff --git a/Lib/test/test_doctest/test_doctest_errors.txt b/Lib/test/test_doctest/test_doctest_errors.txt
new file mode 100644
index 00000000000..93c3c106e60
--- /dev/null
+++ b/Lib/test/test_doctest/test_doctest_errors.txt
@@ -0,0 +1,14 @@
+This is a sample doctest in a text file, in which all examples fail
+or raise an exception.
+
+ >>> 2 + 2
+ 5
+ >>> 1/0
+ 1
+ >>> def f():
+ ... 2 + '2'
+ ...
+ >>> f()
+ 1
+ >>> 2+*3
+ 5
diff --git a/Lib/test/test_doctest/test_doctest_skip.txt b/Lib/test/test_doctest/test_doctest_skip.txt
index f340e2b8141..06c23d06e60 100644
--- a/Lib/test/test_doctest/test_doctest_skip.txt
+++ b/Lib/test/test_doctest/test_doctest_skip.txt
@@ -2,3 +2,5 @@ This is a sample doctest in a text file, in which all examples are skipped.
>>> 2 + 2 # doctest: +SKIP
5
+ >>> 2 + 2 # doctest: +SKIP
+ 4
diff --git a/Lib/test/test_doctest/test_doctest_skip2.txt b/Lib/test/test_doctest/test_doctest_skip2.txt
new file mode 100644
index 00000000000..85e4938c346
--- /dev/null
+++ b/Lib/test/test_doctest/test_doctest_skip2.txt
@@ -0,0 +1,6 @@
+This is a sample doctest in a text file, in which some examples are skipped.
+
+ >>> 2 + 2 # doctest: +SKIP
+ 5
+ >>> 2 + 2
+ 4
diff --git a/Lib/test/test_external_inspection.py b/Lib/test/test_external_inspection.py
index 291c419066a..303af25fc7a 100644
--- a/Lib/test/test_external_inspection.py
+++ b/Lib/test/test_external_inspection.py
@@ -34,17 +34,17 @@ skip_if_not_supported = unittest.skipIf(
def get_stack_trace(pid):
- unwinder = RemoteUnwinder(pid, all_threads=True)
+ unwinder = RemoteUnwinder(pid, all_threads=True, debug=True)
return unwinder.get_stack_trace()
def get_async_stack_trace(pid):
- unwinder = RemoteUnwinder(pid)
+ unwinder = RemoteUnwinder(pid, debug=True)
return unwinder.get_async_stack_trace()
def get_all_awaited_by(pid):
- unwinder = RemoteUnwinder(pid)
+ unwinder = RemoteUnwinder(pid, debug=True)
return unwinder.get_all_awaited_by()
@@ -114,17 +114,17 @@ class TestGetStackTrace(unittest.TestCase):
p.wait(timeout=SHORT_TIMEOUT)
thread_expected_stack_trace = [
- ("foo", script_name, 15),
- ("baz", script_name, 12),
- ("bar", script_name, 9),
- ('Thread.run', threading.__file__, ANY)
+ (script_name, 15, "foo"),
+ (script_name, 12, "baz"),
+ (script_name, 9, "bar"),
+ (threading.__file__, ANY, 'Thread.run')
]
        # It is possible that there are more threads, so we check that the
# expected stack traces are in the result (looking at you Windows!)
self.assertIn((ANY, thread_expected_stack_trace), stack_trace)
# Check that the main thread stack trace is in the result
- frame = ("<module>", script_name, 19)
+ frame = (script_name, 19, "<module>")
for _, stack in stack_trace:
if frame in stack:
break
@@ -222,47 +222,47 @@ class TestGetStackTrace(unittest.TestCase):
root_task = "Task-1"
expected_stack_trace = [
[
- ("c5", script_name, 10),
- ("c4", script_name, 14),
- ("c3", script_name, 17),
- ("c2", script_name, 20),
+ (script_name, 10, "c5"),
+ (script_name, 14, "c4"),
+ (script_name, 17, "c3"),
+ (script_name, 20, "c2"),
],
"c2_root",
[
[
[
(
- "TaskGroup._aexit",
taskgroups.__file__,
ANY,
+ "TaskGroup._aexit"
),
(
- "TaskGroup.__aexit__",
taskgroups.__file__,
ANY,
+ "TaskGroup.__aexit__"
),
- ("main", script_name, 26),
+ (script_name, 26, "main"),
],
"Task-1",
[],
],
[
- [("c1", script_name, 23)],
+ [(script_name, 23, "c1")],
"sub_main_1",
[
[
[
(
- "TaskGroup._aexit",
taskgroups.__file__,
ANY,
+ "TaskGroup._aexit"
),
(
- "TaskGroup.__aexit__",
taskgroups.__file__,
ANY,
+ "TaskGroup.__aexit__"
),
- ("main", script_name, 26),
+ (script_name, 26, "main"),
],
"Task-1",
[],
@@ -270,22 +270,22 @@ class TestGetStackTrace(unittest.TestCase):
],
],
[
- [("c1", script_name, 23)],
+ [(script_name, 23, "c1")],
"sub_main_2",
[
[
[
(
- "TaskGroup._aexit",
taskgroups.__file__,
ANY,
+ "TaskGroup._aexit"
),
(
- "TaskGroup.__aexit__",
taskgroups.__file__,
ANY,
+ "TaskGroup.__aexit__"
),
- ("main", script_name, 26),
+ (script_name, 26, "main"),
],
"Task-1",
[],
@@ -363,9 +363,9 @@ class TestGetStackTrace(unittest.TestCase):
expected_stack_trace = [
[
- ("gen_nested_call", script_name, 10),
- ("gen", script_name, 16),
- ("main", script_name, 19),
+ (script_name, 10, "gen_nested_call"),
+ (script_name, 16, "gen"),
+ (script_name, 19, "main"),
],
"Task-1",
[],
@@ -439,9 +439,9 @@ class TestGetStackTrace(unittest.TestCase):
stack_trace[2].sort(key=lambda x: x[1])
expected_stack_trace = [
- [("deep", script_name, 11), ("c1", script_name, 15)],
+ [(script_name, 11, "deep"), (script_name, 15, "c1")],
"Task-2",
- [[[("main", script_name, 21)], "Task-1", []]],
+ [[[(script_name, 21, "main")], "Task-1", []]],
]
self.assertEqual(stack_trace, expected_stack_trace)
@@ -515,16 +515,16 @@ class TestGetStackTrace(unittest.TestCase):
stack_trace[2].sort(key=lambda x: x[1])
expected_stack_trace = [
[
- ("deep", script_name, 11),
- ("c1", script_name, 15),
- ("staggered_race.<locals>.run_one_coro", staggered.__file__, ANY),
+ (script_name, 11, "deep"),
+ (script_name, 15, "c1"),
+ (staggered.__file__, ANY, "staggered_race.<locals>.run_one_coro"),
],
"Task-2",
[
[
[
- ("staggered_race", staggered.__file__, ANY),
- ("main", script_name, 21),
+ (staggered.__file__, ANY, "staggered_race"),
+ (script_name, 21, "main"),
],
"Task-1",
[],
@@ -662,16 +662,16 @@ class TestGetStackTrace(unittest.TestCase):
self.assertIn((ANY, "Task-1", []), entries)
main_stack = [
(
- "TaskGroup._aexit",
taskgroups.__file__,
ANY,
+ "TaskGroup._aexit",
),
(
- "TaskGroup.__aexit__",
taskgroups.__file__,
ANY,
+ "TaskGroup.__aexit__",
),
- ("main", script_name, 60),
+ (script_name, 60, "main"),
]
self.assertIn(
(ANY, "server task", [[main_stack, ANY]]),
@@ -686,16 +686,16 @@ class TestGetStackTrace(unittest.TestCase):
[
[
(
- "TaskGroup._aexit",
taskgroups.__file__,
ANY,
+ "TaskGroup._aexit",
),
(
- "TaskGroup.__aexit__",
taskgroups.__file__,
ANY,
+ "TaskGroup.__aexit__",
),
- ("echo_client_spam", script_name, 41),
+ (script_name, 41, "echo_client_spam"),
],
ANY,
]
@@ -741,14 +741,14 @@ class TestGetStackTrace(unittest.TestCase):
stack[:2],
[
(
- "get_stack_trace",
__file__,
get_stack_trace.__code__.co_firstlineno + 2,
+ "get_stack_trace",
),
(
- "TestGetStackTrace.test_self_trace",
__file__,
self.test_self_trace.__code__.co_firstlineno + 6,
+ "TestGetStackTrace.test_self_trace",
),
]
)
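The test_external_inspection updates above reorder the frame tuples produced by the remote unwinder to (filename, lineno, funcname). A trivial, purely illustrative example of unpacking the new ordering (the values are hypothetical):

    # Hypothetical frame tuple in the new (filename, lineno, funcname) order.
    frame = ("script.py", 15, "foo")
    filename, lineno, funcname = frame
    print(f"{funcname} ({filename}:{lineno})")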
diff --git a/Lib/test/test_fcntl.py b/Lib/test/test_fcntl.py
index e0e6782258f..7140a7b4f29 100644
--- a/Lib/test/test_fcntl.py
+++ b/Lib/test/test_fcntl.py
@@ -11,7 +11,7 @@ from test.support import (
cpython_only, get_pagesize, is_apple, requires_subprocess, verbose
)
from test.support.import_helper import import_module
-from test.support.os_helper import TESTFN, unlink
+from test.support.os_helper import TESTFN, unlink, make_bad_fd
# Skip test if no fcntl module.
@@ -274,6 +274,17 @@ class TestFcntl(unittest.TestCase):
def test_fcntl_large_buffer(self):
self._check_fcntl_not_mutate_len(2024)
+ @unittest.skipUnless(hasattr(fcntl, 'F_DUPFD'), 'need fcntl.F_DUPFD')
+ def test_bad_fd(self):
+ # gh-134744: Test error handling
+ fd = make_bad_fd()
+ with self.assertRaises(OSError):
+ fcntl.fcntl(fd, fcntl.F_DUPFD, 0)
+ with self.assertRaises(OSError):
+ fcntl.fcntl(fd, fcntl.F_DUPFD, b'\0' * 10)
+ with self.assertRaises(OSError):
+ fcntl.fcntl(fd, fcntl.F_DUPFD, b'\0' * 2048)
+
if __name__ == '__main__':
unittest.main()
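The new test_bad_fd case (gh-134744) checks that fcntl() on an invalid descriptor raises OSError cleanly. A POSIX-only sketch of the same idea, using F_GETFD instead of the test's F_DUPFD:

    import errno
    import fcntl
    import os

    # Duplicate a descriptor, then close it so it is guaranteed invalid.
    fd = os.dup(0)
    os.close(fd)
    try:
        fcntl.fcntl(fd, fcntl.F_GETFD)
    except OSError as exc:
        assert exc.errno == errno.EBADF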
diff --git a/Lib/test/test_fractions.py b/Lib/test/test_fractions.py
index 96b3f305194..d1d2739856c 100644
--- a/Lib/test/test_fractions.py
+++ b/Lib/test/test_fractions.py
@@ -1518,6 +1518,8 @@ class FractionTest(unittest.TestCase):
(F(51, 1000), '.1f', '0.1'),
(F(149, 1000), '.1f', '0.1'),
(F(151, 1000), '.1f', '0.2'),
+ (F(22, 7), '.02f', '3.14'), # issue gh-130662
+ (F(22, 7), '005.02f', '03.14'),
]
for fraction, spec, expected in testcases:
with self.subTest(fraction=fraction, spec=spec):
@@ -1616,12 +1618,6 @@ class FractionTest(unittest.TestCase):
'=010%',
'>00.2f',
'>00f',
- # Too many zeros - minimum width should not have leading zeros
- '006f',
- # Leading zeros in precision
- '.010f',
- '.02f',
- '.000f',
# Missing precision
'.e',
'.f',
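The new fraction formatting cases (gh-130662) accept precision fields with leading zeros, matching float formatting. A short sketch, assuming the patched Fraction.__format__:

    from fractions import Fraction

    # Leading zeros in the precision are now accepted rather than
    # raising ValueError.
    print(format(Fraction(22, 7), '.02f'))     # 3.14
    print(format(Fraction(22, 7), '005.02f'))  # 03.14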
diff --git a/Lib/test/test_free_threading/test_itertools_batched.py b/Lib/test/test_free_threading/test_itertools.py
index a754b4f9ea9..8360afbf78c 100644
--- a/Lib/test/test_free_threading/test_itertools_batched.py
+++ b/Lib/test/test_free_threading/test_itertools.py
@@ -1,15 +1,15 @@
import unittest
from threading import Thread, Barrier
-from itertools import batched
+from itertools import batched, cycle
from test.support import threading_helper
threading_helper.requires_working_threading(module=True)
-class EnumerateThreading(unittest.TestCase):
+class ItertoolsThreading(unittest.TestCase):
@threading_helper.reap_threads
- def test_threading(self):
+ def test_batched(self):
number_of_threads = 10
number_of_iterations = 20
barrier = Barrier(number_of_threads)
@@ -34,5 +34,31 @@ class EnumerateThreading(unittest.TestCase):
barrier.reset()
+ @threading_helper.reap_threads
+ def test_cycle(self):
+ number_of_threads = 6
+ number_of_iterations = 10
+ number_of_cycles = 400
+
+ barrier = Barrier(number_of_threads)
+ def work(it):
+ barrier.wait()
+ for _ in range(number_of_cycles):
+ _ = next(it)
+
+ data = (1, 2, 3, 4)
+ for it in range(number_of_iterations):
+ cycle_iterator = cycle(data)
+ worker_threads = []
+ for ii in range(number_of_threads):
+ worker_threads.append(
+ Thread(target=work, args=[cycle_iterator]))
+
+ with threading_helper.start_threads(worker_threads):
+ pass
+
+ barrier.reset()
+
+
if __name__ == "__main__":
unittest.main()
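The renamed free-threading test module also adds a case that shares one itertools.cycle iterator across several threads. A condensed version of that pattern (thread and iteration counts shrunk for brevity):

    from itertools import cycle
    from threading import Barrier, Thread

    shared = cycle((1, 2, 3, 4))   # one iterator shared by all workers
    barrier = Barrier(4)

    def work():
        barrier.wait()
        for _ in range(400):
            next(shared)

    threads = [Thread(target=work) for _ in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()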
diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py
index c39565144bf..7f5d48b9c63 100644
--- a/Lib/test/test_grammar.py
+++ b/Lib/test/test_grammar.py
@@ -1,7 +1,7 @@
# Python test set -- part 1, grammar.
# This just tests whether the parser accepts them all.
-from test.support import check_syntax_error
+from test.support import check_syntax_error, skip_wasi_stack_overflow
from test.support import import_helper
import annotationlib
import inspect
@@ -249,6 +249,18 @@ the \'lazy\' dog.\n\
compile(s, "<test>", "exec")
self.assertIn("was never closed", str(cm.exception))
+ @skip_wasi_stack_overflow()
+ def test_max_level(self):
+ # Macro defined in Parser/lexer/state.h
+ MAXLEVEL = 200
+
+ result = eval("(" * MAXLEVEL + ")" * MAXLEVEL)
+ self.assertEqual(result, ())
+
+ with self.assertRaises(SyntaxError) as cm:
+ eval("(" * (MAXLEVEL + 1) + ")" * (MAXLEVEL + 1))
+ self.assertStartsWith(str(cm.exception), 'too many nested parentheses')
+
var_annot_global: int # a global annotated is necessary for test_var_annot
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
index 161c7652d7a..b83ae181718 100644
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -12,6 +12,7 @@ import io
import itertools
import logging
import os
+import re
import sys
import sysconfig
import tempfile
@@ -97,6 +98,14 @@ def read_vectors(hash_name):
yield parts
+DEPRECATED_STRING_PARAMETER = re.escape(
+ "the 'string' keyword parameter is deprecated since "
+ "Python 3.15 and slated for removal in Python 3.19; "
+ "use the 'data' keyword parameter or pass the data "
+ "to hash as a positional argument instead"
+)
+
+
class HashLibTestCase(unittest.TestCase):
supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
'sha224', 'SHA224', 'sha256', 'SHA256',
@@ -140,11 +149,10 @@ class HashLibTestCase(unittest.TestCase):
# of hashlib.new given the algorithm name.
for algorithm, constructors in self.constructors_to_test.items():
constructors.add(getattr(hashlib, algorithm))
- def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm, **kwargs):
- if data is None:
- return hashlib.new(_alg, **kwargs)
- return hashlib.new(_alg, data, **kwargs)
- constructors.add(_test_algorithm_via_hashlib_new)
+ def c(*args, __algorithm_name=algorithm, **kwargs):
+ return hashlib.new(__algorithm_name, *args, **kwargs)
+ c.__name__ = f'do_test_algorithm_via_hashlib_new_{algorithm}'
+ constructors.add(c)
_hashlib = self._conditional_import_module('_hashlib')
self._hashlib = _hashlib
@@ -249,6 +257,81 @@ class HashLibTestCase(unittest.TestCase):
self._hashlib.new("md5", usedforsecurity=False)
self._hashlib.openssl_md5(usedforsecurity=False)
+ @unittest.skipIf(get_fips_mode(), "skip in FIPS mode")
+ def test_clinic_signature(self):
+ for constructor in self.hash_constructors:
+ with self.subTest(constructor.__name__):
+ constructor(b'')
+ constructor(data=b'')
+ with self.assertWarnsRegex(DeprecationWarning,
+ DEPRECATED_STRING_PARAMETER):
+ constructor(string=b'')
+
+ digest_name = constructor(b'').name
+ with self.subTest(digest_name):
+ hashlib.new(digest_name, b'')
+ hashlib.new(digest_name, data=b'')
+ with self.assertWarnsRegex(DeprecationWarning,
+ DEPRECATED_STRING_PARAMETER):
+ hashlib.new(digest_name, string=b'')
+ if self._hashlib:
+ self._hashlib.new(digest_name, b'')
+ self._hashlib.new(digest_name, data=b'')
+ with self.assertWarnsRegex(DeprecationWarning,
+ DEPRECATED_STRING_PARAMETER):
+ self._hashlib.new(digest_name, string=b'')
+
+ @unittest.skipIf(get_fips_mode(), "skip in FIPS mode")
+ def test_clinic_signature_errors(self):
+ nomsg = b''
+ mymsg = b'msg'
+ conflicting_call = re.escape(
+ "'data' and 'string' are mutually exclusive "
+ "and support for 'string' keyword parameter "
+ "is slated for removal in a future version."
+ )
+ duplicated_param = re.escape("given by name ('data') and position")
+ unexpected_param = re.escape("got an unexpected keyword argument '_'")
+ for args, kwds, errmsg in [
+ # Reject duplicated arguments before unknown keyword arguments.
+ ((nomsg,), dict(data=nomsg, _=nomsg), duplicated_param),
+ ((mymsg,), dict(data=nomsg, _=nomsg), duplicated_param),
+ # Reject duplicated arguments before conflicting ones.
+ *itertools.product(
+ [[nomsg], [mymsg]],
+ [dict(data=nomsg), dict(data=nomsg, string=nomsg)],
+ [duplicated_param]
+ ),
+ # Reject unknown keyword arguments before conflicting ones.
+ *itertools.product(
+ [()],
+ [
+ dict(_=None),
+ dict(data=nomsg, _=None),
+ dict(string=nomsg, _=None),
+ dict(string=nomsg, data=nomsg, _=None),
+ ],
+ [unexpected_param]
+ ),
+ ((nomsg,), dict(_=None), unexpected_param),
+ ((mymsg,), dict(_=None), unexpected_param),
+ # Reject conflicting arguments.
+ [(nomsg,), dict(string=nomsg), conflicting_call],
+ [(mymsg,), dict(string=nomsg), conflicting_call],
+ [(), dict(data=nomsg, string=nomsg), conflicting_call],
+ ]:
+ for constructor in self.hash_constructors:
+ digest_name = constructor(b'').name
+ with self.subTest(constructor.__name__, args=args, kwds=kwds):
+ with self.assertRaisesRegex(TypeError, errmsg):
+ constructor(*args, **kwds)
+ with self.subTest(digest_name, args=args, kwds=kwds):
+ with self.assertRaisesRegex(TypeError, errmsg):
+ hashlib.new(digest_name, *args, **kwds)
+ if self._hashlib:
+ with self.assertRaisesRegex(TypeError, errmsg):
+ self._hashlib.new(digest_name, *args, **kwds)
+
def test_unknown_hash(self):
self.assertRaises(ValueError, hashlib.new, 'spam spam spam spam spam')
self.assertRaises(TypeError, hashlib.new, 1)
@@ -718,8 +801,6 @@ class HashLibTestCase(unittest.TestCase):
self.assertRaises(ValueError, constructor, node_offset=-1)
self.assertRaises(OverflowError, constructor, node_offset=max_offset+1)
- self.assertRaises(TypeError, constructor, data=b'')
- self.assertRaises(TypeError, constructor, string=b'')
self.assertRaises(TypeError, constructor, '')
constructor(
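The hashlib changes above add a deprecated 'string' keyword alongside 'data' on the hash constructors. A sketch of the intended migration, assuming the patched signatures (sha256 chosen arbitrarily):

    import hashlib
    import warnings

    h1 = hashlib.sha256(b"payload")           # positional: preferred
    h2 = hashlib.sha256(data=b"payload")      # keyword: also fine
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        h3 = hashlib.sha256(string=b"payload")  # legacy keyword: deprecated
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)
    assert h1.digest() == h2.digest() == h3.digest()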
diff --git a/Lib/test/test_interpreters/test_api.py b/Lib/test/test_interpreters/test_api.py
index 1e2d572b1cb..b3c9ef8efba 100644
--- a/Lib/test/test_interpreters/test_api.py
+++ b/Lib/test/test_interpreters/test_api.py
@@ -1,17 +1,22 @@
+import contextlib
import os
import pickle
+import sys
from textwrap import dedent
import threading
import types
import unittest
from test import support
+from test.support import os_helper
+from test.support import script_helper
from test.support import import_helper
# Raise SkipTest if subinterpreters not supported.
_interpreters = import_helper.import_module('_interpreters')
from test.support import Py_GIL_DISABLED
from test.support import interpreters
from test.support import force_not_colorized
+import test._crossinterp_definitions as defs
from test.support.interpreters import (
InterpreterError, InterpreterNotFoundError, ExecutionFailed,
)
@@ -29,6 +34,59 @@ WHENCE_STR_XI = 'cross-interpreter C-API'
WHENCE_STR_STDLIB = '_interpreters module'
+def is_pickleable(obj):
+ try:
+ pickle.dumps(obj)
+ except Exception:
+ return False
+ return True
+
+
+@contextlib.contextmanager
+def defined_in___main__(name, script, *, remove=False):
+ import __main__ as mainmod
+ mainns = vars(mainmod)
+ assert name not in mainns
+ exec(script, mainns, mainns)
+ if remove:
+ yield mainns.pop(name)
+ else:
+ try:
+ yield mainns[name]
+ finally:
+ mainns.pop(name, None)
+
+
+def build_excinfo(exctype, msg=None, formatted=None, errdisplay=None):
+ if isinstance(exctype, type):
+ assert issubclass(exctype, BaseException), exctype
+ exctype = types.SimpleNamespace(
+ __name__=exctype.__name__,
+ __qualname__=exctype.__qualname__,
+ __module__=exctype.__module__,
+ )
+ elif isinstance(exctype, str):
+        module, _, name = exctype.rpartition('.')
+ if not module and name in __builtins__:
+ module = 'builtins'
+ exctype = types.SimpleNamespace(
+ __name__=name,
+ __qualname__=exctype,
+ __module__=module or None,
+ )
+ else:
+ assert isinstance(exctype, types.SimpleNamespace)
+ assert msg is None or isinstance(msg, str), msg
+ assert formatted is None or isinstance(formatted, str), formatted
+ assert errdisplay is None or isinstance(errdisplay, str), errdisplay
+ return types.SimpleNamespace(
+ type=exctype,
+ msg=msg,
+ formatted=formatted,
+ errdisplay=errdisplay,
+ )
+
+
class ModuleTests(TestBase):
def test_queue_aliases(self):
@@ -839,9 +897,16 @@ class TestInterpreterExec(TestBase):
interp.exec(10)
def test_bytes_for_script(self):
+ r, w = self.pipe()
+ RAN = b'R'
+ DONE = b'D'
interp = interpreters.create()
- with self.assertRaises(TypeError):
- interp.exec(b'print("spam")')
+ interp.exec(f"""if True:
+ import os
+ os.write({w}, {RAN!r})
+ """)
+ os.write(w, DONE)
+ self.assertEqual(os.read(r, 1), RAN)
def test_with_background_threads_still_running(self):
r_interp, w_interp = self.pipe()
@@ -883,24 +948,26 @@ class TestInterpreterExec(TestBase):
# Interpreter.exec() behavior.
-def call_func_noop():
- pass
+call_func_noop = defs.spam_minimal
+call_func_ident = defs.spam_returns_arg
+call_func_failure = defs.spam_raises
def call_func_return_shareable():
return (1, None)
-def call_func_return_not_shareable():
- return [1, 2, 3]
+def call_func_return_stateless_func():
+ return (lambda x: x)
-def call_func_failure():
- raise Exception('spam!')
+def call_func_return_pickleable():
+ return [1, 2, 3]
-def call_func_ident(value):
- return value
+def call_func_return_unpickleable():
+ x = 42
+ return (lambda: x)
def get_call_func_closure(value):
@@ -909,6 +976,11 @@ def get_call_func_closure(value):
return call_func_closure
+def call_func_exec_wrapper(script, ns):
+ res = exec(script, ns, ns)
+ return res, ns, id(ns)
+
+
class Spam:
@staticmethod
@@ -1005,86 +1077,375 @@ class TestInterpreterCall(TestBase):
# - preserves info (e.g. SyntaxError)
# - matching error display
- def test_call(self):
+ @contextlib.contextmanager
+ def assert_fails(self, expected):
+ with self.assertRaises(ExecutionFailed) as cm:
+ yield cm
+ uncaught = cm.exception.excinfo
+ self.assertEqual(uncaught.type.__name__, expected.__name__)
+
+ def assert_fails_not_shareable(self):
+ return self.assert_fails(interpreters.NotShareableError)
+
+ def assert_code_equal(self, code1, code2):
+ if code1 == code2:
+ return
+ self.assertEqual(code1.co_name, code2.co_name)
+ self.assertEqual(code1.co_flags, code2.co_flags)
+ self.assertEqual(code1.co_consts, code2.co_consts)
+ self.assertEqual(code1.co_varnames, code2.co_varnames)
+ self.assertEqual(code1.co_cellvars, code2.co_cellvars)
+ self.assertEqual(code1.co_freevars, code2.co_freevars)
+ self.assertEqual(code1.co_names, code2.co_names)
+ self.assertEqual(
+ _testinternalcapi.get_code_var_counts(code1),
+ _testinternalcapi.get_code_var_counts(code2),
+ )
+ self.assertEqual(code1.co_code, code2.co_code)
+
+ def assert_funcs_equal(self, func1, func2):
+ if func1 == func2:
+ return
+ self.assertIs(type(func1), type(func2))
+ self.assertEqual(func1.__name__, func2.__name__)
+ self.assertEqual(func1.__defaults__, func2.__defaults__)
+ self.assertEqual(func1.__kwdefaults__, func2.__kwdefaults__)
+ self.assertEqual(func1.__closure__, func2.__closure__)
+ self.assert_code_equal(func1.__code__, func2.__code__)
+ self.assertEqual(
+ _testinternalcapi.get_code_var_counts(func1),
+ _testinternalcapi.get_code_var_counts(func2),
+ )
+
+ def assert_exceptions_equal(self, exc1, exc2):
+ assert isinstance(exc1, Exception)
+ assert isinstance(exc2, Exception)
+ if exc1 == exc2:
+ return
+ self.assertIs(type(exc1), type(exc2))
+ self.assertEqual(exc1.args, exc2.args)
+
+ def test_stateless_funcs(self):
interp = interpreters.create()
- for i, (callable, args, kwargs) in enumerate([
- (call_func_noop, (), {}),
- (call_func_return_shareable, (), {}),
- (call_func_return_not_shareable, (), {}),
- (Spam.noop, (), {}),
+ func = call_func_noop
+ with self.subTest('no args, no return'):
+ res = interp.call(func)
+ self.assertIsNone(res)
+
+ func = call_func_return_shareable
+ with self.subTest('no args, returns shareable'):
+ res = interp.call(func)
+ self.assertEqual(res, (1, None))
+
+ func = call_func_return_stateless_func
+ expected = (lambda x: x)
+ with self.subTest('no args, returns stateless func'):
+ res = interp.call(func)
+ self.assert_funcs_equal(res, expected)
+
+ func = call_func_return_pickleable
+ with self.subTest('no args, returns pickleable'):
+ res = interp.call(func)
+ self.assertEqual(res, [1, 2, 3])
+
+ func = call_func_return_unpickleable
+ with self.subTest('no args, returns unpickleable'):
+ with self.assertRaises(interpreters.NotShareableError):
+ interp.call(func)
+
+ def test_stateless_func_returns_arg(self):
+ interp = interpreters.create()
+
+ for arg in [
+ None,
+ 10,
+ 'spam!',
+ b'spam!',
+ (1, 2, 'spam!'),
+ memoryview(b'spam!'),
+ ]:
+ with self.subTest(f'shareable {arg!r}'):
+ assert _interpreters.is_shareable(arg)
+ res = interp.call(defs.spam_returns_arg, arg)
+ self.assertEqual(res, arg)
+
+ for arg in defs.STATELESS_FUNCTIONS:
+ with self.subTest(f'stateless func {arg!r}'):
+ res = interp.call(defs.spam_returns_arg, arg)
+ self.assert_funcs_equal(res, arg)
+
+ for arg in defs.TOP_FUNCTIONS:
+ if arg in defs.STATELESS_FUNCTIONS:
+ continue
+ with self.subTest(f'stateful func {arg!r}'):
+ res = interp.call(defs.spam_returns_arg, arg)
+ self.assert_funcs_equal(res, arg)
+ assert is_pickleable(arg)
+
+ for arg in [
+ Ellipsis,
+ NotImplemented,
+ object(),
+ 2**1000,
+ [1, 2, 3],
+ {'a': 1, 'b': 2},
+ types.SimpleNamespace(x=42),
+ # builtin types
+ object,
+ type,
+ Exception,
+ ModuleNotFoundError,
+ # builtin exceptions
+ Exception('uh-oh!'),
+ ModuleNotFoundError('mymodule'),
+            # builtin functions
+ len,
+ sys.exit,
+ # user classes
+ *defs.TOP_CLASSES,
+ *(c(*a) for c, a in defs.TOP_CLASSES.items()
+ if c not in defs.CLASSES_WITHOUT_EQUALITY),
+ ]:
+ with self.subTest(f'pickleable {arg!r}'):
+ res = interp.call(defs.spam_returns_arg, arg)
+ if type(arg) is object:
+ self.assertIs(type(res), object)
+ elif isinstance(arg, BaseException):
+ self.assert_exceptions_equal(res, arg)
+ else:
+ self.assertEqual(res, arg)
+ assert is_pickleable(arg)
+
+ for arg in [
+ types.MappingProxyType({}),
+ *(f for f in defs.NESTED_FUNCTIONS
+ if f not in defs.STATELESS_FUNCTIONS),
+ ]:
+ with self.subTest(f'unpickleable {arg!r}'):
+ assert not _interpreters.is_shareable(arg)
+ assert not is_pickleable(arg)
+ with self.assertRaises(interpreters.NotShareableError):
+ interp.call(defs.spam_returns_arg, arg)
+
+ def test_full_args(self):
+ interp = interpreters.create()
+ expected = (1, 2, 3, 4, 5, 6, ('?',), {'g': 7, 'h': 8})
+ func = defs.spam_full_args
+ res = interp.call(func, 1, 2, 3, 4, '?', e=5, f=6, g=7, h=8)
+ self.assertEqual(res, expected)
+
+ def test_full_defaults(self):
+ # pickleable, but not stateless
+ interp = interpreters.create()
+ expected = (-1, -2, -3, -4, -5, -6, (), {'g': 8, 'h': 9})
+ res = interp.call(defs.spam_full_args_with_defaults, g=8, h=9)
+ self.assertEqual(res, expected)
+
+ def test_modified_arg(self):
+ interp = interpreters.create()
+ script = dedent("""
+ a = 7
+ b = 2
+ c = a ** b
+ """)
+ ns = {}
+ expected = {'a': 7, 'b': 2, 'c': 49}
+ res = interp.call(call_func_exec_wrapper, script, ns)
+ obj, resns, resid = res
+ del resns['__builtins__']
+ self.assertIsNone(obj)
+ self.assertEqual(ns, {})
+ self.assertEqual(resns, expected)
+ self.assertNotEqual(resid, id(ns))
+ self.assertNotEqual(resid, id(resns))
+
+ def test_func_in___main___valid(self):
+        # pickleable, already there
+
+ with os_helper.temp_dir() as tempdir:
+ def new_mod(name, text):
+ script_helper.make_script(tempdir, name, dedent(text))
+
+ def run(text):
+ name = 'myscript'
+ text = dedent(f"""
+ import sys
+ sys.path.insert(0, {tempdir!r})
+
+ """) + dedent(text)
+ filename = script_helper.make_script(tempdir, name, text)
+ res = script_helper.assert_python_ok(filename)
+ return res.out.decode('utf-8').strip()
+
+ # no module indirection
+ with self.subTest('no indirection'):
+ text = run(f"""
+ from test.support import interpreters
+
+ def spam():
+                    # This is a global var...
+ return __name__
+
+ if __name__ == '__main__':
+ interp = interpreters.create()
+ res = interp.call(spam)
+ print(res)
+ """)
+ self.assertEqual(text, '<fake __main__>')
+
+ # indirect as func, direct interp
+ new_mod('mymod', f"""
+ def run(interp, func):
+ return interp.call(func)
+ """)
+ with self.subTest('indirect as func, direct interp'):
+ text = run(f"""
+ from test.support import interpreters
+ import mymod
+
+ def spam():
+                    # This is a global var...
+ return __name__
+
+ if __name__ == '__main__':
+ interp = interpreters.create()
+ res = mymod.run(interp, spam)
+ print(res)
+ """)
+ self.assertEqual(text, '<fake __main__>')
+
+ # indirect as func, indirect interp
+ new_mod('mymod', f"""
+ from test.support import interpreters
+ def run(func):
+ interp = interpreters.create()
+ return interp.call(func)
+ """)
+ with self.subTest('indirect as func, indirect interp'):
+ text = run(f"""
+ import mymod
+
+ def spam():
+                    # This is a global var...
+ return __name__
+
+ if __name__ == '__main__':
+ res = mymod.run(spam)
+ print(res)
+ """)
+ self.assertEqual(text, '<fake __main__>')
+
+ def test_func_in___main___invalid(self):
+ interp = interpreters.create()
+
+ funcname = f'{__name__.replace(".", "_")}_spam_okay'
+ script = dedent(f"""
+ def {funcname}():
+            # This is a global var...
+ return __name__
+ """)
+
+ with self.subTest('pickleable, added dynamically'):
+ with defined_in___main__(funcname, script) as arg:
+ with self.assertRaises(interpreters.NotShareableError):
+ interp.call(defs.spam_returns_arg, arg)
+
+ with self.subTest('lying about __main__'):
+ with defined_in___main__(funcname, script, remove=True) as arg:
+ with self.assertRaises(interpreters.NotShareableError):
+ interp.call(defs.spam_returns_arg, arg)
+
+ def test_raises(self):
+ interp = interpreters.create()
+ with self.assertRaises(ExecutionFailed):
+ interp.call(call_func_failure)
+
+ with self.assert_fails(ValueError):
+ interp.call(call_func_complex, '???', exc=ValueError('spam'))
+
+ def test_call_valid(self):
+ interp = interpreters.create()
+
+ for i, (callable, args, kwargs, expected) in enumerate([
+ (call_func_noop, (), {}, None),
+ (call_func_ident, ('spamspamspam',), {}, 'spamspamspam'),
+ (call_func_return_shareable, (), {}, (1, None)),
+ (call_func_return_pickleable, (), {}, [1, 2, 3]),
+ (Spam.noop, (), {}, None),
+ (Spam.from_values, (), {}, Spam(())),
+ (Spam.from_values, (1, 2, 3), {}, Spam((1, 2, 3))),
+ (Spam, ('???',), {}, Spam('???')),
+ (Spam(101), (), {}, (101, (), {})),
+ (Spam(10101).run, (), {}, (10101, (), {})),
+ (call_func_complex, ('ident', 'spam'), {}, 'spam'),
+ (call_func_complex, ('full-ident', 'spam'), {}, ('spam', (), {})),
+ (call_func_complex, ('full-ident', 'spam', 'ham'), {'eggs': '!!!'},
+ ('spam', ('ham',), {'eggs': '!!!'})),
+ (call_func_complex, ('globals',), {}, __name__),
+ (call_func_complex, ('interpid',), {}, interp.id),
+ (call_func_complex, ('custom', 'spam!'), {}, Spam('spam!')),
]):
with self.subTest(f'success case #{i+1}'):
- res = interp.call(callable)
- self.assertIs(res, None)
+ res = interp.call(callable, *args, **kwargs)
+ self.assertEqual(res, expected)
+
+ def test_call_invalid(self):
+ interp = interpreters.create()
+
+ func = get_call_func_closure
+ with self.subTest(func):
+ with self.assertRaises(interpreters.NotShareableError):
+ interp.call(func, 42)
+
+ func = get_call_func_closure(42)
+ with self.subTest(func):
+ with self.assertRaises(interpreters.NotShareableError):
+ interp.call(func)
+
+ func = call_func_complex
+ op = 'closure'
+ with self.subTest(f'{func} ({op})'):
+ with self.assertRaises(interpreters.NotShareableError):
+ interp.call(func, op, value='~~~')
+
+ op = 'custom-inner'
+ with self.subTest(f'{func} ({op})'):
+ with self.assertRaises(interpreters.NotShareableError):
+ interp.call(func, op, 'eggs!')
+
+ def test_call_in_thread(self):
+ interp = interpreters.create()
for i, (callable, args, kwargs) in enumerate([
- (call_func_ident, ('spamspamspam',), {}),
- (get_call_func_closure, (42,), {}),
- (get_call_func_closure(42), (), {}),
+ (call_func_noop, (), {}),
+ (call_func_return_shareable, (), {}),
+ (call_func_return_pickleable, (), {}),
(Spam.from_values, (), {}),
(Spam.from_values, (1, 2, 3), {}),
- (Spam, ('???'), {}),
(Spam(101), (), {}),
(Spam(10101).run, (), {}),
+ (Spam.noop, (), {}),
(call_func_complex, ('ident', 'spam'), {}),
(call_func_complex, ('full-ident', 'spam'), {}),
(call_func_complex, ('full-ident', 'spam', 'ham'), {'eggs': '!!!'}),
(call_func_complex, ('globals',), {}),
(call_func_complex, ('interpid',), {}),
- (call_func_complex, ('closure',), {'value': '~~~'}),
(call_func_complex, ('custom', 'spam!'), {}),
- (call_func_complex, ('custom-inner', 'eggs!'), {}),
- (call_func_complex, ('???',), {'exc': ValueError('spam')}),
- ]):
- with self.subTest(f'invalid case #{i+1}'):
- with self.assertRaises(Exception):
- if args or kwargs:
- raise Exception((args, kwargs))
- interp.call(callable)
-
- with self.assertRaises(ExecutionFailed):
- interp.call(call_func_failure)
-
- def test_call_in_thread(self):
- interp = interpreters.create()
-
- for i, (callable, args, kwargs) in enumerate([
- (call_func_noop, (), {}),
- (call_func_return_shareable, (), {}),
- (call_func_return_not_shareable, (), {}),
- (Spam.noop, (), {}),
]):
with self.subTest(f'success case #{i+1}'):
with self.captured_thread_exception() as ctx:
- t = interp.call_in_thread(callable)
+ t = interp.call_in_thread(callable, *args, **kwargs)
t.join()
self.assertIsNone(ctx.caught)
for i, (callable, args, kwargs) in enumerate([
- (call_func_ident, ('spamspamspam',), {}),
(get_call_func_closure, (42,), {}),
(get_call_func_closure(42), (), {}),
- (Spam.from_values, (), {}),
- (Spam.from_values, (1, 2, 3), {}),
- (Spam, ('???'), {}),
- (Spam(101), (), {}),
- (Spam(10101).run, (), {}),
- (call_func_complex, ('ident', 'spam'), {}),
- (call_func_complex, ('full-ident', 'spam'), {}),
- (call_func_complex, ('full-ident', 'spam', 'ham'), {'eggs': '!!!'}),
- (call_func_complex, ('globals',), {}),
- (call_func_complex, ('interpid',), {}),
- (call_func_complex, ('closure',), {'value': '~~~'}),
- (call_func_complex, ('custom', 'spam!'), {}),
- (call_func_complex, ('custom-inner', 'eggs!'), {}),
- (call_func_complex, ('???',), {'exc': ValueError('spam')}),
]):
with self.subTest(f'invalid case #{i+1}'):
- if args or kwargs:
- continue
with self.captured_thread_exception() as ctx:
- t = interp.call_in_thread(callable)
+ t = interp.call_in_thread(callable, *args, **kwargs)
t.join()
self.assertIsNotNone(ctx.caught)
@@ -1593,18 +1954,14 @@ class LowLevelTests(TestBase):
with results:
exc = _interpreters.exec(interpid, script)
out = results.stdout()
- self.assertEqual(out, '')
- self.assert_ns_equal(exc, types.SimpleNamespace(
- type=types.SimpleNamespace(
- __name__='Exception',
- __qualname__='Exception',
- __module__='builtins',
- ),
- msg='uh-oh!',
+ expected = build_excinfo(
+ Exception, 'uh-oh!',
# We check these in other tests.
formatted=exc.formatted,
errdisplay=exc.errdisplay,
- ))
+ )
+ self.assertEqual(out, '')
+ self.assert_ns_equal(exc, expected)
with self.subTest('from C-API'):
with self.interpreter_from_capi() as interpid:
@@ -1616,25 +1973,50 @@ class LowLevelTests(TestBase):
self.assertEqual(exc.msg, 'it worked!')
def test_call(self):
- with self.subTest('no args'):
- interpid = _interpreters.create()
- exc = _interpreters.call(interpid, call_func_return_shareable)
- self.assertIs(exc, None)
+ interpid = _interpreters.create()
+
+ # Here we focus on basic args and return values.
+ # See TestInterpreterCall for full operational coverage,
+ # including supported callables.
+
+ with self.subTest('no args, return None'):
+ func = defs.spam_minimal
+ res, exc = _interpreters.call(interpid, func)
+ self.assertIsNone(exc)
+ self.assertIsNone(res)
+
+ with self.subTest('empty args, return None'):
+ func = defs.spam_minimal
+ res, exc = _interpreters.call(interpid, func, (), {})
+ self.assertIsNone(exc)
+ self.assertIsNone(res)
+
+ with self.subTest('no args, return non-None'):
+ func = defs.script_with_return
+ res, exc = _interpreters.call(interpid, func)
+ self.assertIsNone(exc)
+ self.assertIs(res, True)
+
+ with self.subTest('full args, return non-None'):
+ expected = (1, 2, 3, 4, 5, 6, (7, 8), {'g': 9, 'h': 0})
+ func = defs.spam_full_args
+ args = (1, 2, 3, 4, 7, 8)
+ kwargs = dict(e=5, f=6, g=9, h=0)
+ res, exc = _interpreters.call(interpid, func, args, kwargs)
+ self.assertIsNone(exc)
+ self.assertEqual(res, expected)
with self.subTest('uncaught exception'):
- interpid = _interpreters.create()
- exc = _interpreters.call(interpid, call_func_failure)
- self.assertEqual(exc, types.SimpleNamespace(
- type=types.SimpleNamespace(
- __name__='Exception',
- __qualname__='Exception',
- __module__='builtins',
- ),
- msg='spam!',
+ func = defs.spam_raises
+ res, exc = _interpreters.call(interpid, func)
+ expected = build_excinfo(
+ Exception, 'spam!',
# We check these in other tests.
formatted=exc.formatted,
errdisplay=exc.errdisplay,
- ))
+ )
+ self.assertIsNone(res)
+ self.assertEqual(exc, expected)
@requires_test_modules
def test_set___main___attrs(self):
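
The low-level test above reflects a signature change: _interpreters.call() now returns a (result, exception) pair and takes explicit args/kwargs. A minimal sketch of consuming that shape — the private _interpreters module and its behaviour are assumptions taken from this branch, not a stable API:

    import _interpreters  # private module; subject to change

    def run_in_subinterp(func, args=(), kwargs=None):
        interpid = _interpreters.create()
        try:
            res, exc = _interpreters.call(interpid, func, args, kwargs or {})
            if exc is not None:
                # exc is a snapshot namespace: type, msg, formatted, errdisplay
                raise RuntimeError(f"{exc.type.__name__}: {exc.msg}")
            return res
        finally:
            _interpreters.destroy(interpid)
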
diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py
index 168e66c5a3f..0c921ffbc25 100644
--- a/Lib/test/test_io.py
+++ b/Lib/test/test_io.py
@@ -1062,6 +1062,37 @@ class IOTest(unittest.TestCase):
# Silence destructor error
R.flush = lambda self: None
+ @threading_helper.requires_working_threading()
+ def test_write_readline_races(self):
+ # gh-134908: Concurrent iteration over a file caused races
+ thread_count = 2
+ write_count = 100
+ read_count = 100
+
+ def writer(file, barrier):
+ barrier.wait()
+ for _ in range(write_count):
+ file.write("x")
+
+ def reader(file, barrier):
+ barrier.wait()
+ for _ in range(read_count):
+ for line in file:
+ self.assertEqual(line, "")
+
+ with self.open(os_helper.TESTFN, "w+") as f:
+ barrier = threading.Barrier(thread_count + 1)
+ reader = threading.Thread(target=reader, args=(f, barrier))
+ writers = [threading.Thread(target=writer, args=(f, barrier))
+ for _ in range(thread_count)]
+ with threading_helper.catch_threading_exception() as cm:
+ with threading_helper.start_threads(writers + [reader]):
+ pass
+ self.assertIsNone(cm.exc_type)
+
+ self.assertEqual(os.stat(os_helper.TESTFN).st_size,
+ write_count * thread_count)
+
class CIOTest(IOTest):
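
Stripped of the unittest plumbing, the race exercised by test_write_readline_races boils down to concurrent write() calls and file iteration on one shared text file. A rough stand-alone reproduction (file path and counts are arbitrary):

    import os
    import threading

    def hammer(path, writers=2, writes=100, reads=100):
        barrier = threading.Barrier(writers + 1)

        def write(f):
            barrier.wait()
            for _ in range(writes):
                f.write("x")

        def read(f):
            barrier.wait()
            for _ in range(reads):
                for _line in f:        # concurrent iteration used to race
                    pass

        with open(path, "w+") as f:
            threads = [threading.Thread(target=write, args=(f,))
                       for _ in range(writers)]
            threads.append(threading.Thread(target=read, args=(f,)))
            for t in threads:
                t.start()
            for t in threads:
                t.join()
        # after close/flush, every written byte must be accounted for
        return os.stat(path).st_size == writers * writes
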
diff --git a/Lib/test/test_ioctl.py b/Lib/test/test_ioctl.py
index 3c7a58aa2bc..277d2fc99ea 100644
--- a/Lib/test/test_ioctl.py
+++ b/Lib/test/test_ioctl.py
@@ -5,7 +5,7 @@ import sys
import threading
import unittest
from test import support
-from test.support import threading_helper
+from test.support import os_helper, threading_helper
from test.support.import_helper import import_module
fcntl = import_module('fcntl')
termios = import_module('termios')
@@ -201,6 +201,17 @@ class IoctlTestsPty(unittest.TestCase):
new_winsz = struct.unpack("HHHH", result)
self.assertEqual(new_winsz[:2], (20, 40))
+ @unittest.skipUnless(hasattr(fcntl, 'FICLONE'), 'need fcntl.FICLONE')
+ def test_bad_fd(self):
+ # gh-134744: Test error handling
+ fd = os_helper.make_bad_fd()
+ with self.assertRaises(OSError):
+ fcntl.ioctl(fd, fcntl.FICLONE, fd)
+ with self.assertRaises(OSError):
+ fcntl.ioctl(fd, fcntl.FICLONE, b'\0' * 10)
+ with self.assertRaises(OSError):
+ fcntl.ioctl(fd, fcntl.FICLONE, b'\0' * 2048)
+
if __name__ == "__main__":
unittest.main()
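
The new ioctl test only needs an invalid descriptor and a FICLONE request; roughly the same check without the test-suite helpers, assuming a platform that exposes fcntl.FICLONE (the same condition the skipUnless guard above enforces):

    import fcntl
    import os

    def ficlone_bad_fd_raises():
        fd = os.dup(0)
        os.close(fd)                    # fd is now guaranteed to be invalid
        for arg in (fd, b"\0" * 10, b"\0" * 2048):
            try:
                fcntl.ioctl(fd, fcntl.FICLONE, arg)
            except OSError:
                continue                # expected: the error is reported, not ignored
            raise AssertionError("ioctl on a bad fd should fail")
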
diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py
index ee95454e64b..db1c38243e2 100644
--- a/Lib/test/test_ipaddress.py
+++ b/Lib/test/test_ipaddress.py
@@ -399,14 +399,16 @@ class AddressTestCase_v6(BaseTestCase, CommonTestMixin_v6):
def test_bad_address_split_v6_too_long(self):
def assertBadSplit(addr):
- msg = r"At most 39 characters expected in %s"
- with self.assertAddressError(msg, repr(re.escape(addr[:14]))):
+ msg = r"At most 45 characters expected in '%s"
+ with self.assertAddressError(msg, re.escape(addr[:45])):
ipaddress.IPv6Address(addr)
# Long IPv6 address
long_addr = ("0:" * 10000) + "0"
assertBadSplit(long_addr)
assertBadSplit(long_addr + "%zoneid")
+ assertBadSplit(long_addr + ":255.255.255.255")
+ assertBadSplit(long_addr + ":ffff:255.255.255.255")
def test_bad_address_split_v6_too_many_parts(self):
def assertBadSplit(addr):
@@ -2189,6 +2191,11 @@ class IpaddrUnitTest(unittest.TestCase):
self.assertEqual(ipaddress.ip_address('FFFF::192.0.2.1'),
ipaddress.ip_address('FFFF::c000:201'))
+ self.assertEqual(ipaddress.ip_address('0000:0000:0000:0000:0000:FFFF:192.168.255.255'),
+ ipaddress.ip_address('::ffff:c0a8:ffff'))
+ self.assertEqual(ipaddress.ip_address('FFFF:0000:0000:0000:0000:0000:192.168.255.255'),
+ ipaddress.ip_address('ffff::c0a8:ffff'))
+
self.assertEqual(ipaddress.ip_address('::FFFF:192.0.2.1%scope'),
ipaddress.ip_address('::FFFF:c000:201%scope'))
self.assertEqual(ipaddress.ip_address('FFFF::192.0.2.1%scope'),
@@ -2201,6 +2208,10 @@ class IpaddrUnitTest(unittest.TestCase):
ipaddress.ip_address('::FFFF:c000:201%scope'))
self.assertNotEqual(ipaddress.ip_address('FFFF::192.0.2.1'),
ipaddress.ip_address('FFFF::c000:201%scope'))
+ self.assertEqual(ipaddress.ip_address('0000:0000:0000:0000:0000:FFFF:192.168.255.255%scope'),
+ ipaddress.ip_address('::ffff:c0a8:ffff%scope'))
+ self.assertEqual(ipaddress.ip_address('FFFF:0000:0000:0000:0000:0000:192.168.255.255%scope'),
+ ipaddress.ip_address('ffff::c0a8:ffff%scope'))
def testIPVersion(self):
self.assertEqual(ipaddress.IPv4Address.version, 4)
@@ -2610,6 +2621,10 @@ class IpaddrUnitTest(unittest.TestCase):
'::7:6:5:4:3:2:0': '0:7:6:5:4:3:2:0/128',
'7:6:5:4:3:2:1::': '7:6:5:4:3:2:1:0/128',
'0:6:5:4:3:2:1::': '0:6:5:4:3:2:1:0/128',
+ '0000:0000:0000:0000:0000:0000:255.255.255.255': '::ffff:ffff/128',
+ '0000:0000:0000:0000:0000:ffff:255.255.255.255': '::ffff:255.255.255.255/128',
+ 'ffff:ffff:ffff:ffff:ffff:ffff:255.255.255.255':
+ 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128',
}
for uncompressed, compressed in list(test_addresses.items()):
self.assertEqual(compressed, str(ipaddress.IPv6Interface(
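
The added cases confirm that a trailing dotted quad parses both in the usual IPv4-mapped slot and after an arbitrary leading group, with and without a zone id. The same equivalences, taken directly from the assertions above:

    import ipaddress

    mapped = ipaddress.ip_address("0000:0000:0000:0000:0000:FFFF:192.168.255.255")
    assert mapped == ipaddress.ip_address("::ffff:c0a8:ffff")

    mixed = ipaddress.ip_address("FFFF:0000:0000:0000:0000:0000:192.168.255.255")
    assert mixed == ipaddress.ip_address("ffff::c0a8:ffff")
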
diff --git a/Lib/test/test_json/test_dump.py b/Lib/test/test_json/test_dump.py
index 13b40020781..39470754003 100644
--- a/Lib/test/test_json/test_dump.py
+++ b/Lib/test/test_json/test_dump.py
@@ -22,6 +22,14 @@ class TestDump:
self.assertIn('valid_key', o)
self.assertNotIn(b'invalid_key', o)
+ def test_dump_skipkeys_indent_empty(self):
+ v = {b'invalid_key': False}
+ self.assertEqual(self.json.dumps(v, skipkeys=True, indent=4), '{}')
+
+ def test_skipkeys_indent(self):
+ v = {b'invalid_key': False, 'valid_key': True}
+ self.assertEqual(self.json.dumps(v, skipkeys=True, indent=4), '{\n "valid_key": true\n}')
+
def test_encode_truefalse(self):
self.assertEqual(self.dumps(
{True: False, False: True}, sort_keys=True),
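
The two new dump tests pin down how skipkeys interacts with indent: an object whose keys are all skipped collapses to '{}' with no stray newline, and a partially skipped object keeps clean separators. Both are reproducible with the stdlib encoder as-is:

    import json

    print(json.dumps({b"invalid_key": False}, skipkeys=True, indent=4))
    # {}

    print(json.dumps({b"invalid_key": False, "valid_key": True},
                     skipkeys=True, indent=4))
    # {
    #     "valid_key": true
    # }
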
diff --git a/Lib/test/test_list.py b/Lib/test/test_list.py
index 6894fba2ad1..223f34fb696 100644
--- a/Lib/test/test_list.py
+++ b/Lib/test/test_list.py
@@ -365,5 +365,20 @@ class ListTest(list_tests.CommonTest):
rc, _, _ = assert_python_ok("-c", code)
self.assertEqual(rc, 0)
+ def test_list_overwrite_local(self):
+ """Test that overwriting the last reference to the
+ iterable doesn't prematurely free the iterable"""
+
+ def foo(x):
+ self.assertEqual(sys.getrefcount(x), 1)
+ r = 0
+ for i in x:
+ r += i
+ x = None
+ return r
+
+ self.assertEqual(foo(list(range(10))), 45)
+
+
if __name__ == "__main__":
unittest.main()
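
test_list_overwrite_local depends on the for-loop holding its own reference to the iterable, so rebinding the only named reference inside the loop must not free the list. The same pattern outside the test class (the exact getrefcount value asserted above is a detail of this build and is left out here):

    def sum_and_drop(x):
        total = 0
        for i in x:        # the loop's iterator keeps the list alive
            total += i
            x = None       # rebinding the local must not free it prematurely
        return total

    assert sum_and_drop(list(range(10))) == 45
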
diff --git a/Lib/test/test_math.py b/Lib/test/test_math.py
index 913a60bf9e0..384ad5c828d 100644
--- a/Lib/test/test_math.py
+++ b/Lib/test/test_math.py
@@ -1214,6 +1214,12 @@ class MathTests(unittest.TestCase):
self.assertEqual(math.ldexp(NINF, n), NINF)
self.assertTrue(math.isnan(math.ldexp(NAN, n)))
+ @requires_IEEE_754
+ def testLdexp_denormal(self):
+ # Denormal output incorrectly rounded (truncated)
+ # on some Windows builds.
+ self.assertEqual(math.ldexp(6993274598585239, -1126), 1e-323)
+
def testLog(self):
self.assertRaises(TypeError, math.log)
self.assertRaises(TypeError, math.log, 1, 2, 3)
@@ -1967,6 +1973,28 @@ class MathTests(unittest.TestCase):
self.assertFalse(math.isfinite(float("inf")))
self.assertFalse(math.isfinite(float("-inf")))
+ def testIsnormal(self):
+ self.assertTrue(math.isnormal(1.25))
+ self.assertTrue(math.isnormal(-1.0))
+ self.assertFalse(math.isnormal(0.0))
+ self.assertFalse(math.isnormal(-0.0))
+ self.assertFalse(math.isnormal(INF))
+ self.assertFalse(math.isnormal(NINF))
+ self.assertFalse(math.isnormal(NAN))
+ self.assertFalse(math.isnormal(FLOAT_MIN/2))
+ self.assertFalse(math.isnormal(-FLOAT_MIN/2))
+
+ def testIssubnormal(self):
+ self.assertFalse(math.issubnormal(1.25))
+ self.assertFalse(math.issubnormal(-1.0))
+ self.assertFalse(math.issubnormal(0.0))
+ self.assertFalse(math.issubnormal(-0.0))
+ self.assertFalse(math.issubnormal(INF))
+ self.assertFalse(math.issubnormal(NINF))
+ self.assertFalse(math.issubnormal(NAN))
+ self.assertTrue(math.issubnormal(FLOAT_MIN/2))
+ self.assertTrue(math.issubnormal(-FLOAT_MIN/2))
+
def testIsnan(self):
self.assertTrue(math.isnan(float("nan")))
self.assertTrue(math.isnan(float("-nan")))
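
The new testIsnormal/testIssubnormal cases treat math.isnormal() and math.issubnormal() as black boxes; assuming IEEE-754 doubles and that both functions exist on this branch, they can be cross-checked against sys.float_info:

    import math
    import sys

    def issubnormal_ref(x):
        # subnormal: non-zero magnitude strictly below the smallest normal float
        return 0.0 < abs(x) < sys.float_info.min

    tiny = sys.float_info.min / 2       # FLOAT_MIN/2 in the tests above
    assert math.issubnormal(tiny) and not math.isnormal(tiny)
    assert math.isnormal(1.25) and not math.issubnormal(1.25)
    assert issubnormal_ref(tiny) and not issubnormal_ref(1.25)
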
diff --git a/Lib/test/test_monitoring.py b/Lib/test/test_monitoring.py
index 263e4e6f394..a932ac80117 100644
--- a/Lib/test/test_monitoring.py
+++ b/Lib/test/test_monitoring.py
@@ -2157,6 +2157,21 @@ class TestRegressions(MonitoringTestBase, unittest.TestCase):
sys.monitoring.restart_events()
sys.monitoring.set_events(0, 0)
+ def test_134879(self):
+ # gh-134789
+ # Specialized FOR_ITER not incrementing index
+ def foo():
+ t = 0
+ for i in [1,2,3,4]:
+ t += i
+ self.assertEqual(t, 10)
+
+ sys.monitoring.use_tool_id(0, "test")
+ self.addCleanup(sys.monitoring.free_tool_id, 0)
+ sys.monitoring.set_local_events(0, foo.__code__, E.BRANCH_LEFT | E.BRANCH_RIGHT)
+ foo()
+ sys.monitoring.set_local_events(0, foo.__code__, 0)
+
class TestOptimizer(MonitoringTestBase, unittest.TestCase):
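
test_134879 enables per-code-object branch events and then simply runs the loop, checking that the specialized FOR_ITER still advances its index. A pared-down sketch of that sequence; tool id 0 and the BRANCH_LEFT/BRANCH_RIGHT event names are taken from the test and assumed to exist on this branch:

    import sys

    E = sys.monitoring.events

    def count():
        t = 0
        for i in [1, 2, 3, 4]:
            t += i
        return t

    sys.monitoring.use_tool_id(0, "example")
    try:
        sys.monitoring.set_local_events(0, count.__code__,
                                        E.BRANCH_LEFT | E.BRANCH_RIGHT)
        assert count() == 10            # iteration must not skip or repeat items
        sys.monitoring.set_local_events(0, count.__code__, 0)
    finally:
        sys.monitoring.free_tool_id(0)
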
diff --git a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py
index c3b0bdaebc2..b891d0734ca 100644
--- a/Lib/test/test_ntpath.py
+++ b/Lib/test/test_ntpath.py
@@ -6,6 +6,7 @@ import subprocess
import sys
import unittest
import warnings
+from ntpath import ALLOW_MISSING
from test.support import TestFailed, cpython_only, os_helper
from test.support.os_helper import FakePath
from test import test_genericpath
@@ -76,6 +77,27 @@ def tester(fn, wantResult):
%(str(fn), str(wantResult), repr(gotResult)))
+def _parameterize(*parameters):
+ """Simplistic decorator to parametrize a test
+
+ Runs the decorated test multiple times in subTest, with a value from
+ 'parameters' passed as an extra positional argument.
+ Calls doCleanups() after each run.
+
+ Not for general use. Intended to avoid indenting for easier backports.
+
+ See https://discuss.python.org/t/91827 for discussion of generalizations.
+ """
+ def _parametrize_decorator(func):
+ def _parameterized(self, *args, **kwargs):
+ for parameter in parameters:
+ with self.subTest(parameter):
+ func(self, *args, parameter, **kwargs)
+ self.doCleanups()
+ return _parameterized
+ return _parametrize_decorator
+
+
class NtpathTestCase(unittest.TestCase):
def assertPathEqual(self, path1, path2):
if path1 == path2 or _norm(path1) == _norm(path2):
@@ -474,6 +496,27 @@ class TestNtpath(NtpathTestCase):
tester("ntpath.realpath('.\\.')", expected)
tester("ntpath.realpath('\\'.join(['.'] * 100))", expected)
+ def test_realpath_curdir_strict(self):
+ expected = ntpath.normpath(os.getcwd())
+ tester("ntpath.realpath('.', strict=True)", expected)
+ tester("ntpath.realpath('./.', strict=True)", expected)
+ tester("ntpath.realpath('/'.join(['.'] * 100), strict=True)", expected)
+ tester("ntpath.realpath('.\\.', strict=True)", expected)
+ tester("ntpath.realpath('\\'.join(['.'] * 100), strict=True)", expected)
+
+ def test_realpath_curdir_missing_ok(self):
+ expected = ntpath.normpath(os.getcwd())
+ tester("ntpath.realpath('.', strict=ALLOW_MISSING)",
+ expected)
+ tester("ntpath.realpath('./.', strict=ALLOW_MISSING)",
+ expected)
+ tester("ntpath.realpath('/'.join(['.'] * 100), strict=ALLOW_MISSING)",
+ expected)
+ tester("ntpath.realpath('.\\.', strict=ALLOW_MISSING)",
+ expected)
+ tester("ntpath.realpath('\\'.join(['.'] * 100), strict=ALLOW_MISSING)",
+ expected)
+
def test_realpath_pardir(self):
expected = ntpath.normpath(os.getcwd())
tester("ntpath.realpath('..')", ntpath.dirname(expected))
@@ -486,24 +529,59 @@ class TestNtpath(NtpathTestCase):
tester("ntpath.realpath('\\'.join(['..'] * 50))",
ntpath.splitdrive(expected)[0] + '\\')
+ def test_realpath_pardir_strict(self):
+ expected = ntpath.normpath(os.getcwd())
+ tester("ntpath.realpath('..', strict=True)", ntpath.dirname(expected))
+ tester("ntpath.realpath('../..', strict=True)",
+ ntpath.dirname(ntpath.dirname(expected)))
+ tester("ntpath.realpath('/'.join(['..'] * 50), strict=True)",
+ ntpath.splitdrive(expected)[0] + '\\')
+ tester("ntpath.realpath('..\\..', strict=True)",
+ ntpath.dirname(ntpath.dirname(expected)))
+ tester("ntpath.realpath('\\'.join(['..'] * 50), strict=True)",
+ ntpath.splitdrive(expected)[0] + '\\')
+
+ def test_realpath_pardir_missing_ok(self):
+ expected = ntpath.normpath(os.getcwd())
+ tester("ntpath.realpath('..', strict=ALLOW_MISSING)",
+ ntpath.dirname(expected))
+ tester("ntpath.realpath('../..', strict=ALLOW_MISSING)",
+ ntpath.dirname(ntpath.dirname(expected)))
+ tester("ntpath.realpath('/'.join(['..'] * 50), strict=ALLOW_MISSING)",
+ ntpath.splitdrive(expected)[0] + '\\')
+ tester("ntpath.realpath('..\\..', strict=ALLOW_MISSING)",
+ ntpath.dirname(ntpath.dirname(expected)))
+ tester("ntpath.realpath('\\'.join(['..'] * 50), strict=ALLOW_MISSING)",
+ ntpath.splitdrive(expected)[0] + '\\')
+
@os_helper.skip_unless_symlink
@unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
- def test_realpath_basic(self):
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_basic(self, kwargs):
ABSTFN = ntpath.abspath(os_helper.TESTFN)
open(ABSTFN, "wb").close()
self.addCleanup(os_helper.unlink, ABSTFN)
self.addCleanup(os_helper.unlink, ABSTFN + "1")
os.symlink(ABSTFN, ABSTFN + "1")
- self.assertPathEqual(ntpath.realpath(ABSTFN + "1"), ABSTFN)
- self.assertPathEqual(ntpath.realpath(os.fsencode(ABSTFN + "1")),
+ self.assertPathEqual(ntpath.realpath(ABSTFN + "1", **kwargs), ABSTFN)
+ self.assertPathEqual(ntpath.realpath(os.fsencode(ABSTFN + "1"), **kwargs),
os.fsencode(ABSTFN))
# gh-88013: call ntpath.realpath with binary drive name may raise a
# TypeError. The drive should not exist to reproduce the bug.
drives = {f"{c}:\\" for c in string.ascii_uppercase} - set(os.listdrives())
d = drives.pop().encode()
- self.assertEqual(ntpath.realpath(d), d)
+ self.assertEqual(ntpath.realpath(d, strict=False), d)
+
+ # gh-106242: Embedded nulls and non-strict fallback to abspath
+ if kwargs:
+ with self.assertRaises(OSError):
+ ntpath.realpath(os_helper.TESTFN + "\0spam",
+ **kwargs)
+ else:
+ self.assertEqual(ABSTFN + "\0spam",
+ ntpath.realpath(os_helper.TESTFN + "\0spam", **kwargs))
@os_helper.skip_unless_symlink
@unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
@@ -526,51 +604,66 @@ class TestNtpath(NtpathTestCase):
self.assertEqual(realpath(path, strict=False), path)
# gh-106242: Embedded nulls should raise OSError (not ValueError)
self.assertRaises(OSError, realpath, path, strict=True)
+ self.assertRaises(OSError, realpath, path, strict=ALLOW_MISSING)
path = ABSTFNb + b'\x00'
self.assertEqual(realpath(path, strict=False), path)
self.assertRaises(OSError, realpath, path, strict=True)
+ self.assertRaises(OSError, realpath, path, strict=ALLOW_MISSING)
path = ABSTFN + '\\nonexistent\\x\x00'
self.assertEqual(realpath(path, strict=False), path)
self.assertRaises(OSError, realpath, path, strict=True)
+ self.assertRaises(OSError, realpath, path, strict=ALLOW_MISSING)
path = ABSTFNb + b'\\nonexistent\\x\x00'
self.assertEqual(realpath(path, strict=False), path)
self.assertRaises(OSError, realpath, path, strict=True)
+ self.assertRaises(OSError, realpath, path, strict=ALLOW_MISSING)
path = ABSTFN + '\x00\\..'
self.assertEqual(realpath(path, strict=False), os.getcwd())
self.assertEqual(realpath(path, strict=True), os.getcwd())
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), os.getcwd())
path = ABSTFNb + b'\x00\\..'
self.assertEqual(realpath(path, strict=False), os.getcwdb())
self.assertEqual(realpath(path, strict=True), os.getcwdb())
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), os.getcwdb())
path = ABSTFN + '\\nonexistent\\x\x00\\..'
self.assertEqual(realpath(path, strict=False), ABSTFN + '\\nonexistent')
self.assertRaises(OSError, realpath, path, strict=True)
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), ABSTFN + '\\nonexistent')
path = ABSTFNb + b'\\nonexistent\\x\x00\\..'
self.assertEqual(realpath(path, strict=False), ABSTFNb + b'\\nonexistent')
self.assertRaises(OSError, realpath, path, strict=True)
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), ABSTFNb + b'\\nonexistent')
+ @unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_invalid_unicode_paths(self, kwargs):
+ realpath = ntpath.realpath
+ ABSTFN = ntpath.abspath(os_helper.TESTFN)
+ ABSTFNb = os.fsencode(ABSTFN)
path = ABSTFNb + b'\xff'
- self.assertRaises(UnicodeDecodeError, realpath, path, strict=False)
- self.assertRaises(UnicodeDecodeError, realpath, path, strict=True)
+ self.assertRaises(UnicodeDecodeError, realpath, path, **kwargs)
+ self.assertRaises(UnicodeDecodeError, realpath, path, **kwargs)
path = ABSTFNb + b'\\nonexistent\\\xff'
- self.assertRaises(UnicodeDecodeError, realpath, path, strict=False)
- self.assertRaises(UnicodeDecodeError, realpath, path, strict=True)
+ self.assertRaises(UnicodeDecodeError, realpath, path, **kwargs)
+ self.assertRaises(UnicodeDecodeError, realpath, path, **kwargs)
path = ABSTFNb + b'\xff\\..'
- self.assertRaises(UnicodeDecodeError, realpath, path, strict=False)
- self.assertRaises(UnicodeDecodeError, realpath, path, strict=True)
+ self.assertRaises(UnicodeDecodeError, realpath, path, **kwargs)
+ self.assertRaises(UnicodeDecodeError, realpath, path, **kwargs)
path = ABSTFNb + b'\\nonexistent\\\xff\\..'
- self.assertRaises(UnicodeDecodeError, realpath, path, strict=False)
- self.assertRaises(UnicodeDecodeError, realpath, path, strict=True)
+ self.assertRaises(UnicodeDecodeError, realpath, path, **kwargs)
+ self.assertRaises(UnicodeDecodeError, realpath, path, **kwargs)
@os_helper.skip_unless_symlink
@unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
- def test_realpath_relative(self):
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_relative(self, kwargs):
ABSTFN = ntpath.abspath(os_helper.TESTFN)
open(ABSTFN, "wb").close()
self.addCleanup(os_helper.unlink, ABSTFN)
self.addCleanup(os_helper.unlink, ABSTFN + "1")
os.symlink(ABSTFN, ntpath.relpath(ABSTFN + "1"))
- self.assertPathEqual(ntpath.realpath(ABSTFN + "1"), ABSTFN)
+ self.assertPathEqual(ntpath.realpath(ABSTFN + "1", **kwargs), ABSTFN)
@os_helper.skip_unless_symlink
@unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
@@ -722,7 +815,62 @@ class TestNtpath(NtpathTestCase):
@os_helper.skip_unless_symlink
@unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
- def test_realpath_symlink_prefix(self):
+ def test_realpath_symlink_loops_raise(self):
+ # Symlink loops raise OSError in ALLOW_MISSING mode
+ ABSTFN = ntpath.abspath(os_helper.TESTFN)
+ self.addCleanup(os_helper.unlink, ABSTFN)
+ self.addCleanup(os_helper.unlink, ABSTFN + "1")
+ self.addCleanup(os_helper.unlink, ABSTFN + "2")
+ self.addCleanup(os_helper.unlink, ABSTFN + "y")
+ self.addCleanup(os_helper.unlink, ABSTFN + "c")
+ self.addCleanup(os_helper.unlink, ABSTFN + "a")
+ self.addCleanup(os_helper.unlink, ABSTFN + "x")
+
+ os.symlink(ABSTFN, ABSTFN)
+ self.assertRaises(OSError, ntpath.realpath, ABSTFN, strict=ALLOW_MISSING)
+
+ os.symlink(ABSTFN + "1", ABSTFN + "2")
+ os.symlink(ABSTFN + "2", ABSTFN + "1")
+ self.assertRaises(OSError, ntpath.realpath, ABSTFN + "1",
+ strict=ALLOW_MISSING)
+ self.assertRaises(OSError, ntpath.realpath, ABSTFN + "2",
+ strict=ALLOW_MISSING)
+ self.assertRaises(OSError, ntpath.realpath, ABSTFN + "1\\x",
+ strict=ALLOW_MISSING)
+
+ # Windows eliminates '..' components before resolving links;
+ # realpath is not expected to raise if this removes the loop.
+ self.assertPathEqual(ntpath.realpath(ABSTFN + "1\\.."),
+ ntpath.dirname(ABSTFN))
+ self.assertPathEqual(ntpath.realpath(ABSTFN + "1\\..\\x"),
+ ntpath.dirname(ABSTFN) + "\\x")
+
+ os.symlink(ABSTFN + "x", ABSTFN + "y")
+ self.assertPathEqual(ntpath.realpath(ABSTFN + "1\\..\\"
+ + ntpath.basename(ABSTFN) + "y"),
+ ABSTFN + "x")
+ self.assertRaises(
+ OSError, ntpath.realpath,
+ ABSTFN + "1\\..\\" + ntpath.basename(ABSTFN) + "1",
+ strict=ALLOW_MISSING)
+
+ os.symlink(ntpath.basename(ABSTFN) + "a\\b", ABSTFN + "a")
+ self.assertRaises(OSError, ntpath.realpath, ABSTFN + "a",
+ strict=ALLOW_MISSING)
+
+ os.symlink("..\\" + ntpath.basename(ntpath.dirname(ABSTFN))
+ + "\\" + ntpath.basename(ABSTFN) + "c", ABSTFN + "c")
+ self.assertRaises(OSError, ntpath.realpath, ABSTFN + "c",
+ strict=ALLOW_MISSING)
+
+ # Test using relative path as well.
+ self.assertRaises(OSError, ntpath.realpath, ntpath.basename(ABSTFN),
+ strict=ALLOW_MISSING)
+
+ @os_helper.skip_unless_symlink
+ @unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_symlink_prefix(self, kwargs):
ABSTFN = ntpath.abspath(os_helper.TESTFN)
self.addCleanup(os_helper.unlink, ABSTFN + "3")
self.addCleanup(os_helper.unlink, "\\\\?\\" + ABSTFN + "3.")
@@ -737,9 +885,9 @@ class TestNtpath(NtpathTestCase):
f.write(b'1')
os.symlink("\\\\?\\" + ABSTFN + "3.", ABSTFN + "3.link")
- self.assertPathEqual(ntpath.realpath(ABSTFN + "3link"),
+ self.assertPathEqual(ntpath.realpath(ABSTFN + "3link", **kwargs),
ABSTFN + "3")
- self.assertPathEqual(ntpath.realpath(ABSTFN + "3.link"),
+ self.assertPathEqual(ntpath.realpath(ABSTFN + "3.link", **kwargs),
"\\\\?\\" + ABSTFN + "3.")
# Resolved paths should be usable to open target files
@@ -749,14 +897,17 @@ class TestNtpath(NtpathTestCase):
self.assertEqual(f.read(), b'1')
# When the prefix is included, it is not stripped
- self.assertPathEqual(ntpath.realpath("\\\\?\\" + ABSTFN + "3link"),
+ self.assertPathEqual(ntpath.realpath("\\\\?\\" + ABSTFN + "3link", **kwargs),
"\\\\?\\" + ABSTFN + "3")
- self.assertPathEqual(ntpath.realpath("\\\\?\\" + ABSTFN + "3.link"),
+ self.assertPathEqual(ntpath.realpath("\\\\?\\" + ABSTFN + "3.link", **kwargs),
"\\\\?\\" + ABSTFN + "3.")
@unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
def test_realpath_nul(self):
tester("ntpath.realpath('NUL')", r'\\.\NUL')
+ tester("ntpath.realpath('NUL', strict=False)", r'\\.\NUL')
+ tester("ntpath.realpath('NUL', strict=True)", r'\\.\NUL')
+ tester("ntpath.realpath('NUL', strict=ALLOW_MISSING)", r'\\.\NUL')
@unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
@unittest.skipUnless(HAVE_GETSHORTPATHNAME, 'need _getshortpathname')
@@ -780,12 +931,20 @@ class TestNtpath(NtpathTestCase):
self.assertPathEqual(test_file_long, ntpath.realpath(test_file_short))
- with os_helper.change_cwd(test_dir_long):
- self.assertPathEqual(test_file_long, ntpath.realpath("file.txt"))
- with os_helper.change_cwd(test_dir_long.lower()):
- self.assertPathEqual(test_file_long, ntpath.realpath("file.txt"))
- with os_helper.change_cwd(test_dir_short):
- self.assertPathEqual(test_file_long, ntpath.realpath("file.txt"))
+ for kwargs in {}, {'strict': True}, {'strict': ALLOW_MISSING}:
+ with self.subTest(**kwargs):
+ with os_helper.change_cwd(test_dir_long):
+ self.assertPathEqual(
+ test_file_long,
+ ntpath.realpath("file.txt", **kwargs))
+ with os_helper.change_cwd(test_dir_long.lower()):
+ self.assertPathEqual(
+ test_file_long,
+ ntpath.realpath("file.txt", **kwargs))
+ with os_helper.change_cwd(test_dir_short):
+ self.assertPathEqual(
+ test_file_long,
+ ntpath.realpath("file.txt", **kwargs))
@unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
def test_realpath_permission(self):
@@ -806,12 +965,15 @@ class TestNtpath(NtpathTestCase):
# Automatic generation of short names may be disabled on
# NTFS volumes for the sake of performance.
# They're not supported at all on ReFS and exFAT.
- subprocess.run(
+ p = subprocess.run(
# Try to set the short name manually.
['fsutil.exe', 'file', 'setShortName', test_file, 'LONGFI~1.TXT'],
creationflags=subprocess.DETACHED_PROCESS
)
+ if p.returncode:
+ raise unittest.SkipTest('failed to set short name')
+
try:
self.assertPathEqual(test_file, ntpath.realpath(test_file_short))
except AssertionError:
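
Across these ntpath changes, strict effectively has three settings: False (never raise, fall back to lexical resolution), True (raise on any failure, including a missing path), and ALLOW_MISSING (resolve as far as possible and raise only for errors other than missing components, such as symlink loops or embedded NULs). A hedged illustration of the missing-path case, assuming ALLOW_MISSING is importable from os.path as well as from ntpath/posixpath:

    import os
    from os.path import realpath, ALLOW_MISSING

    missing = os.path.join(os.getcwd(), "no-such-dir", "file.txt")

    print(realpath(missing, strict=False))            # resolved lexically, no error
    print(realpath(missing, strict=ALLOW_MISSING))    # a missing tail is tolerated
    try:
        realpath(missing, strict=True)                # missing paths are an error here
    except OSError as exc:
        print("strict=True raised:", exc)
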
diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py
index f3f9895f529..c45ce6d3ef7 100644
--- a/Lib/test/test_posixpath.py
+++ b/Lib/test/test_posixpath.py
@@ -4,7 +4,8 @@ import posixpath
import random
import sys
import unittest
-from posixpath import realpath, abspath, dirname, basename
+from functools import partial
+from posixpath import realpath, abspath, dirname, basename, ALLOW_MISSING
from test import support
from test import test_genericpath
from test.support import import_helper
@@ -33,6 +34,27 @@ def skip_if_ABSTFN_contains_backslash(test):
msg = "ABSTFN is not a posix path - tests fail"
return [test, unittest.skip(msg)(test)][found_backslash]
+
+def _parameterize(*parameters):
+ """Simplistic decorator to parametrize a test
+
+ Runs the decorated test multiple times in subTest, with a value from
+ 'parameters' passed as an extra positional argument.
+ Does *not* call doCleanups() after each run.
+
+ Not for general use. Intended to avoid indenting for easier backports.
+
+ See https://discuss.python.org/t/91827 for discussion of generalizations.
+ """
+ def _parametrize_decorator(func):
+ def _parameterized(self, *args, **kwargs):
+ for parameter in parameters:
+ with self.subTest(parameter):
+ func(self, *args, parameter, **kwargs)
+ return _parameterized
+ return _parametrize_decorator
+
+
class PosixPathTest(unittest.TestCase):
def setUp(self):
@@ -442,32 +464,35 @@ class PosixPathTest(unittest.TestCase):
self.assertEqual(result, expected)
@skip_if_ABSTFN_contains_backslash
- def test_realpath_curdir(self):
- self.assertEqual(realpath('.'), os.getcwd())
- self.assertEqual(realpath('./.'), os.getcwd())
- self.assertEqual(realpath('/'.join(['.'] * 100)), os.getcwd())
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_curdir(self, kwargs):
+ self.assertEqual(realpath('.', **kwargs), os.getcwd())
+ self.assertEqual(realpath('./.', **kwargs), os.getcwd())
+ self.assertEqual(realpath('/'.join(['.'] * 100), **kwargs), os.getcwd())
- self.assertEqual(realpath(b'.'), os.getcwdb())
- self.assertEqual(realpath(b'./.'), os.getcwdb())
- self.assertEqual(realpath(b'/'.join([b'.'] * 100)), os.getcwdb())
+ self.assertEqual(realpath(b'.', **kwargs), os.getcwdb())
+ self.assertEqual(realpath(b'./.', **kwargs), os.getcwdb())
+ self.assertEqual(realpath(b'/'.join([b'.'] * 100), **kwargs), os.getcwdb())
@skip_if_ABSTFN_contains_backslash
- def test_realpath_pardir(self):
- self.assertEqual(realpath('..'), dirname(os.getcwd()))
- self.assertEqual(realpath('../..'), dirname(dirname(os.getcwd())))
- self.assertEqual(realpath('/'.join(['..'] * 100)), '/')
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_pardir(self, kwargs):
+ self.assertEqual(realpath('..', **kwargs), dirname(os.getcwd()))
+ self.assertEqual(realpath('../..', **kwargs), dirname(dirname(os.getcwd())))
+ self.assertEqual(realpath('/'.join(['..'] * 100), **kwargs), '/')
- self.assertEqual(realpath(b'..'), dirname(os.getcwdb()))
- self.assertEqual(realpath(b'../..'), dirname(dirname(os.getcwdb())))
- self.assertEqual(realpath(b'/'.join([b'..'] * 100)), b'/')
+ self.assertEqual(realpath(b'..', **kwargs), dirname(os.getcwdb()))
+ self.assertEqual(realpath(b'../..', **kwargs), dirname(dirname(os.getcwdb())))
+ self.assertEqual(realpath(b'/'.join([b'..'] * 100), **kwargs), b'/')
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_basic(self):
+ @_parameterize({}, {'strict': ALLOW_MISSING})
+ def test_realpath_basic(self, kwargs):
# Basic operation.
try:
os.symlink(ABSTFN+"1", ABSTFN)
- self.assertEqual(realpath(ABSTFN), ABSTFN+"1")
+ self.assertEqual(realpath(ABSTFN, **kwargs), ABSTFN+"1")
finally:
os_helper.unlink(ABSTFN)
@@ -487,90 +512,115 @@ class PosixPathTest(unittest.TestCase):
path = '/\x00'
self.assertRaises(ValueError, realpath, path, strict=False)
self.assertRaises(ValueError, realpath, path, strict=True)
+ self.assertRaises(ValueError, realpath, path, strict=ALLOW_MISSING)
path = b'/\x00'
self.assertRaises(ValueError, realpath, path, strict=False)
self.assertRaises(ValueError, realpath, path, strict=True)
+ self.assertRaises(ValueError, realpath, path, strict=ALLOW_MISSING)
path = '/nonexistent/x\x00'
self.assertRaises(ValueError, realpath, path, strict=False)
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
+ self.assertRaises(ValueError, realpath, path, strict=ALLOW_MISSING)
path = b'/nonexistent/x\x00'
self.assertRaises(ValueError, realpath, path, strict=False)
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
+ self.assertRaises(ValueError, realpath, path, strict=ALLOW_MISSING)
path = '/\x00/..'
self.assertRaises(ValueError, realpath, path, strict=False)
self.assertRaises(ValueError, realpath, path, strict=True)
+ self.assertRaises(ValueError, realpath, path, strict=ALLOW_MISSING)
path = b'/\x00/..'
self.assertRaises(ValueError, realpath, path, strict=False)
self.assertRaises(ValueError, realpath, path, strict=True)
+ self.assertRaises(ValueError, realpath, path, strict=ALLOW_MISSING)
+
path = '/nonexistent/x\x00/..'
self.assertRaises(ValueError, realpath, path, strict=False)
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
+ self.assertRaises(ValueError, realpath, path, strict=ALLOW_MISSING)
path = b'/nonexistent/x\x00/..'
self.assertRaises(ValueError, realpath, path, strict=False)
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
+ self.assertRaises(ValueError, realpath, path, strict=ALLOW_MISSING)
path = '/\udfff'
if sys.platform == 'win32':
self.assertEqual(realpath(path, strict=False), path)
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), path)
else:
self.assertRaises(UnicodeEncodeError, realpath, path, strict=False)
self.assertRaises(UnicodeEncodeError, realpath, path, strict=True)
+ self.assertRaises(UnicodeEncodeError, realpath, path, strict=ALLOW_MISSING)
path = '/nonexistent/\udfff'
if sys.platform == 'win32':
self.assertEqual(realpath(path, strict=False), path)
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), path)
else:
self.assertRaises(UnicodeEncodeError, realpath, path, strict=False)
+ self.assertRaises(UnicodeEncodeError, realpath, path, strict=ALLOW_MISSING)
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
path = '/\udfff/..'
if sys.platform == 'win32':
self.assertEqual(realpath(path, strict=False), '/')
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), '/')
else:
self.assertRaises(UnicodeEncodeError, realpath, path, strict=False)
self.assertRaises(UnicodeEncodeError, realpath, path, strict=True)
+ self.assertRaises(UnicodeEncodeError, realpath, path, strict=ALLOW_MISSING)
path = '/nonexistent/\udfff/..'
if sys.platform == 'win32':
self.assertEqual(realpath(path, strict=False), '/nonexistent')
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), '/nonexistent')
else:
self.assertRaises(UnicodeEncodeError, realpath, path, strict=False)
+ self.assertRaises(UnicodeEncodeError, realpath, path, strict=ALLOW_MISSING)
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
path = b'/\xff'
if sys.platform == 'win32':
self.assertRaises(UnicodeDecodeError, realpath, path, strict=False)
self.assertRaises(UnicodeDecodeError, realpath, path, strict=True)
+ self.assertRaises(UnicodeDecodeError, realpath, path, strict=ALLOW_MISSING)
else:
self.assertEqual(realpath(path, strict=False), path)
if support.is_wasi:
self.assertRaises(OSError, realpath, path, strict=True)
+ self.assertRaises(OSError, realpath, path, strict=ALLOW_MISSING)
else:
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), path)
path = b'/nonexistent/\xff'
if sys.platform == 'win32':
self.assertRaises(UnicodeDecodeError, realpath, path, strict=False)
+ self.assertRaises(UnicodeDecodeError, realpath, path, strict=ALLOW_MISSING)
else:
self.assertEqual(realpath(path, strict=False), path)
if support.is_wasi:
self.assertRaises(OSError, realpath, path, strict=True)
+ self.assertRaises(OSError, realpath, path, strict=ALLOW_MISSING)
else:
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_relative(self):
+ @_parameterize({}, {'strict': ALLOW_MISSING})
+ def test_realpath_relative(self, kwargs):
try:
os.symlink(posixpath.relpath(ABSTFN+"1"), ABSTFN)
- self.assertEqual(realpath(ABSTFN), ABSTFN+"1")
+ self.assertEqual(realpath(ABSTFN, **kwargs), ABSTFN+"1")
finally:
os_helper.unlink(ABSTFN)
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_missing_pardir(self):
+ @_parameterize({}, {'strict': ALLOW_MISSING})
+ def test_realpath_missing_pardir(self, kwargs):
try:
os.symlink(TESTFN + "1", TESTFN)
- self.assertEqual(realpath("nonexistent/../" + TESTFN), ABSTFN + "1")
+ self.assertEqual(
+ realpath("nonexistent/../" + TESTFN, **kwargs), ABSTFN + "1")
finally:
os_helper.unlink(TESTFN)
@@ -617,37 +667,38 @@ class PosixPathTest(unittest.TestCase):
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_symlink_loops_strict(self):
+ @_parameterize({'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_symlink_loops_strict(self, kwargs):
# Bug #43757, raise OSError if we get into an infinite symlink loop in
- # strict mode.
+ # the strict modes.
try:
os.symlink(ABSTFN, ABSTFN)
- self.assertRaises(OSError, realpath, ABSTFN, strict=True)
+ self.assertRaises(OSError, realpath, ABSTFN, **kwargs)
os.symlink(ABSTFN+"1", ABSTFN+"2")
os.symlink(ABSTFN+"2", ABSTFN+"1")
- self.assertRaises(OSError, realpath, ABSTFN+"1", strict=True)
- self.assertRaises(OSError, realpath, ABSTFN+"2", strict=True)
+ self.assertRaises(OSError, realpath, ABSTFN+"1", **kwargs)
+ self.assertRaises(OSError, realpath, ABSTFN+"2", **kwargs)
- self.assertRaises(OSError, realpath, ABSTFN+"1/x", strict=True)
- self.assertRaises(OSError, realpath, ABSTFN+"1/..", strict=True)
- self.assertRaises(OSError, realpath, ABSTFN+"1/../x", strict=True)
+ self.assertRaises(OSError, realpath, ABSTFN+"1/x", **kwargs)
+ self.assertRaises(OSError, realpath, ABSTFN+"1/..", **kwargs)
+ self.assertRaises(OSError, realpath, ABSTFN+"1/../x", **kwargs)
os.symlink(ABSTFN+"x", ABSTFN+"y")
self.assertRaises(OSError, realpath,
- ABSTFN+"1/../" + basename(ABSTFN) + "y", strict=True)
+ ABSTFN+"1/../" + basename(ABSTFN) + "y", **kwargs)
self.assertRaises(OSError, realpath,
- ABSTFN+"1/../" + basename(ABSTFN) + "1", strict=True)
+ ABSTFN+"1/../" + basename(ABSTFN) + "1", **kwargs)
os.symlink(basename(ABSTFN) + "a/b", ABSTFN+"a")
- self.assertRaises(OSError, realpath, ABSTFN+"a", strict=True)
+ self.assertRaises(OSError, realpath, ABSTFN+"a", **kwargs)
os.symlink("../" + basename(dirname(ABSTFN)) + "/" +
basename(ABSTFN) + "c", ABSTFN+"c")
- self.assertRaises(OSError, realpath, ABSTFN+"c", strict=True)
+ self.assertRaises(OSError, realpath, ABSTFN+"c", **kwargs)
# Test using relative path as well.
with os_helper.change_cwd(dirname(ABSTFN)):
- self.assertRaises(OSError, realpath, basename(ABSTFN), strict=True)
+ self.assertRaises(OSError, realpath, basename(ABSTFN), **kwargs)
finally:
os_helper.unlink(ABSTFN)
os_helper.unlink(ABSTFN+"1")
@@ -658,13 +709,14 @@ class PosixPathTest(unittest.TestCase):
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_repeated_indirect_symlinks(self):
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_repeated_indirect_symlinks(self, kwargs):
# Issue #6975.
try:
os.mkdir(ABSTFN)
os.symlink('../' + basename(ABSTFN), ABSTFN + '/self')
os.symlink('self/self/self', ABSTFN + '/link')
- self.assertEqual(realpath(ABSTFN + '/link'), ABSTFN)
+ self.assertEqual(realpath(ABSTFN + '/link', **kwargs), ABSTFN)
finally:
os_helper.unlink(ABSTFN + '/self')
os_helper.unlink(ABSTFN + '/link')
@@ -672,14 +724,15 @@ class PosixPathTest(unittest.TestCase):
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_deep_recursion(self):
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_deep_recursion(self, kwargs):
depth = 10
try:
os.mkdir(ABSTFN)
for i in range(depth):
os.symlink('/'.join(['%d' % i] * 10), ABSTFN + '/%d' % (i + 1))
os.symlink('.', ABSTFN + '/0')
- self.assertEqual(realpath(ABSTFN + '/%d' % depth), ABSTFN)
+ self.assertEqual(realpath(ABSTFN + '/%d' % depth, **kwargs), ABSTFN)
# Test using relative path as well.
with os_helper.change_cwd(ABSTFN):
@@ -691,7 +744,8 @@ class PosixPathTest(unittest.TestCase):
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_resolve_parents(self):
+ @_parameterize({}, {'strict': ALLOW_MISSING})
+ def test_realpath_resolve_parents(self, kwargs):
# We also need to resolve any symlinks in the parents of a relative
# path passed to realpath. E.g.: current working directory is
# /usr/doc with 'doc' being a symlink to /usr/share/doc. We call
@@ -702,7 +756,8 @@ class PosixPathTest(unittest.TestCase):
os.symlink(ABSTFN + "/y", ABSTFN + "/k")
with os_helper.change_cwd(ABSTFN + "/k"):
- self.assertEqual(realpath("a"), ABSTFN + "/y/a")
+ self.assertEqual(realpath("a", **kwargs),
+ ABSTFN + "/y/a")
finally:
os_helper.unlink(ABSTFN + "/k")
os_helper.rmdir(ABSTFN + "/y")
@@ -710,7 +765,8 @@ class PosixPathTest(unittest.TestCase):
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_resolve_before_normalizing(self):
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_resolve_before_normalizing(self, kwargs):
# Bug #990669: Symbolic links should be resolved before we
# normalize the path. E.g.: if we have directories 'a', 'k' and 'y'
# in the following hierarchy:
@@ -725,10 +781,10 @@ class PosixPathTest(unittest.TestCase):
os.symlink(ABSTFN + "/k/y", ABSTFN + "/link-y")
# Absolute path.
- self.assertEqual(realpath(ABSTFN + "/link-y/.."), ABSTFN + "/k")
+ self.assertEqual(realpath(ABSTFN + "/link-y/..", **kwargs), ABSTFN + "/k")
# Relative path.
with os_helper.change_cwd(dirname(ABSTFN)):
- self.assertEqual(realpath(basename(ABSTFN) + "/link-y/.."),
+ self.assertEqual(realpath(basename(ABSTFN) + "/link-y/..", **kwargs),
ABSTFN + "/k")
finally:
os_helper.unlink(ABSTFN + "/link-y")
@@ -738,7 +794,8 @@ class PosixPathTest(unittest.TestCase):
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_resolve_first(self):
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_resolve_first(self, kwargs):
# Bug #1213894: The first component of the path, if not absolute,
# must be resolved too.
@@ -748,8 +805,8 @@ class PosixPathTest(unittest.TestCase):
os.symlink(ABSTFN, ABSTFN + "link")
with os_helper.change_cwd(dirname(ABSTFN)):
base = basename(ABSTFN)
- self.assertEqual(realpath(base + "link"), ABSTFN)
- self.assertEqual(realpath(base + "link/k"), ABSTFN + "/k")
+ self.assertEqual(realpath(base + "link", **kwargs), ABSTFN)
+ self.assertEqual(realpath(base + "link/k", **kwargs), ABSTFN + "/k")
finally:
os_helper.unlink(ABSTFN + "link")
os_helper.rmdir(ABSTFN + "/k")
@@ -767,12 +824,67 @@ class PosixPathTest(unittest.TestCase):
self.assertEqual(realpath(ABSTFN + '/foo'), ABSTFN + '/foo')
self.assertEqual(realpath(ABSTFN + '/../foo'), dirname(ABSTFN) + '/foo')
self.assertEqual(realpath(ABSTFN + '/foo/..'), ABSTFN)
- with self.assertRaises(PermissionError):
- realpath(ABSTFN, strict=True)
finally:
os.chmod(ABSTFN, 0o755, follow_symlinks=False)
os_helper.unlink(ABSTFN)
+ @os_helper.skip_unless_symlink
+ @skip_if_ABSTFN_contains_backslash
+ @unittest.skipIf(os.chmod not in os.supports_follow_symlinks, "Can't set symlink permissions")
+ @unittest.skipIf(sys.platform != "darwin", "only macOS requires read permission to readlink()")
+ @_parameterize({'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_unreadable_symlink_strict(self, kwargs):
+ try:
+ os.symlink(ABSTFN+"1", ABSTFN)
+ os.chmod(ABSTFN, 0o000, follow_symlinks=False)
+ with self.assertRaises(PermissionError):
+ realpath(ABSTFN, **kwargs)
+ with self.assertRaises(PermissionError):
+ realpath(ABSTFN + '/foo', **kwargs)
+ with self.assertRaises(PermissionError):
+ realpath(ABSTFN + '/../foo', **kwargs)
+ with self.assertRaises(PermissionError):
+ realpath(ABSTFN + '/foo/..', **kwargs)
+ finally:
+ os.chmod(ABSTFN, 0o755, follow_symlinks=False)
+ os.unlink(ABSTFN)
+
+ @skip_if_ABSTFN_contains_backslash
+ @os_helper.skip_unless_symlink
+ def test_realpath_unreadable_directory(self):
+ try:
+ os.mkdir(ABSTFN)
+ os.mkdir(ABSTFN + '/k')
+ os.chmod(ABSTFN, 0o000)
+ self.assertEqual(realpath(ABSTFN, strict=False), ABSTFN)
+ self.assertEqual(realpath(ABSTFN, strict=True), ABSTFN)
+ self.assertEqual(realpath(ABSTFN, strict=ALLOW_MISSING), ABSTFN)
+
+ try:
+ os.stat(ABSTFN)
+ except PermissionError:
+ pass
+ else:
+ self.skipTest('Cannot block permissions')
+
+ self.assertEqual(realpath(ABSTFN + '/k', strict=False),
+ ABSTFN + '/k')
+ self.assertRaises(PermissionError, realpath, ABSTFN + '/k',
+ strict=True)
+ self.assertRaises(PermissionError, realpath, ABSTFN + '/k',
+ strict=ALLOW_MISSING)
+
+ self.assertEqual(realpath(ABSTFN + '/missing', strict=False),
+ ABSTFN + '/missing')
+ self.assertRaises(PermissionError, realpath, ABSTFN + '/missing',
+ strict=True)
+ self.assertRaises(PermissionError, realpath, ABSTFN + '/missing',
+ strict=ALLOW_MISSING)
+ finally:
+ os.chmod(ABSTFN, 0o755)
+ os_helper.rmdir(ABSTFN + '/k')
+ os_helper.rmdir(ABSTFN)
+
@skip_if_ABSTFN_contains_backslash
def test_realpath_nonterminal_file(self):
try:
@@ -780,14 +892,27 @@ class PosixPathTest(unittest.TestCase):
f.write('test_posixpath wuz ere')
self.assertEqual(realpath(ABSTFN, strict=False), ABSTFN)
self.assertEqual(realpath(ABSTFN, strict=True), ABSTFN)
+ self.assertEqual(realpath(ABSTFN, strict=ALLOW_MISSING), ABSTFN)
+
self.assertEqual(realpath(ABSTFN + "/", strict=False), ABSTFN)
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/.", strict=False), ABSTFN)
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/..", strict=False), dirname(ABSTFN))
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/subdir", strict=False), ABSTFN + "/subdir")
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir",
+ strict=ALLOW_MISSING)
finally:
os_helper.unlink(ABSTFN)
@@ -800,14 +925,27 @@ class PosixPathTest(unittest.TestCase):
os.symlink(ABSTFN + "1", ABSTFN)
self.assertEqual(realpath(ABSTFN, strict=False), ABSTFN + "1")
self.assertEqual(realpath(ABSTFN, strict=True), ABSTFN + "1")
+ self.assertEqual(realpath(ABSTFN, strict=ALLOW_MISSING), ABSTFN + "1")
+
self.assertEqual(realpath(ABSTFN + "/", strict=False), ABSTFN + "1")
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/.", strict=False), ABSTFN + "1")
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/..", strict=False), dirname(ABSTFN))
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/subdir", strict=False), ABSTFN + "1/subdir")
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir",
+ strict=ALLOW_MISSING)
finally:
os_helper.unlink(ABSTFN)
os_helper.unlink(ABSTFN + "1")
@@ -822,14 +960,27 @@ class PosixPathTest(unittest.TestCase):
os.symlink(ABSTFN + "1", ABSTFN)
self.assertEqual(realpath(ABSTFN, strict=False), ABSTFN + "2")
self.assertEqual(realpath(ABSTFN, strict=True), ABSTFN + "2")
+ self.assertEqual(realpath(ABSTFN, strict=ALLOW_MISSING), ABSTFN + "2")
+
self.assertEqual(realpath(ABSTFN + "/", strict=False), ABSTFN + "2")
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/.", strict=False), ABSTFN + "2")
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/..", strict=False), dirname(ABSTFN))
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/subdir", strict=False), ABSTFN + "2/subdir")
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir",
+ strict=ALLOW_MISSING)
finally:
os_helper.unlink(ABSTFN)
os_helper.unlink(ABSTFN + "1")
@@ -1017,9 +1168,12 @@ class PathLikeTests(unittest.TestCase):
def test_path_abspath(self):
self.assertPathEqual(self.path.abspath)
- def test_path_realpath(self):
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_path_realpath(self, kwargs):
self.assertPathEqual(self.path.realpath)
+ self.assertPathEqual(partial(self.path.realpath, **kwargs))
+
def test_path_relpath(self):
self.assertPathEqual(self.path.relpath)
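
test_ntpath.py and test_posixpath.py now share the small _parameterize decorator whose docstring appears above; in use, each argument is one kwargs dict and the decorated test runs once per dict inside subTest(). A usage sketch, assuming the decorator (and ALLOW_MISSING) defined in the surrounding module are in scope:

    import unittest

    class StrictModeDemo(unittest.TestCase):
        @_parameterize({}, {"strict": True}, {"strict": ALLOW_MISSING})
        def test_modes(self, kwargs):
            # executed three times, once per parameter set
            self.assertIsInstance(kwargs, dict)
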
diff --git a/Lib/test/test_pyrepl/test_pyrepl.py b/Lib/test/test_pyrepl/test_pyrepl.py
index abb4bd1bc25..98bae7dd703 100644
--- a/Lib/test/test_pyrepl/test_pyrepl.py
+++ b/Lib/test/test_pyrepl/test_pyrepl.py
@@ -918,7 +918,14 @@ class TestPyReplCompleter(TestCase):
class TestPyReplModuleCompleter(TestCase):
def setUp(self):
+ import importlib
+ # Make iter_modules() search only the standard library.
+ # This makes the test more reliable in case there are
+ # other user packages/scripts on PYTHONPATH which can
+ # interfere with the completions.
+ lib_path = os.path.dirname(importlib.__path__[0])
self._saved_sys_path = sys.path
+ sys.path = [lib_path]
def tearDown(self):
sys.path = self._saved_sys_path
@@ -932,14 +939,6 @@ class TestPyReplModuleCompleter(TestCase):
return reader
def test_import_completions(self):
- import importlib
- # Make iter_modules() search only the standard library.
- # This makes the test more reliable in case there are
- # other user packages/scripts on PYTHONPATH which can
- # intefere with the completions.
- lib_path = os.path.dirname(importlib.__path__[0])
- sys.path = [lib_path]
-
cases = (
("import path\t\n", "import pathlib"),
("import importlib.\t\tres\t\n", "import importlib.resources"),
@@ -1052,6 +1051,19 @@ class TestPyReplModuleCompleter(TestCase):
output = reader.readline()
self.assertEqual(output, expected)
+ def test_no_fallback_on_regular_completion(self):
+ cases = (
+ ("import pri\t\n", "import pri"),
+ ("from pri\t\n", "from pri"),
+ ("from typing import Na\t\n", "from typing import Na"),
+ )
+ for code, expected in cases:
+ with self.subTest(code=code):
+ events = code_to_events(code)
+ reader = self.prepare_reader(events, namespace={})
+ output = reader.readline()
+ self.assertEqual(output, expected)
+
def test_get_path_and_prefix(self):
cases = (
('', ('', '')),
@@ -1660,6 +1672,17 @@ class TestMain(ReplTestCase):
self.assertEqual(exit_code, 0)
self.assertNotIn("TypeError", output)
+ @force_not_colorized
+ def test_non_string_suggestion_candidates(self):
+ commands = ("import runpy\n"
+ "runpy._run_module_code('blech', {0: '', 'bluch': ''}, '')\n"
+ "exit()\n")
+
+ output, exit_code = self.run_repl(commands)
+ self.assertEqual(exit_code, 0)
+ self.assertNotIn("all elements in 'candidates' must be strings", output)
+ self.assertIn("bluch", output)
+
def test_readline_history_file(self):
# skip, if readline module is not available
readline = import_module('readline')
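
Moving the stdlib-only sys.path setup into setUp makes every module-completion test deterministic, not just test_import_completions. The underlying trick is simply to point pkgutil at the directory containing the standard library:

    import importlib
    import os
    import pkgutil

    lib_path = os.path.dirname(importlib.__path__[0])      # parent dir of the stdlib
    stdlib_modules = {m.name for m in pkgutil.iter_modules([lib_path])}
    print(sorted(n for n in stdlib_modules if n.startswith("path")))
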
diff --git a/Lib/test/test_queue.py b/Lib/test/test_queue.py
index 7f4fe357034..c855fb8fe2b 100644
--- a/Lib/test/test_queue.py
+++ b/Lib/test/test_queue.py
@@ -6,7 +6,7 @@ import threading
import time
import unittest
import weakref
-from test.support import gc_collect
+from test.support import gc_collect, bigmemtest
from test.support import import_helper
from test.support import threading_helper
@@ -963,33 +963,33 @@ class BaseSimpleQueueTest:
# One producer, one consumer => results appended in well-defined order
self.assertEqual(results, inputs)
- def test_many_threads(self):
+ @bigmemtest(size=50, memuse=100*2**20, dry_run=False)
+ def test_many_threads(self, size):
# Test multiple concurrent put() and get()
- N = 50
q = self.q
inputs = list(range(10000))
- results = self.run_threads(N, q, inputs, self.feed, self.consume)
+ results = self.run_threads(size, q, inputs, self.feed, self.consume)
# Multiple consumers without synchronization append the
# results in random order
self.assertEqual(sorted(results), inputs)
- def test_many_threads_nonblock(self):
+ @bigmemtest(size=50, memuse=100*2**20, dry_run=False)
+ def test_many_threads_nonblock(self, size):
# Test multiple concurrent put() and get(block=False)
- N = 50
q = self.q
inputs = list(range(10000))
- results = self.run_threads(N, q, inputs,
+ results = self.run_threads(size, q, inputs,
self.feed, self.consume_nonblock)
self.assertEqual(sorted(results), inputs)
- def test_many_threads_timeout(self):
+ @bigmemtest(size=50, memuse=100*2**20, dry_run=False)
+ def test_many_threads_timeout(self, size):
# Test multiple concurrent put() and get(timeout=...)
- N = 50
q = self.q
inputs = list(range(1000))
- results = self.run_threads(N, q, inputs,
+ results = self.run_threads(size, q, inputs,
self.feed, self.consume_timeout)
self.assertEqual(sorted(results), inputs)
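
Replacing the hard-coded N = 50 with test.support.bigmemtest ties these thread-heavy tests to regrtest's -M/--memlimit option: with dry_run=False the test is skipped unless enough memory is configured, and when it does run the declared size is passed in as the extra argument. A hedged sketch of the decorator applied elsewhere, with the same parameter meanings as above:

    import unittest
    from test import support

    class ManyThreadsDemo(unittest.TestCase):
        @support.bigmemtest(size=50, memuse=100 * 2**20, dry_run=False)
        def test_scaled(self, size):
            # only runs when regrtest is given a big enough -M memory limit;
            # 'size' is then the requested value of 50
            self.assertEqual(size, 50)
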
diff --git a/Lib/test/test_random.py b/Lib/test/test_random.py
index bd76d636e4f..54910cd8054 100644
--- a/Lib/test/test_random.py
+++ b/Lib/test/test_random.py
@@ -392,6 +392,8 @@ class TestBasicOps:
self.assertRaises(TypeError, self.gen.getrandbits)
self.assertRaises(TypeError, self.gen.getrandbits, 1, 2)
self.assertRaises(ValueError, self.gen.getrandbits, -1)
+ self.assertRaises(OverflowError, self.gen.getrandbits, 1<<1000)
+ self.assertRaises(ValueError, self.gen.getrandbits, -1<<1000)
self.assertRaises(TypeError, self.gen.getrandbits, 10.1)
def test_pickling(self):
@@ -435,6 +437,8 @@ class TestBasicOps:
self.assertRaises(TypeError, self.gen.randbytes)
self.assertRaises(TypeError, self.gen.randbytes, 1, 2)
self.assertRaises(ValueError, self.gen.randbytes, -1)
+ self.assertRaises(OverflowError, self.gen.randbytes, 1<<1000)
+ self.assertRaises((ValueError, OverflowError), self.gen.randbytes, -1<<1000)
self.assertRaises(TypeError, self.gen.randbytes, 1.0)
def test_mu_sigma_default_args(self):
@@ -806,6 +810,22 @@ class MersenneTwister_TestBasicOps(TestBasicOps, unittest.TestCase):
self.assertEqual(self.gen.getrandbits(100),
97904845777343510404718956115)
+ def test_getrandbits_2G_bits(self):
+ size = 2**31
+ self.gen.seed(1234567)
+ x = self.gen.getrandbits(size)
+ self.assertEqual(x.bit_length(), size)
+ self.assertEqual(x & (2**100-1), 890186470919986886340158459475)
+ self.assertEqual(x >> (size-100), 1226514312032729439655761284440)
+
+ @support.bigmemtest(size=2**32, memuse=1/8+2/15, dry_run=False)
+ def test_getrandbits_4G_bits(self, size):
+ self.gen.seed(1234568)
+ x = self.gen.getrandbits(size)
+ self.assertEqual(x.bit_length(), size)
+ self.assertEqual(x & (2**100-1), 287241425661104632871036099814)
+ self.assertEqual(x >> (size-100), 739728759900339699429794460738)
+
def test_randrange_uses_getrandbits(self):
# Verify use of getrandbits by randrange
# Use same seed as in the cross-platform repeatability test
@@ -962,6 +982,14 @@ class MersenneTwister_TestBasicOps(TestBasicOps, unittest.TestCase):
self.assertEqual(self.gen.randbytes(n),
gen2.getrandbits(n * 8).to_bytes(n, 'little'))
+ @support.bigmemtest(size=2**29, memuse=1+16/15, dry_run=False)
+ def test_randbytes_256M(self, size):
+ self.gen.seed(2849427419)
+ x = self.gen.randbytes(size)
+ self.assertEqual(len(x), size)
+ self.assertEqual(x[:12].hex(), 'f6fd9ae63855ab91ea238b4f')
+ self.assertEqual(x[-12:].hex(), '0e7af69a84ee99bf4a11becc')
+
def test_sample_counts_equivalence(self):
# Test the documented strong equivalence to a sample with repeated elements.
# We run this test on random.Random() which makes deterministic selections
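
The added assertions tighten argument validation for getrandbits() and randbytes(): negative counts raise ValueError, and counts too large to represent internally raise OverflowError (the multi-gigabit cases additionally pin down reproducible output under bigmemtest). The cheap half of that contract can be checked directly:

    import random

    gen = random.Random()

    for bad in (lambda: gen.getrandbits(-1), lambda: gen.randbytes(-1)):
        try:
            bad()
        except ValueError:
            pass                        # negative counts are rejected

    try:
        gen.getrandbits(1 << 1000)      # far larger than any real allocation
    except OverflowError:
        pass                            # rejected before any memory is touched
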
diff --git a/Lib/test/test_readline.py b/Lib/test/test_readline.py
index b9d082b3597..45192fe5082 100644
--- a/Lib/test/test_readline.py
+++ b/Lib/test/test_readline.py
@@ -1,6 +1,7 @@
"""
Very minimal unittests for parts of the readline module.
"""
+import codecs
import locale
import os
import sys
@@ -231,6 +232,13 @@ print("History length:", readline.get_current_history_length())
# writing and reading non-ASCII bytes into/from a TTY works, but
# readline or ncurses ignores non-ASCII bytes on read.
self.skipTest(f"the LC_CTYPE locale is {loc!r}")
+ if sys.flags.utf8_mode:
+ encoding = locale.getencoding()
+ encoding = codecs.lookup(encoding).name # normalize the name
+ if encoding != "utf-8":
+ # gh-133711: The Python UTF-8 Mode ignores the LC_CTYPE locale
+ # and always uses the UTF-8 encoding.
+ self.skipTest(f"the LC_CTYPE encoding is {encoding!r}")
try:
readline.add_history("\xEB\xEF")
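
The added guard normalizes the LC_CTYPE encoding name through the codecs registry before comparing it, so aliases such as 'UTF8' or 'utf_8' compare equal to 'utf-8'; in UTF-8 Mode the locale encoding is ignored, which is why the test skips when the two differ. The normalization idiom on its own:

    import codecs
    import locale
    import sys

    encoding = codecs.lookup(locale.getencoding()).name    # e.g. 'UTF8' -> 'utf-8'
    if sys.flags.utf8_mode and encoding != "utf-8":
        print("UTF-8 Mode is overriding the locale encoding:", encoding)
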
diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py
index 7e317d5ab94..f3ac301686b 100644
--- a/Lib/test/test_regrtest.py
+++ b/Lib/test/test_regrtest.py
@@ -874,7 +874,10 @@ class ProgramsTestCase(BaseTestCase):
self.run_tests(args)
def run_batch(self, *args):
- proc = self.run_command(args)
+ proc = self.run_command(args,
+ # gh-133711: cmd.exe uses the OEM code page
+ # to display the non-ASCII current directory
+ errors="backslashreplace")
self.check_output(proc.stdout)
@unittest.skipUnless(sysconfig.is_python_build(),
@@ -2064,7 +2067,7 @@ class ArgsTestCase(BaseTestCase):
self.check_executed_tests(output, [testname],
failed=[testname],
parallel=True,
- stats=TestStats(1, 1, 0))
+ stats=TestStats(1, 2, 1))
def _check_random_seed(self, run_workers: bool):
# gh-109276: When -r/--randomize is used, random.seed() is called
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index 2767a53d53c..f123f6ece40 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -31,6 +31,7 @@ import weakref
import platform
import sysconfig
import functools
+from contextlib import nullcontext
try:
import ctypes
except ImportError:
@@ -2843,6 +2844,7 @@ class ThreadedTests(unittest.TestCase):
# See GH-124984: OpenSSL is not thread safe.
threads = []
+ warnings_filters = sys.flags.context_aware_warnings
global USE_SAME_TEST_CONTEXT
USE_SAME_TEST_CONTEXT = True
try:
@@ -2851,7 +2853,10 @@ class ThreadedTests(unittest.TestCase):
self.test_alpn_protocols,
self.test_getpeercert,
self.test_crl_check,
- self.test_check_hostname_idn,
+ functools.partial(
+ self.test_check_hostname_idn,
+ warnings_filters=warnings_filters,
+ ),
self.test_wrong_cert_tls12,
self.test_wrong_cert_tls13,
):
@@ -3097,7 +3102,7 @@ class ThreadedTests(unittest.TestCase):
cipher = s.cipher()[0].split('-')
self.assertTrue(cipher[:2], ('ECDHE', 'ECDSA'))
- def test_check_hostname_idn(self):
+ def test_check_hostname_idn(self, warnings_filters=True):
if support.verbose:
sys.stdout.write("\n")
@@ -3152,16 +3157,30 @@ class ThreadedTests(unittest.TestCase):
server_hostname="python.example.org") as s:
with self.assertRaises(ssl.CertificateError):
s.connect((HOST, server.port))
- with ThreadedEchoServer(context=server_context, chatty=True) as server:
- with warnings_helper.check_no_resource_warning(self):
- with self.assertRaises(UnicodeError):
- context.wrap_socket(socket.socket(),
- server_hostname='.pythontest.net')
- with ThreadedEchoServer(context=server_context, chatty=True) as server:
- with warnings_helper.check_no_resource_warning(self):
- with self.assertRaises(UnicodeDecodeError):
- context.wrap_socket(socket.socket(),
- server_hostname=b'k\xf6nig.idn.pythontest.net')
+ with (
+ ThreadedEchoServer(context=server_context, chatty=True) as server,
+ (
+ warnings_helper.check_no_resource_warning(self)
+ if warnings_filters
+ else nullcontext()
+ ),
+ self.assertRaises(UnicodeError),
+ ):
+ context.wrap_socket(socket.socket(), server_hostname='.pythontest.net')
+
+ with (
+ ThreadedEchoServer(context=server_context, chatty=True) as server,
+ (
+ warnings_helper.check_no_resource_warning(self)
+ if warnings_filters
+ else nullcontext()
+ ),
+ self.assertRaises(UnicodeDecodeError),
+ ):
+ context.wrap_socket(
+ socket.socket(),
+ server_hostname=b'k\xf6nig.idn.pythontest.net',
+ )
def test_wrong_cert_tls12(self):
"""Connecting when the server rejects the client's certificate
diff --git a/Lib/test/test_stable_abi_ctypes.py b/Lib/test/test_stable_abi_ctypes.py
index 1e6f69d49e9..5a6ba9de337 100644
--- a/Lib/test/test_stable_abi_ctypes.py
+++ b/Lib/test/test_stable_abi_ctypes.py
@@ -658,7 +658,11 @@ SYMBOL_NAMES = (
"PySys_AuditTuple",
"PySys_FormatStderr",
"PySys_FormatStdout",
+ "PySys_GetAttr",
+ "PySys_GetAttrString",
"PySys_GetObject",
+ "PySys_GetOptionalAttr",
+ "PySys_GetOptionalAttrString",
"PySys_GetXOptions",
"PySys_HasWarnOptions",
"PySys_ResetWarnOptions",
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index 795d1ecbb59..bf415894903 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -729,7 +729,7 @@ class SysModuleTest(unittest.TestCase):
info = sys.thread_info
self.assertEqual(len(info), 3)
self.assertIn(info.name, ('nt', 'pthread', 'pthread-stubs', 'solaris', None))
- self.assertIn(info.lock, ('semaphore', 'mutex+cond', None))
+ self.assertIn(info.lock, ('pymutex', None))
if sys.platform.startswith(("linux", "android", "freebsd")):
self.assertEqual(info.name, "pthread")
elif sys.platform == "win32":
@@ -1135,23 +1135,12 @@ class SysModuleTest(unittest.TestCase):
b = sys.getallocatedblocks()
self.assertLessEqual(b, a)
try:
- # While we could imagine a Python session where the number of
- # multiple buffer objects would exceed the sharing of references,
- # it is unlikely to happen in a normal test run.
- #
- # In free-threaded builds each code object owns an array of
- # pointers to copies of the bytecode. When the number of
- # code objects is a large fraction of the total number of
- # references, this can cause the total number of allocated
- # blocks to exceed the total number of references.
- #
- # For some reason, iOS seems to trigger the "unlikely to happen"
- # case reliably under CI conditions. It's not clear why; but as
- # this test is checking the behavior of getallocatedblock()
- # under garbage collection, we can skip this pre-condition check
- # for now. See GH-130384.
- if not support.Py_GIL_DISABLED and not support.is_apple_mobile:
- self.assertLess(a, sys.gettotalrefcount())
+ # The reported blocks will include immortalized strings, but the
+ # total ref count will not. This will sanity check that among all
+ # other objects (those eligible for garbage collection) there
+ # are more references being tracked than allocated blocks.
+ interned_immortal = sys.getunicodeinternedsize(_only_immortal=True)
+ self.assertLess(a - interned_immortal, sys.gettotalrefcount())
except AttributeError:
# gettotalrefcount() not available
pass
@@ -1299,6 +1288,7 @@ class SysModuleTest(unittest.TestCase):
for name in sys.stdlib_module_names:
self.assertIsInstance(name, str)
+ @unittest.skipUnless(hasattr(sys, '_stdlib_dir'), 'need sys._stdlib_dir')
def test_stdlib_dir(self):
os = import_helper.import_fresh_module('os')
marker = getattr(os, '__file__', None)
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index cf218a2bf14..7055e1ed147 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -2715,6 +2715,31 @@ class MiscTest(unittest.TestCase):
str(excinfo.exception),
)
+ @unittest.skipUnless(os_helper.can_symlink(), 'requires symlink support')
+ @unittest.skipUnless(hasattr(os, 'chmod'), "missing os.chmod")
+ @unittest.mock.patch('os.chmod')
+ def test_deferred_directory_attributes_update(self, mock_chmod):
+ # Regression test for gh-127987: setting attributes on arbitrary files
+ tempdir = os.path.join(TEMPDIR, 'test127987')
+ def mock_chmod_side_effect(path, mode, **kwargs):
+ target_path = os.path.realpath(path)
+ if os.path.commonpath([target_path, tempdir]) != tempdir:
+ raise Exception("should not try to chmod anything outside the destination", target_path)
+ mock_chmod.side_effect = mock_chmod_side_effect
+
+ outside_tree_dir = os.path.join(TEMPDIR, 'outside_tree_dir')
+ with ArchiveMaker() as arc:
+ arc.add('x', symlink_to='.')
+ arc.add('x', type=tarfile.DIRTYPE, mode='?rwsrwsrwt')
+ arc.add('x', symlink_to=outside_tree_dir)
+
+ os.makedirs(outside_tree_dir)
+ try:
+ arc.open().extractall(path=tempdir, filter='tar')
+ finally:
+ os_helper.rmtree(outside_tree_dir)
+ os_helper.rmtree(tempdir)
+
class CommandLineTest(unittest.TestCase):
@@ -3275,6 +3300,10 @@ class NoneInfoExtractTests(ReadTest):
got_paths = set(
p.relative_to(directory)
for p in pathlib.Path(directory).glob('**/*'))
+ if self.extraction_filter in (None, 'data'):
+ # The 'data' filter is expected to reject special files
+ for path in 'ustar/fifotype', 'ustar/blktype', 'ustar/chrtype':
+ got_paths.discard(pathlib.Path(path))
self.assertEqual(self.control_paths, got_paths)
@contextmanager
@@ -3504,12 +3533,28 @@ class ArchiveMaker:
self.bio = None
def add(self, name, *, type=None, symlink_to=None, hardlink_to=None,
- mode=None, size=None, **kwargs):
- """Add a member to the test archive. Call within `with`."""
+ mode=None, size=None, content=None, **kwargs):
+ """Add a member to the test archive. Call within `with`.
+
+ Provides many shortcuts:
+ - default `type` is based on symlink_to, hardlink_to, and trailing `/`
+ in name (which is stripped)
+ - size & content defaults are based on each other
+ - content can be str or bytes
+ - mode should be textual ('-rwxrwxrwx')
+
+ (add more! this is unstable internal test-only API)
+ """
name = str(name)
tarinfo = tarfile.TarInfo(name).replace(**kwargs)
+ if content is not None:
+ if isinstance(content, str):
+ content = content.encode()
+ size = len(content)
if size is not None:
tarinfo.size = size
+ if content is None:
+ content = bytes(tarinfo.size)
if mode:
tarinfo.mode = _filemode_to_int(mode)
if symlink_to is not None:
@@ -3523,7 +3568,7 @@ class ArchiveMaker:
if type is not None:
tarinfo.type = type
if tarinfo.isreg():
- fileobj = io.BytesIO(bytes(tarinfo.size))
+ fileobj = io.BytesIO(content)
else:
fileobj = None
self.tar_w.addfile(tarinfo, fileobj)
@@ -3557,7 +3602,7 @@ class TestExtractionFilters(unittest.TestCase):
destdir = outerdir / 'dest'
@contextmanager
- def check_context(self, tar, filter):
+ def check_context(self, tar, filter, *, check_flag=True):
"""Extracts `tar` to `self.destdir` and allows checking the result
If an error occurs, it must be checked using `expect_exception`
@@ -3566,27 +3611,40 @@ class TestExtractionFilters(unittest.TestCase):
except the destination directory itself and parent directories of
other files.
When checking directories, do so before their contents.
+
+ A file called 'flag' is made in outerdir (i.e. outside destdir)
+ before extraction; it should not be altered nor should its contents
+ be read/copied.
"""
with os_helper.temp_dir(self.outerdir):
+ flag_path = self.outerdir / 'flag'
+ flag_path.write_text('capture me')
try:
tar.extractall(self.destdir, filter=filter)
except Exception as exc:
self.raised_exception = exc
+ self.reraise_exception = True
self.expected_paths = set()
else:
self.raised_exception = None
+ self.reraise_exception = False
self.expected_paths = set(self.outerdir.glob('**/*'))
self.expected_paths.discard(self.destdir)
+ self.expected_paths.discard(flag_path)
try:
- yield
+ yield self
finally:
tar.close()
- if self.raised_exception:
+ if self.reraise_exception:
raise self.raised_exception
self.assertEqual(self.expected_paths, set())
+ if check_flag:
+ self.assertEqual(flag_path.read_text(), 'capture me')
+ else:
+ assert filter == 'fully_trusted'
def expect_file(self, name, type=None, symlink_to=None, mode=None,
- size=None):
+ size=None, content=None):
"""Check a single file. See check_context."""
if self.raised_exception:
raise self.raised_exception
@@ -3605,26 +3663,45 @@ class TestExtractionFilters(unittest.TestCase):
# The symlink might be the same (textually) as what we expect,
# but some systems change the link to an equivalent path, so
# we fall back to samefile().
- if expected != got:
- self.assertTrue(got.samefile(expected))
+ try:
+ if expected != got:
+ self.assertTrue(got.samefile(expected))
+ except Exception as e:
+ # attach a note, so it's shown even if `samefile` fails
+ e.add_note(f'{expected=}, {got=}')
+ raise
elif type == tarfile.REGTYPE or type is None:
self.assertTrue(path.is_file())
elif type == tarfile.DIRTYPE:
self.assertTrue(path.is_dir())
elif type == tarfile.FIFOTYPE:
self.assertTrue(path.is_fifo())
+ elif type == tarfile.SYMTYPE:
+ self.assertTrue(path.is_symlink())
else:
raise NotImplementedError(type)
if size is not None:
self.assertEqual(path.stat().st_size, size)
+ if content is not None:
+ self.assertEqual(path.read_text(), content)
for parent in path.parents:
self.expected_paths.discard(parent)
+ def expect_any_tree(self, name):
+ """Check a directory; forget about its contents."""
+ tree_path = (self.destdir / name).resolve()
+ self.expect_file(tree_path, type=tarfile.DIRTYPE)
+ self.expected_paths = {
+ p for p in self.expected_paths
+ if tree_path not in p.parents
+ }
+
def expect_exception(self, exc_type, message_re='.'):
with self.assertRaisesRegex(exc_type, message_re):
if self.raised_exception is not None:
raise self.raised_exception
- self.raised_exception = None
+ self.reraise_exception = False
+ return self.raised_exception
def test_benign_file(self):
with ArchiveMaker() as arc:
@@ -3710,6 +3787,80 @@ class TestExtractionFilters(unittest.TestCase):
self.expect_file('parent/evil')
@symlink_test
+ @os_helper.skip_unless_symlink
+ def test_realpath_limit_attack(self):
+ # (CVE-2025-4517)
+
+ with ArchiveMaker() as arc:
+ # populate the symlinks and dirs that expand in os.path.realpath()
+ # The component length is chosen so that in common cases, the unexpanded
+ # path fits in PATH_MAX, but it overflows when the final symlink
+ # is expanded
+ steps = "abcdefghijklmnop"
+ if sys.platform == 'win32':
+ component = 'd' * 25
+ elif 'PC_PATH_MAX' in os.pathconf_names:
+ max_path_len = os.pathconf(self.outerdir.parent, "PC_PATH_MAX")
+ path_sep_len = 1
+ dest_len = len(str(self.destdir)) + path_sep_len
+ component_len = (max_path_len - dest_len) // (len(steps) + path_sep_len)
+ component = 'd' * component_len
+ else:
+ raise NotImplementedError(f"Need to guess component length for {sys.platform}")
+ path = ""
+ step_path = ""
+ for i in steps:
+ arc.add(os.path.join(path, component), type=tarfile.DIRTYPE,
+ mode='drwxrwxrwx')
+ arc.add(os.path.join(path, i), symlink_to=component)
+ path = os.path.join(path, component)
+ step_path = os.path.join(step_path, i)
+ # create the final symlink that exceeds PATH_MAX and simply points
+ # to the top dir.
+ # this link will never be expanded by
+ # os.path.realpath(strict=False), nor anything after it.
+ linkpath = os.path.join(*steps, "l"*254)
+ parent_segments = [".."] * len(steps)
+ arc.add(linkpath, symlink_to=os.path.join(*parent_segments))
+ # make a symlink outside to keep the tar command happy
+ arc.add("escape", symlink_to=os.path.join(linkpath, ".."))
+ # use the symlinks above, that are not checked, to create a hardlink
+ # to a file outside of the destination path
+ arc.add("flaglink", hardlink_to=os.path.join("escape", "flag"))
+ # now that we have the hardlink we can overwrite the file
+ arc.add("flaglink", content='overwrite')
+ # we can create new files as well!
+ arc.add("escape/newfile", content='new')
+
+ with (self.subTest('fully_trusted'),
+ self.check_context(arc.open(), filter='fully_trusted',
+ check_flag=False)):
+ if sys.platform == 'win32':
+ self.expect_exception((FileNotFoundError, FileExistsError))
+ elif self.raised_exception:
+ # Cannot symlink/hardlink: tarfile falls back to getmember()
+ self.expect_exception(KeyError)
+ # Otherwise, this branch should never be reached.
+ else:
+ self.expect_any_tree(component)
+ self.expect_file('flaglink', content='overwrite')
+ self.expect_file('../newfile', content='new')
+ self.expect_file('escape', type=tarfile.SYMTYPE)
+ self.expect_file('a', symlink_to=component)
+
+ for filter in 'tar', 'data':
+ with self.subTest(filter), self.check_context(arc.open(), filter=filter):
+ exc = self.expect_exception((OSError, KeyError))
+ if isinstance(exc, OSError):
+ if sys.platform == 'win32':
+ # 3: ERROR_PATH_NOT_FOUND
+ # 5: ERROR_ACCESS_DENIED
+ # 206: ERROR_FILENAME_EXCED_RANGE
+ self.assertIn(exc.winerror, (3, 5, 206))
+ else:
+ self.assertEqual(exc.errno, errno.ENAMETOOLONG)
+
+ @symlink_test
def test_parent_symlink2(self):
# Test interplaying symlinks
# Inspired by 'dirsymlink2b' in jwilk/traversal-archives
@@ -3931,8 +4082,8 @@ class TestExtractionFilters(unittest.TestCase):
arc.add('symlink2', symlink_to=os.path.join(
'linkdir', 'hardlink2'))
arc.add('targetdir/target', size=3)
- arc.add('linkdir/hardlink', hardlink_to='targetdir/target')
- arc.add('linkdir/hardlink2', hardlink_to='linkdir/symlink')
+ arc.add('linkdir/hardlink', hardlink_to=os.path.join('targetdir', 'target'))
+ arc.add('linkdir/hardlink2', hardlink_to=os.path.join('linkdir', 'symlink'))
for filter in 'tar', 'data', 'fully_trusted':
with self.check_context(arc.open(), filter):
@@ -3948,6 +4099,129 @@ class TestExtractionFilters(unittest.TestCase):
self.expect_file('linkdir/symlink', size=3)
self.expect_file('symlink2', size=3)
+ @symlink_test
+ def test_sneaky_hardlink_fallback(self):
+ # (CVE-2025-4330)
+ # Test that when hardlink extraction falls back to extracting members
+ # from the archive, the extracted member is (re-)filtered.
+ with ArchiveMaker() as arc:
+ # Create a directory structure so the c/escape symlink stays
+ # inside the path
+ arc.add("a/t/dummy")
+ # Create b/ directory
+ arc.add("b/")
+ # Point "c" to the bottom of the tree in "a"
+ arc.add("c", symlink_to=os.path.join("a", "t"))
+ # link to non-existent location under "a"
+ arc.add("c/escape", symlink_to=os.path.join("..", "..",
+ "link_here"))
+ # Move "c" to point to "b" ("c/escape" no longer exists)
+ arc.add("c", symlink_to="b")
+ # Attempt to create a hard link to "c/escape". Since it doesn't
+ # exist, it will fall back to extracting "c/escape", but at path "boom".
+ arc.add("boom", hardlink_to=os.path.join("c", "escape"))
+
+ with self.check_context(arc.open(), 'data'):
+ if not os_helper.can_symlink():
+ # When 'c/escape' is extracted, 'c' is a regular
+ # directory, and 'c/escape' *would* point outside
+ # the destination if symlinks were allowed.
+ self.expect_exception(
+ tarfile.LinkOutsideDestinationError)
+ elif sys.platform == "win32":
+ # On Windows, 'c/escape' points outside the destination
+ self.expect_exception(tarfile.LinkOutsideDestinationError)
+ else:
+ e = self.expect_exception(
+ tarfile.LinkFallbackError,
+ "link 'boom' would be extracted as a copy of "
+ + "'c/escape', which was rejected")
+ self.assertIsInstance(e.__cause__,
+ tarfile.LinkOutsideDestinationError)
+ for filter in 'tar', 'fully_trusted':
+ with self.subTest(filter), self.check_context(arc.open(), filter):
+ if not os_helper.can_symlink():
+ self.expect_file("a/t/dummy")
+ self.expect_file("b/")
+ self.expect_file("c/")
+ else:
+ self.expect_file("a/t/dummy")
+ self.expect_file("b/")
+ self.expect_file("a/t/escape", symlink_to='../../link_here')
+ self.expect_file("boom", symlink_to='../../link_here')
+ self.expect_file("c", symlink_to='b')
+
+ @symlink_test
+ def test_exfiltration_via_symlink(self):
+ # (CVE-2025-4138)
+ # Test changing symlinks that result in a symlink pointing outside
+ # the extraction directory, unless prevented by 'data' filter's
+ # normalization.
+ with ArchiveMaker() as arc:
+ arc.add("escape", symlink_to=os.path.join('link', 'link', '..', '..', 'link-here'))
+ arc.add("link", symlink_to='./')
+
+ for filter in 'tar', 'data', 'fully_trusted':
+ with self.check_context(arc.open(), filter):
+ if os_helper.can_symlink():
+ self.expect_file("link", symlink_to='./')
+ if filter == 'data':
+ self.expect_file("escape", symlink_to='link-here')
+ else:
+ self.expect_file("escape",
+ symlink_to='link/link/../../link-here')
+ else:
+ # Nothing is extracted.
+ pass
+
+ @symlink_test
+ def test_chmod_outside_dir(self):
+ # (CVE-2024-12718)
+ # Test that members used for delayed updates of directory metadata
+ # are (re-)filtered.
+ with ArchiveMaker() as arc:
+ # "pwn" is a veeeery innocent symlink:
+ arc.add("a/pwn", symlink_to='.')
+ # But now "pwn" is also a directory, so it's scheduled to have its
+ # metadata updated later:
+ arc.add("a/pwn/", mode='drwxrwxrwx')
+ # Oops, "pwn" is not so innocent any more:
+ arc.add("a/pwn", symlink_to='x/../')
+ # Newly created symlink points to the dest dir,
+ # so it's OK for the "data" filter.
+ arc.add('a/x', symlink_to=('../'))
+ # But now "pwn" points outside the dest dir
+
+ for filter in 'tar', 'data', 'fully_trusted':
+ with self.check_context(arc.open(), filter) as cc:
+ if not os_helper.can_symlink():
+ self.expect_file("a/pwn/")
+ elif filter == 'data':
+ self.expect_file("a/x", symlink_to='../')
+ self.expect_file("a/pwn", symlink_to='.')
+ else:
+ self.expect_file("a/x", symlink_to='../')
+ self.expect_file("a/pwn", symlink_to='x/../')
+ if sys.platform != "win32":
+ st_mode = cc.outerdir.stat().st_mode
+ self.assertNotEqual(st_mode & 0o777, 0o777)
+
+ def test_link_fallback_normalizes(self):
+ # Make sure hardlink fallbacks work for non-normalized paths for all
+ # filters
+ with ArchiveMaker() as arc:
+ arc.add("dir/")
+ arc.add("dir/../afile")
+ arc.add("link1", hardlink_to='dir/../afile')
+ arc.add("link2", hardlink_to='dir/../dir/../afile')
+
+ for filter in 'tar', 'data', 'fully_trusted':
+ with self.check_context(arc.open(), filter) as cc:
+ self.expect_file("dir/")
+ self.expect_file("afile")
+ self.expect_file("link1")
+ self.expect_file("link2")
+
def test_modes(self):
# Test how file modes are extracted
# (Note that the modes are ignored on platforms without working chmod)
@@ -4072,7 +4346,7 @@ class TestExtractionFilters(unittest.TestCase):
# The 'tar' filter returns TarInfo objects with the same name/type.
# (It can also fail for particularly "evil" input, but we don't have
# that in the test archive.)
- with tarfile.TarFile.open(tarname) as tar:
+ with tarfile.TarFile.open(tarname, encoding="iso8859-1") as tar:
for tarinfo in tar.getmembers():
try:
filtered = tarfile.tar_filter(tarinfo, '')
@@ -4084,7 +4358,7 @@ class TestExtractionFilters(unittest.TestCase):
def test_data_filter(self):
# The 'data' filter either raises, or returns TarInfo with the same
# name/type.
- with tarfile.TarFile.open(tarname) as tar:
+ with tarfile.TarFile.open(tarname, encoding="iso8859-1") as tar:
for tarinfo in tar.getmembers():
try:
filtered = tarfile.data_filter(tarinfo, '')
@@ -4242,13 +4516,13 @@ class TestExtractionFilters(unittest.TestCase):
# If errorlevel is 0, errors affected by errorlevel are ignored
with self.check_context(arc.open(errorlevel=0), extracterror_filter):
- self.expect_file('file')
+ pass
with self.check_context(arc.open(errorlevel=0), filtererror_filter):
- self.expect_file('file')
+ pass
with self.check_context(arc.open(errorlevel=0), oserror_filter):
- self.expect_file('file')
+ pass
with self.check_context(arc.open(errorlevel=0), tarerror_filter):
self.expect_exception(tarfile.TarError)
@@ -4259,7 +4533,7 @@ class TestExtractionFilters(unittest.TestCase):
# If 1, all fatal errors are raised
with self.check_context(arc.open(errorlevel=1), extracterror_filter):
- self.expect_file('file')
+ pass
with self.check_context(arc.open(errorlevel=1), filtererror_filter):
self.expect_exception(tarfile.FilterError)
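
As a side note for readers of the CVE tests above: they all drive ``TarFile.extractall(..., filter=...)``. A minimal, self-contained sketch of the behaviour they rely on (member names are hypothetical), showing the 'data' filter refusing a symlink that points outside the destination:

    import io
    import tarfile
    import tempfile

    # Build an in-memory archive containing one symlink that escapes the
    # extraction root.
    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode='w') as tw:
        member = tarfile.TarInfo('escape')
        member.type = tarfile.SYMTYPE
        member.linkname = '../outside'      # relative link above the destination
        tw.addfile(member)
    buf.seek(0)

    dest = tempfile.mkdtemp()
    with tarfile.open(fileobj=buf) as tr:
        try:
            tr.extractall(dest, filter='data')
        except tarfile.FilterError as exc:  # LinkOutsideDestinationError here
            print('rejected by the data filter:', exc)
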
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index e6b19fe1812..865e0c5b40d 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -1,6 +1,8 @@
import contextlib
+import itertools
import os
import re
+import string
import tempfile
import token
import tokenize
@@ -3238,5 +3240,77 @@ class CommandLineTest(unittest.TestCase):
self.check_output(source, expect, flag)
+class StringPrefixTest(unittest.TestCase):
+ @staticmethod
+ def determine_valid_prefixes():
+ # Try all lengths until we find a length that has zero valid
+ # prefixes. This will miss the case where for example there
+ # are no valid 3 character prefixes, but there are valid 4
+ # character prefixes. That seems unlikely.
+
+ single_char_valid_prefixes = set()
+
+ # Find all of the single character string prefixes. Just get
+ # the lowercase version, we'll deal with combinations of upper
+ # and lower case later. I'm using this logic just in case
+ # some uppercase-only prefix is added.
+ for letter in itertools.chain(string.ascii_lowercase, string.ascii_uppercase):
+ try:
+ eval(f'{letter}""')
+ single_char_valid_prefixes.add(letter.lower())
+ except SyntaxError:
+ pass
+
+ # This logic assumes that all combinations of valid prefixes only use
+ # the characters that are valid single character prefixes. That seems
+ # like a valid assumption, but if it ever changes this will need
+ # adjusting.
+ valid_prefixes = set()
+ for length in itertools.count():
+ num_at_this_length = 0
+ for prefix in (
+ "".join(l)
+ for l in itertools.combinations(single_char_valid_prefixes, length)
+ ):
+ for t in itertools.permutations(prefix):
+ for u in itertools.product(*[(c, c.upper()) for c in t]):
+ p = "".join(u)
+ if p == "not":
+ # 'not' can never be a string prefix,
+ # because it's a valid expression: not ""
+ continue
+ try:
+ eval(f'{p}""')
+
+ # No syntax error, so p is a valid string
+ # prefix.
+
+ valid_prefixes.add(p)
+ num_at_this_length += 1
+ except SyntaxError:
+ pass
+ if num_at_this_length == 0:
+ return valid_prefixes
+
+
+ def test_prefixes(self):
+ # Get the list of defined string prefixes. I don't see an
+ # obvious documented way of doing this, but probably the best
+ # thing is to split apart tokenize.StringPrefix.
+
+ # Make sure StringPrefix begins and ends in parens. We're
+ # assuming it's of the form "(a|b|ab)", if a, b, and ab are
+ # valid string prefixes.
+ self.assertEqual(tokenize.StringPrefix[0], '(')
+ self.assertEqual(tokenize.StringPrefix[-1], ')')
+
+ # Then split apart everything else by '|'.
+ defined_prefixes = set(tokenize.StringPrefix[1:-1].split('|'))
+
+ # Now compute the actual allowed string prefixes and compare
+ # to what is defined in the tokenize module.
+ self.assertEqual(defined_prefixes, self.determine_valid_prefixes())
+
+
if __name__ == "__main__":
unittest.main()
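
The new StringPrefixTest derives the accepted prefixes by splitting ``tokenize.StringPrefix``; the same trick works interactively, and with this change the set now includes the template-string prefixes:

    import tokenize

    # tokenize.StringPrefix is a regex alternation like "(b|r|u|f|t|...|tr|rt|...)";
    # strip the surrounding parens and split on '|' to list every accepted prefix.
    prefixes = set(tokenize.StringPrefix[1:-1].split('|'))
    print(sorted(p for p in prefixes if 't' in p.lower()))
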
diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py
index b9be87f357f..74b979d0096 100644
--- a/Lib/test/test_traceback.py
+++ b/Lib/test/test_traceback.py
@@ -4188,6 +4188,15 @@ class SuggestionFormattingTestBase:
self.assertNotIn("blech", actual)
self.assertNotIn("oh no!", actual)
+ def test_attribute_error_with_non_string_candidates(self):
+ class T:
+ bluch = 1
+
+ instance = T()
+ instance.__dict__[0] = 1
+ actual = self.get_suggestion(instance, 'blich')
+ self.assertIn("bluch", actual)
+
def test_attribute_error_with_bad_name(self):
def raise_attribute_error_with_bad_name():
raise AttributeError(name=12, obj=23)
@@ -4223,8 +4232,8 @@ class SuggestionFormattingTestBase:
return mod_name
- def get_import_from_suggestion(self, mod_dict, name):
- modname = self.make_module(mod_dict)
+ def get_import_from_suggestion(self, code, name):
+ modname = self.make_module(code)
def callable():
try:
@@ -4301,6 +4310,13 @@ class SuggestionFormattingTestBase:
self.assertIn("'_bluch'", self.get_import_from_suggestion(code, '_luch'))
self.assertNotIn("'_bluch'", self.get_import_from_suggestion(code, 'bluch'))
+ def test_import_from_suggestions_non_string(self):
+ modWithNonStringAttr = textwrap.dedent("""\
+ globals()[0] = 1
+ bluch = 1
+ """)
+ self.assertIn("'bluch'", self.get_import_from_suggestion(modWithNonStringAttr, 'blech'))
+
def test_import_from_suggestions_do_not_trigger_for_long_attributes(self):
code = "blech = None"
@@ -4397,6 +4413,15 @@ class SuggestionFormattingTestBase:
actual = self.get_suggestion(func)
self.assertIn("'ZeroDivisionError'?", actual)
+ def test_name_error_suggestions_with_non_string_candidates(self):
+ def func():
+ abc = 1
+ custom_globals = globals().copy()
+ custom_globals[0] = 1
+ print(eval("abv", custom_globals, locals()))
+ actual = self.get_suggestion(func)
+ self.assertIn("abc", actual)
+
def test_name_error_suggestions_do_not_trigger_for_long_names(self):
def func():
somethingverywronghehehehehehe = None
diff --git a/Lib/test/test_uuid.py b/Lib/test/test_uuid.py
index 958be5408ce..7ddacf07a2c 100755
--- a/Lib/test/test_uuid.py
+++ b/Lib/test/test_uuid.py
@@ -14,6 +14,7 @@ from unittest import mock
from test import support
from test.support import import_helper
+from test.support.script_helper import assert_python_ok
py_uuid = import_helper.import_fresh_module('uuid', blocked=['_uuid'])
c_uuid = import_helper.import_fresh_module('uuid', fresh=['_uuid'])
@@ -1217,10 +1218,37 @@ class BaseTestUUID:
class TestUUIDWithoutExtModule(BaseTestUUID, unittest.TestCase):
uuid = py_uuid
+
@unittest.skipUnless(c_uuid, 'requires the C _uuid module')
class TestUUIDWithExtModule(BaseTestUUID, unittest.TestCase):
uuid = c_uuid
+ def check_has_stable_libuuid_extractable_node(self):
+ if not self.uuid._has_stable_extractable_node:
+ self.skipTest("libuuid cannot deduce MAC address")
+
+ @unittest.skipUnless(os.name == 'posix', 'POSIX only')
+ def test_unix_getnode_from_libuuid(self):
+ self.check_has_stable_libuuid_extractable_node()
+ script = 'import uuid; print(uuid._unix_getnode())'
+ _, n_a, _ = assert_python_ok('-c', script)
+ _, n_b, _ = assert_python_ok('-c', script)
+ n_a, n_b = n_a.decode().strip(), n_b.decode().strip()
+ self.assertTrue(n_a.isdigit())
+ self.assertTrue(n_b.isdigit())
+ self.assertEqual(n_a, n_b)
+
+ @unittest.skipUnless(os.name == 'nt', 'Windows only')
+ def test_windows_getnode_from_libuuid(self):
+ self.check_has_stable_libuuid_extractable_node()
+ script = 'import uuid; print(uuid._windll_getnode())'
+ _, n_a, _ = assert_python_ok('-c', script)
+ _, n_b, _ = assert_python_ok('-c', script)
+ n_a, n_b = n_a.decode().strip(), n_b.decode().strip()
+ self.assertTrue(n_a.isdigit())
+ self.assertTrue(n_b.isdigit())
+ self.assertEqual(n_a, n_b)
+
class BaseTestInternals:
_uuid = py_uuid
diff --git a/Lib/test/test_zlib.py b/Lib/test/test_zlib.py
index 4d97fe56f3a..c57ab51eca1 100644
--- a/Lib/test/test_zlib.py
+++ b/Lib/test/test_zlib.py
@@ -119,6 +119,114 @@ class ChecksumTestCase(unittest.TestCase):
self.assertEqual(binascii.crc32(b'spam'), zlib.crc32(b'spam'))
+class ChecksumCombineMixin:
+ """Mixin class for testing checksum combination."""
+
+ N = 1000
+ default_iv: int
+
+ def parse_iv(self, iv):
+ """Parse an IV value.
+
+ - The default IV is returned if *iv* is None.
+ - A random IV is returned if *iv* is -1.
+ - Otherwise, *iv* is returned as is.
+ """
+ if iv is None:
+ return self.default_iv
+ if iv == -1:
+ return random.randint(1, 0x80000000)
+ return iv
+
+ def checksum(self, data, init=None):
+ """Compute the checksum of data with a given initial value.
+
+ The *init* value is parsed by ``parse_iv``.
+ """
+ iv = self.parse_iv(init)
+ return self._checksum(data, iv)
+
+ def _checksum(self, data, init):
+ raise NotImplementedError
+
+ def combine(self, a, b, blen):
+ """Combine two checksums together."""
+ raise NotImplementedError
+
+ def get_random_data(self, data_len, *, iv=None):
+ """Get a triplet (data, iv, checksum)."""
+ data = random.randbytes(data_len)
+ init = self.parse_iv(iv)
+ checksum = self.checksum(data, init)
+ return data, init, checksum
+
+ def test_combine_empty(self):
+ for _ in range(self.N):
+ a, iv, checksum = self.get_random_data(32, iv=-1)
+ res = self.combine(iv, self.checksum(a), len(a))
+ self.assertEqual(res, checksum)
+
+ def test_combine_no_iv(self):
+ for _ in range(self.N):
+ a, _, chk_a = self.get_random_data(32)
+ b, _, chk_b = self.get_random_data(64)
+ res = self.combine(chk_a, chk_b, len(b))
+ self.assertEqual(res, self.checksum(a + b))
+
+ def test_combine_no_iv_invalid_length(self):
+ a, _, chk_a = self.get_random_data(32)
+ b, _, chk_b = self.get_random_data(64)
+ checksum = self.checksum(a + b)
+ for invalid_len in [1, len(a), 48, len(b) + 1, 191]:
+ invalid_res = self.combine(chk_a, chk_b, invalid_len)
+ self.assertNotEqual(invalid_res, checksum)
+
+ self.assertRaises(TypeError, self.combine, 0, 0, "len")
+
+ def test_combine_with_iv(self):
+ for _ in range(self.N):
+ a, iv_a, chk_a_with_iv = self.get_random_data(32, iv=-1)
+ chk_a_no_iv = self.checksum(a)
+ b, iv_b, chk_b_with_iv = self.get_random_data(64, iv=-1)
+ chk_b_no_iv = self.checksum(b)
+
+ # We can represent c = COMBINE(CHK(a, iv_a), CHK(b, iv_b)) as:
+ #
+ # c = CHK(CHK(b'', iv_a) + CHK(a) + CHK(b'', iv_b) + CHK(b))
+ # = COMBINE(
+ # COMBINE(CHK(b'', iv_a), CHK(a)),
+ # COMBINE(CHK(b'', iv_b), CHK(b)),
+ # )
+ # = COMBINE(COMBINE(iv_a, CHK(a)), COMBINE(iv_b, CHK(b)))
+ tmp0 = self.combine(iv_a, chk_a_no_iv, len(a))
+ tmp1 = self.combine(iv_b, chk_b_no_iv, len(b))
+ expected = self.combine(tmp0, tmp1, len(b))
+ checksum = self.combine(chk_a_with_iv, chk_b_with_iv, len(b))
+ self.assertEqual(checksum, expected)
+
+
+class CRC32CombineTestCase(ChecksumCombineMixin, unittest.TestCase):
+
+ default_iv = 0
+
+ def _checksum(self, data, init):
+ return zlib.crc32(data, init)
+
+ def combine(self, a, b, blen):
+ return zlib.crc32_combine(a, b, blen)
+
+
+class Adler32CombineTestCase(ChecksumCombineMixin, unittest.TestCase):
+
+ default_iv = 1
+
+ def _checksum(self, data, init):
+ return zlib.adler32(data, init)
+
+ def combine(self, a, b, blen):
+ return zlib.adler32_combine(a, b, blen)
+
+
# Issue #10276 - check that inputs >=4 GiB are handled correctly.
class ChecksumBigBufferTestCase(unittest.TestCase):
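
The mixin above boils down to one identity: combining ``checksum(a)`` with ``checksum(b)`` and ``len(b)`` must equal the checksum of ``a + b``. A short sketch using the new combine functions exercised by these tests:

    import zlib

    a, b = b'hello, ', b'world'

    # CRC-32: combine per-chunk checksums instead of re-hashing the concatenation.
    assert zlib.crc32_combine(zlib.crc32(a), zlib.crc32(b), len(b)) == zlib.crc32(a + b)

    # Adler-32 works the same way.
    assert zlib.adler32_combine(zlib.adler32(a), zlib.adler32(b), len(b)) == zlib.adler32(a + b)
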
diff --git a/Lib/test/test_zstd.py b/Lib/test/test_zstd.py
index bc809603cbc..e475d9346b9 100644
--- a/Lib/test/test_zstd.py
+++ b/Lib/test/test_zstd.py
@@ -64,6 +64,10 @@ TRAINED_DICT = None
SUPPORT_MULTITHREADING = False
+C_INT_MIN = -(2**31)
+C_INT_MAX = (2**31) - 1
+
+
def setUpModule():
global SUPPORT_MULTITHREADING
SUPPORT_MULTITHREADING = CompressionParameter.nb_workers.bounds() != (0, 0)
@@ -195,14 +199,21 @@ class CompressorTestCase(unittest.TestCase):
self.assertRaises(TypeError, ZstdCompressor, zstd_dict=b"abcd1234")
self.assertRaises(TypeError, ZstdCompressor, zstd_dict={1: 2, 3: 4})
- with self.assertRaises(ValueError):
- ZstdCompressor(2**31)
- with self.assertRaises(ValueError):
- ZstdCompressor(options={2**31: 100})
+ # valid range for compression level is [-(1<<17), 22]
+ msg = r'illegal compression level {}; the valid range is \[-?\d+, -?\d+\]'
+ with self.assertRaisesRegex(ValueError, msg.format(C_INT_MAX)):
+ ZstdCompressor(C_INT_MAX)
+ with self.assertRaisesRegex(ValueError, msg.format(C_INT_MIN)):
+ ZstdCompressor(C_INT_MIN)
+ msg = r'illegal compression level; the valid range is \[-?\d+, -?\d+\]'
+ with self.assertRaisesRegex(ValueError, msg):
+ ZstdCompressor(level=-(2**1000))
+ with self.assertRaisesRegex(ValueError, msg):
+ ZstdCompressor(level=2**1000)
- with self.assertRaises(ZstdError):
+ with self.assertRaises(ValueError):
ZstdCompressor(options={CompressionParameter.window_log: 100})
- with self.assertRaises(ZstdError):
+ with self.assertRaises(ValueError):
ZstdCompressor(options={3333: 100})
# Method bad arguments
@@ -253,18 +264,32 @@ class CompressorTestCase(unittest.TestCase):
}
ZstdCompressor(options=d)
- # larger than signed int, ValueError
d1 = d.copy()
- d1[CompressionParameter.ldm_bucket_size_log] = 2**31
- self.assertRaises(ValueError, ZstdCompressor, options=d1)
+ # larger than signed int
+ d1[CompressionParameter.ldm_bucket_size_log] = C_INT_MAX
+ with self.assertRaises(ValueError):
+ ZstdCompressor(options=d1)
+ # smaller than signed int
+ d1[CompressionParameter.ldm_bucket_size_log] = C_INT_MIN
+ with self.assertRaises(ValueError):
+ ZstdCompressor(options=d1)
- # clamp compressionLevel
+ # out of bounds compression level
level_min, level_max = CompressionParameter.compression_level.bounds()
- compress(b'', level_max+1)
- compress(b'', level_min-1)
-
- compress(b'', options={CompressionParameter.compression_level:level_max+1})
- compress(b'', options={CompressionParameter.compression_level:level_min-1})
+ with self.assertRaises(ValueError):
+ compress(b'', level_max+1)
+ with self.assertRaises(ValueError):
+ compress(b'', level_min-1)
+ with self.assertRaises(ValueError):
+ compress(b'', 2**1000)
+ with self.assertRaises(ValueError):
+ compress(b'', -(2**1000))
+ with self.assertRaises(ValueError):
+ compress(b'', options={
+ CompressionParameter.compression_level: level_max+1})
+ with self.assertRaises(ValueError):
+ compress(b'', options={
+ CompressionParameter.compression_level: level_min-1})
# zstd lib doesn't support MT compression
if not SUPPORT_MULTITHREADING:
@@ -277,19 +302,19 @@ class CompressorTestCase(unittest.TestCase):
# out of bounds error msg
option = {CompressionParameter.window_log:100}
- with self.assertRaisesRegex(ZstdError,
- (r'Error when setting zstd compression parameter "window_log", '
- r'it should \d+ <= value <= \d+, provided value is 100\. '
- r'\((?:32|64)-bit build\)')):
+ with self.assertRaisesRegex(
+ ValueError,
+ "compression parameter 'window_log' received an illegal value 100; "
+ r'the valid range is \[-?\d+, -?\d+\]',
+ ):
compress(b'', options=option)
def test_unknown_compression_parameter(self):
KEY = 100001234
option = {CompressionParameter.compression_level: 10,
KEY: 200000000}
- pattern = (r'Invalid zstd compression parameter.*?'
- fr'"unknown parameter \(key {KEY}\)"')
- with self.assertRaisesRegex(ZstdError, pattern):
+ pattern = rf"invalid compression parameter 'unknown parameter \(key {KEY}\)'"
+ with self.assertRaisesRegex(ValueError, pattern):
ZstdCompressor(options=option)
@unittest.skipIf(not SUPPORT_MULTITHREADING,
@@ -384,12 +409,22 @@ class DecompressorTestCase(unittest.TestCase):
self.assertRaises(TypeError, ZstdDecompressor, options=b'abc')
with self.assertRaises(ValueError):
- ZstdDecompressor(options={2**31 : 100})
+ ZstdDecompressor(options={C_INT_MAX: 100})
+ with self.assertRaises(ValueError):
+ ZstdDecompressor(options={C_INT_MIN: 100})
+ with self.assertRaises(ValueError):
+ ZstdDecompressor(options={0: C_INT_MAX})
+ with self.assertRaises(OverflowError):
+ ZstdDecompressor(options={2**1000: 100})
+ with self.assertRaises(OverflowError):
+ ZstdDecompressor(options={-(2**1000): 100})
+ with self.assertRaises(OverflowError):
+ ZstdDecompressor(options={0: -(2**1000)})
- with self.assertRaises(ZstdError):
- ZstdDecompressor(options={DecompressionParameter.window_log_max:100})
- with self.assertRaises(ZstdError):
- ZstdDecompressor(options={3333 : 100})
+ with self.assertRaises(ValueError):
+ ZstdDecompressor(options={DecompressionParameter.window_log_max: 100})
+ with self.assertRaises(ValueError):
+ ZstdDecompressor(options={3333: 100})
empty = compress(b'')
lzd = ZstdDecompressor()
@@ -402,26 +437,52 @@ class DecompressorTestCase(unittest.TestCase):
d = {DecompressionParameter.window_log_max : 15}
ZstdDecompressor(options=d)
- # larger than signed int, ValueError
d1 = d.copy()
- d1[DecompressionParameter.window_log_max] = 2**31
- self.assertRaises(ValueError, ZstdDecompressor, None, d1)
+ # larger than signed int
+ d1[DecompressionParameter.window_log_max] = 2**1000
+ with self.assertRaises(OverflowError):
+ ZstdDecompressor(None, d1)
+ # smaller than signed int
+ d1[DecompressionParameter.window_log_max] = -(2**1000)
+ with self.assertRaises(OverflowError):
+ ZstdDecompressor(None, d1)
+
+ d1[DecompressionParameter.window_log_max] = C_INT_MAX
+ with self.assertRaises(ValueError):
+ ZstdDecompressor(None, d1)
+ d1[DecompressionParameter.window_log_max] = C_INT_MIN
+ with self.assertRaises(ValueError):
+ ZstdDecompressor(None, d1)
# out of bounds error msg
options = {DecompressionParameter.window_log_max:100}
- with self.assertRaisesRegex(ZstdError,
- (r'Error when setting zstd decompression parameter "window_log_max", '
- r'it should \d+ <= value <= \d+, provided value is 100\. '
- r'\((?:32|64)-bit build\)')):
+ with self.assertRaisesRegex(
+ ValueError,
+ "decompression parameter 'window_log_max' received an illegal value 100; "
+ r'the valid range is \[-?\d+, -?\d+\]',
+ ):
+ decompress(b'', options=options)
+
+ # out of bounds decompression parameter
+ options[DecompressionParameter.window_log_max] = C_INT_MAX
+ with self.assertRaises(ValueError):
+ decompress(b'', options=options)
+ options[DecompressionParameter.window_log_max] = C_INT_MIN
+ with self.assertRaises(ValueError):
+ decompress(b'', options=options)
+ options[DecompressionParameter.window_log_max] = 2**1000
+ with self.assertRaises(OverflowError):
+ decompress(b'', options=options)
+ options[DecompressionParameter.window_log_max] = -(2**1000)
+ with self.assertRaises(OverflowError):
decompress(b'', options=options)
def test_unknown_decompression_parameter(self):
KEY = 100001234
options = {DecompressionParameter.window_log_max: DecompressionParameter.window_log_max.bounds()[1],
KEY: 200000000}
- pattern = (r'Invalid zstd decompression parameter.*?'
- fr'"unknown parameter \(key {KEY}\)"')
- with self.assertRaisesRegex(ZstdError, pattern):
+ pattern = rf"invalid decompression parameter 'unknown parameter \(key {KEY}\)'"
+ with self.assertRaisesRegex(ValueError, pattern):
ZstdDecompressor(options=options)
def test_decompress_epilogue_flags(self):
@@ -1077,27 +1138,41 @@ class ZstdDictTestCase(unittest.TestCase):
ZstdDecompressor(zd)
# wrong type
- with self.assertRaisesRegex(TypeError, r'should be ZstdDict object'):
- ZstdCompressor(zstd_dict=(zd, b'123'))
- with self.assertRaisesRegex(TypeError, r'should be ZstdDict object'):
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
+ ZstdCompressor(zstd_dict=[zd, 1])
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
+ ZstdCompressor(zstd_dict=(zd, 1.0))
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
+ ZstdCompressor(zstd_dict=(zd,))
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
ZstdCompressor(zstd_dict=(zd, 1, 2))
- with self.assertRaisesRegex(TypeError, r'should be ZstdDict object'):
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
ZstdCompressor(zstd_dict=(zd, -1))
- with self.assertRaisesRegex(TypeError, r'should be ZstdDict object'):
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
ZstdCompressor(zstd_dict=(zd, 3))
-
- with self.assertRaisesRegex(TypeError, r'should be ZstdDict object'):
- ZstdDecompressor(zstd_dict=(zd, b'123'))
- with self.assertRaisesRegex(TypeError, r'should be ZstdDict object'):
+ with self.assertRaises(OverflowError):
+ ZstdCompressor(zstd_dict=(zd, 2**1000))
+ with self.assertRaises(OverflowError):
+ ZstdCompressor(zstd_dict=(zd, -2**1000))
+
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
+ ZstdDecompressor(zstd_dict=[zd, 1])
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
+ ZstdDecompressor(zstd_dict=(zd, 1.0))
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
+ ZstdDecompressor((zd,))
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
ZstdDecompressor((zd, 1, 2))
- with self.assertRaisesRegex(TypeError, r'should be ZstdDict object'):
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
ZstdDecompressor((zd, -1))
- with self.assertRaisesRegex(TypeError, r'should be ZstdDict object'):
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
ZstdDecompressor((zd, 3))
+ with self.assertRaises(OverflowError):
+ ZstdDecompressor((zd, 2**1000))
+ with self.assertRaises(OverflowError):
+ ZstdDecompressor((zd, -2**1000))
def test_train_dict(self):
-
-
TRAINED_DICT = train_dict(SAMPLES, DICT_SIZE1)
ZstdDict(TRAINED_DICT.dict_content, is_raw=False)
@@ -1179,17 +1254,36 @@ class ZstdDictTestCase(unittest.TestCase):
with self.assertRaises(TypeError):
_zstd.train_dict({}, (), 100)
with self.assertRaises(TypeError):
+ _zstd.train_dict(bytearray(), (), 100)
+ with self.assertRaises(TypeError):
_zstd.train_dict(b'', 99, 100)
with self.assertRaises(TypeError):
+ _zstd.train_dict(b'', [], 100)
+ with self.assertRaises(TypeError):
_zstd.train_dict(b'', (), 100.1)
+ with self.assertRaises(TypeError):
+ _zstd.train_dict(b'', (99.1,), 100)
+ with self.assertRaises(ValueError):
+ _zstd.train_dict(b'abc', (4, -1), 100)
+ with self.assertRaises(ValueError):
+ _zstd.train_dict(b'abc', (2,), 100)
+ with self.assertRaises(ValueError):
+ _zstd.train_dict(b'', (99,), 100)
# size > size_t
with self.assertRaises(ValueError):
- _zstd.train_dict(b'', (2**64+1,), 100)
+ _zstd.train_dict(b'', (2**1000,), 100)
+ with self.assertRaises(ValueError):
+ _zstd.train_dict(b'', (-2**1000,), 100)
# dict_size <= 0
with self.assertRaises(ValueError):
_zstd.train_dict(b'', (), 0)
+ with self.assertRaises(ValueError):
+ _zstd.train_dict(b'', (), -1)
+
+ with self.assertRaises(ZstdError):
+ _zstd.train_dict(b'', (), 1)
def test_finalize_dict_c(self):
with self.assertRaises(TypeError):
@@ -1199,21 +1293,50 @@ class ZstdDictTestCase(unittest.TestCase):
with self.assertRaises(TypeError):
_zstd.finalize_dict({}, b'', (), 100, 5)
with self.assertRaises(TypeError):
+ _zstd.finalize_dict(bytearray(TRAINED_DICT.dict_content), b'', (), 100, 5)
+ with self.assertRaises(TypeError):
_zstd.finalize_dict(TRAINED_DICT.dict_content, {}, (), 100, 5)
with self.assertRaises(TypeError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, bytearray(), (), 100, 5)
+ with self.assertRaises(TypeError):
_zstd.finalize_dict(TRAINED_DICT.dict_content, b'', 99, 100, 5)
with self.assertRaises(TypeError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', [], 100, 5)
+ with self.assertRaises(TypeError):
_zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), 100.1, 5)
with self.assertRaises(TypeError):
_zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), 100, 5.1)
+ with self.assertRaises(ValueError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'abc', (4, -1), 100, 5)
+ with self.assertRaises(ValueError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'abc', (2,), 100, 5)
+ with self.assertRaises(ValueError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (99,), 100, 5)
+
# size > size_t
with self.assertRaises(ValueError):
- _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (2**64+1,), 100, 5)
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (2**1000,), 100, 5)
+ with self.assertRaises(ValueError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (-2**1000,), 100, 5)
# dict_size <= 0
with self.assertRaises(ValueError):
_zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), 0, 5)
+ with self.assertRaises(ValueError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), -1, 5)
+ with self.assertRaises(OverflowError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), 2**1000, 5)
+ with self.assertRaises(OverflowError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), -2**1000, 5)
+
+ with self.assertRaises(OverflowError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), 100, 2**1000)
+ with self.assertRaises(OverflowError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), 100, -2**1000)
+
+ with self.assertRaises(ZstdError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), 100, 5)
def test_train_buffer_protocol_samples(self):
def _nbytes(dat):
@@ -1424,11 +1547,11 @@ class FileTestCase(unittest.TestCase):
ZstdFile(io.BytesIO(COMPRESSED_100_PLUS_32KB), "rw")
with self.assertRaisesRegex(TypeError,
- r"NOT be a CompressionParameter"):
+ r"not be a CompressionParameter"):
ZstdFile(io.BytesIO(), 'rb',
options={CompressionParameter.compression_level:5})
with self.assertRaisesRegex(TypeError,
- r"NOT be a DecompressionParameter"):
+ r"not be a DecompressionParameter"):
ZstdFile(io.BytesIO(), 'wb',
options={DecompressionParameter.window_log_max:21})
@@ -1439,19 +1562,19 @@ class FileTestCase(unittest.TestCase):
with self.assertRaises(TypeError):
ZstdFile(io.BytesIO(), "w", level='asd')
# CHECK_UNKNOWN and anything above CHECK_ID_MAX should be invalid.
- with self.assertRaises(ZstdError):
+ with self.assertRaises(ValueError):
ZstdFile(io.BytesIO(), "w", options={999:9999})
- with self.assertRaises(ZstdError):
+ with self.assertRaises(ValueError):
ZstdFile(io.BytesIO(), "w", options={CompressionParameter.window_log:99})
with self.assertRaises(TypeError):
ZstdFile(io.BytesIO(COMPRESSED_100_PLUS_32KB), "r", options=33)
- with self.assertRaises(ValueError):
+ with self.assertRaises(OverflowError):
ZstdFile(io.BytesIO(COMPRESSED_100_PLUS_32KB),
options={DecompressionParameter.window_log_max:2**31})
- with self.assertRaises(ZstdError):
+ with self.assertRaises(ValueError):
ZstdFile(io.BytesIO(COMPRESSED_100_PLUS_32KB),
options={444:333})
@@ -1467,7 +1590,7 @@ class FileTestCase(unittest.TestCase):
tmp_f.write(DAT_130K_C)
filename = tmp_f.name
- with self.assertRaises(ValueError):
+ with self.assertRaises(TypeError):
ZstdFile(filename, options={'a':'b'})
# for PyPy
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index 559a7aecbde..7e71755068e 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -86,7 +86,7 @@ def _all_string_prefixes():
# The valid string prefixes. Only contain the lower case versions,
# and don't contain any permutations (include 'fr', but not
# 'rf'). The various permutations will be generated.
- _valid_string_prefixes = ['b', 'r', 'u', 'f', 'br', 'fr']
+ _valid_string_prefixes = ['b', 'r', 'u', 'f', 't', 'br', 'fr', 'tr']
# if we add binary f-strings, add: ['fb', 'fbr']
result = {''}
for prefix in _valid_string_prefixes:
diff --git a/Lib/traceback.py b/Lib/traceback.py
index 17b082eced6..a1f175dbbaa 100644
--- a/Lib/traceback.py
+++ b/Lib/traceback.py
@@ -1595,7 +1595,11 @@ def _compute_suggestion_error(exc_value, tb, wrong_name):
if isinstance(exc_value, AttributeError):
obj = exc_value.obj
try:
- d = dir(obj)
+ try:
+ d = dir(obj)
+ except TypeError: # Attributes are unsortable, e.g. int and str
+ d = list(obj.__class__.__dict__.keys()) + list(obj.__dict__.keys())
+ d = sorted([x for x in d if isinstance(x, str)])
hide_underscored = (wrong_name[:1] != '_')
if hide_underscored and tb is not None:
while tb.tb_next is not None:
@@ -1610,7 +1614,11 @@ def _compute_suggestion_error(exc_value, tb, wrong_name):
elif isinstance(exc_value, ImportError):
try:
mod = __import__(exc_value.name)
- d = dir(mod)
+ try:
+ d = dir(mod)
+ except TypeError: # Attributes are unsortable, e.g. int and str
+ d = list(mod.__dict__.keys())
+ d = sorted([x for x in d if isinstance(x, str)])
if wrong_name[:1] != '_':
d = [x for x in d if x[:1] != '_']
except Exception:
@@ -1628,6 +1636,7 @@ def _compute_suggestion_error(exc_value, tb, wrong_name):
+ list(frame.f_globals)
+ list(frame.f_builtins)
)
+ d = [x for x in d if isinstance(x, str)]
# Check first if we are in a method and the instance
# has the wrong name as attribute
diff --git a/Lib/uuid.py b/Lib/uuid.py
index 036ffebf67a..06f81a7c338 100644
--- a/Lib/uuid.py
+++ b/Lib/uuid.py
@@ -633,22 +633,24 @@ def _netstat_getnode():
try:
import _uuid
_generate_time_safe = getattr(_uuid, "generate_time_safe", None)
+ _has_stable_extractable_node = _uuid.has_stable_extractable_node
_UuidCreate = getattr(_uuid, "UuidCreate", None)
except ImportError:
_uuid = None
_generate_time_safe = None
+ _has_stable_extractable_node = False
_UuidCreate = None
def _unix_getnode():
"""Get the hardware address on Unix using the _uuid extension module."""
- if _generate_time_safe:
+ if _generate_time_safe and _has_stable_extractable_node:
uuid_time, _ = _generate_time_safe()
return UUID(bytes=uuid_time).node
def _windll_getnode():
"""Get the hardware address on Windows using the _uuid extension module."""
- if _UuidCreate:
+ if _UuidCreate and _has_stable_extractable_node:
uuid_bytes = _UuidCreate()
return UUID(bytes_le=uuid_bytes).node
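
A rough sketch of the gating this diff introduces (it uses the private helpers touched above, so treat it as illustrative only): the libuuid/Windows node is trusted only when the _uuid extension reports a stable, extractable hardware address, otherwise uuid.getnode() falls through to the other strategies.

    import os
    import uuid

    # _has_stable_extractable_node is populated from the _uuid extension module.
    if getattr(uuid, '_has_stable_extractable_node', False):
        getter = uuid._unix_getnode if os.name == 'posix' else uuid._windll_getnode
        print('node from libuuid:', getter())
    else:
        print('fallback node:', uuid.getnode())
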
diff --git a/Misc/ACKS b/Misc/ACKS
index 571142e7e49..2435943f1bb 100644
--- a/Misc/ACKS
+++ b/Misc/ACKS
@@ -1365,6 +1365,7 @@ Milan Oberkirch
Pascal Oberndoerfer
Géry Ogam
Seonkyo Ok
+Andrea Oliveri
Jeffrey Ollie
Adam Olsen
Bryan Olson
diff --git a/Misc/NEWS.d/next/Build/2025-05-30-11-02-30.gh-issue-134923.gBkRg4.rst b/Misc/NEWS.d/next/Build/2025-05-30-11-02-30.gh-issue-134923.gBkRg4.rst
new file mode 100644
index 00000000000..a742a6add8a
--- /dev/null
+++ b/Misc/NEWS.d/next/Build/2025-05-30-11-02-30.gh-issue-134923.gBkRg4.rst
@@ -0,0 +1,3 @@
+Windows builds with profile-guided optimization enabled now use
+``/GENPROFILE`` and ``/USEPROFILE`` instead of deprecated ``/LTCG:``
+options.
diff --git a/Misc/NEWS.d/next/C_API/2023-10-18-14-36-35.gh-issue-108512.fMZLfr.rst b/Misc/NEWS.d/next/C_API/2023-10-18-14-36-35.gh-issue-108512.fMZLfr.rst
new file mode 100644
index 00000000000..279e588f3ad
--- /dev/null
+++ b/Misc/NEWS.d/next/C_API/2023-10-18-14-36-35.gh-issue-108512.fMZLfr.rst
@@ -0,0 +1,2 @@
+Add functions :c:func:`PySys_GetAttr`, :c:func:`PySys_GetAttrString`,
+:c:func:`PySys_GetOptionalAttr` and :c:func:`PySys_GetOptionalAttrString`.
diff --git a/Misc/NEWS.d/next/C_API/2025-05-13-16-06-46.gh-issue-133968.6alWst.rst b/Misc/NEWS.d/next/C_API/2025-05-13-16-06-46.gh-issue-133968.6alWst.rst
new file mode 100644
index 00000000000..47d5a3bda39
--- /dev/null
+++ b/Misc/NEWS.d/next/C_API/2025-05-13-16-06-46.gh-issue-133968.6alWst.rst
@@ -0,0 +1,4 @@
+Add :c:func:`PyUnicodeWriter_WriteASCII` function to write an ASCII string
+into a :c:type:`PyUnicodeWriter`. The function is faster than
+:c:func:`PyUnicodeWriter_WriteUTF8`, but has undefined behavior if the
+input string contains non-ASCII characters. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/C_API/2025-05-30-11-33-17.gh-issue-134745.GN-zk2.rst b/Misc/NEWS.d/next/C_API/2025-05-30-11-33-17.gh-issue-134745.GN-zk2.rst
new file mode 100644
index 00000000000..a85d2e90576
--- /dev/null
+++ b/Misc/NEWS.d/next/C_API/2025-05-30-11-33-17.gh-issue-134745.GN-zk2.rst
@@ -0,0 +1,3 @@
+Change :c:func:`!PyThread_allocate_lock` implementation to ``PyMutex``.
+On Windows, :c:func:`!PyThread_acquire_lock_timed` now supports the *intr_flag*
+parameter: it can be interrupted. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/C_API/2025-06-02-13-19-22.gh-issue-134989.sDDyBN.rst b/Misc/NEWS.d/next/C_API/2025-06-02-13-19-22.gh-issue-134989.sDDyBN.rst
new file mode 100644
index 00000000000..e49f7651065
--- /dev/null
+++ b/Misc/NEWS.d/next/C_API/2025-06-02-13-19-22.gh-issue-134989.sDDyBN.rst
@@ -0,0 +1,2 @@
+Implement :c:func:`PyObject_DelAttr` and :c:func:`PyObject_DelAttrString` as
+macros in the limited C API 3.12 and older. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-03-14-13-08-20.gh-issue-127266._tyfBp.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-03-14-13-08-20.gh-issue-127266._tyfBp.rst
new file mode 100644
index 00000000000..b26977628de
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-03-14-13-08-20.gh-issue-127266._tyfBp.rst
@@ -0,0 +1,6 @@
+In the free-threaded build, avoid data races caused by updating type slots
+or type flags after the type was initially created. For those (typically
+rare) cases, use the stop-the-world mechanism. Remove the use of atomics
+when reading or writing type flags. The use of atomics is not sufficient to
+avoid races (since flags are sometimes read without a lock and without
+atomics) and is no longer required.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-04-30-14-13-01.gh-issue-132554.GqQaUp.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-04-30-14-13-01.gh-issue-132554.GqQaUp.rst
new file mode 100644
index 00000000000..bfe2d633309
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-04-30-14-13-01.gh-issue-132554.GqQaUp.rst
@@ -0,0 +1,4 @@
+Change iteration to use "virtual iterators" for sequences. Instead of
+creating an iterator, a tagged integer representing the next index is pushed
+to the stack above the iterable. For non-sequence iterators, ``NULL`` is
+pushed.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-05-18-14-33-23.gh-issue-69605.ZMO49F.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-18-14-33-23.gh-issue-69605.ZMO49F.rst
new file mode 100644
index 00000000000..7b7275fee69
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-18-14-33-23.gh-issue-69605.ZMO49F.rst
@@ -0,0 +1,2 @@
+When auto-completing an import in the :term:`REPL`, finding no candidates
+now yields no suggestions, rather than falling back to names from the current
+namespace.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-05-26-15-55-50.gh-issue-133912.-xAguL.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-26-15-55-50.gh-issue-133912.-xAguL.rst
new file mode 100644
index 00000000000..2118f3d0c35
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-26-15-55-50.gh-issue-133912.-xAguL.rst
@@ -0,0 +1,2 @@
+Fix the C API function ``PyObject_GenericSetDict`` to handle extension
+classes with inline values.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-05-27-18-59-54.gh-issue-134679.FWPBu6.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-27-18-59-54.gh-issue-134679.FWPBu6.rst
new file mode 100644
index 00000000000..22f1282fea1
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-27-18-59-54.gh-issue-134679.FWPBu6.rst
@@ -0,0 +1,2 @@
+Fix crash in the :term:`free threading` build's QSBR code that could occur
+when changing an object's ``__dict__`` attribute.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-05-27-20-21-34.gh-issue-131798.b32zkl.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-27-20-21-34.gh-issue-131798.b32zkl.rst
new file mode 100644
index 00000000000..ed4b31bd7be
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-27-20-21-34.gh-issue-131798.b32zkl.rst
@@ -0,0 +1 @@
+Allow the JIT to remove unnecessary ``_ITER_CHECK_TUPLE`` ops.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-05-28-23-58-50.gh-issue-117852.BO9g7z.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-28-23-58-50.gh-issue-117852.BO9g7z.rst
new file mode 100644
index 00000000000..fc71cd21a36
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-28-23-58-50.gh-issue-117852.BO9g7z.rst
@@ -0,0 +1 @@
+Fix argument checking of :meth:`~agen.athrow`.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-05-30-15-56-19.gh-issue-134908.3a7PxM.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-30-15-56-19.gh-issue-134908.3a7PxM.rst
new file mode 100644
index 00000000000..3178f0aaf88
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-30-15-56-19.gh-issue-134908.3a7PxM.rst
@@ -0,0 +1 @@
+Fix crash when iterating over lines in a text file on the :term:`free threaded <free threading>` build.
diff --git a/Misc/NEWS.d/next/Library/2025-03-09-03-13-41.gh-issue-130999.tBRBVB.rst b/Misc/NEWS.d/next/Library/2025-03-09-03-13-41.gh-issue-130999.tBRBVB.rst
new file mode 100644
index 00000000000..157522f9aab
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-03-09-03-13-41.gh-issue-130999.tBRBVB.rst
@@ -0,0 +1,2 @@
+Avoid exiting the new REPL, and keep offering suggestions, when an error's
+suggestion candidates include non-string names.
diff --git a/Misc/NEWS.d/next/Library/2025-03-13-20-48-58.gh-issue-123471.cM4w4f.rst b/Misc/NEWS.d/next/Library/2025-03-13-20-48-58.gh-issue-123471.cM4w4f.rst
new file mode 100644
index 00000000000..cfc783900de
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-03-13-20-48-58.gh-issue-123471.cM4w4f.rst
@@ -0,0 +1 @@
+Make concurrent iterations over :class:`itertools.cycle` safe under free-threading.
diff --git a/Misc/NEWS.d/next/Library/2025-04-07-06-41-54.gh-issue-131884.ym9BJN.rst b/Misc/NEWS.d/next/Library/2025-04-07-06-41-54.gh-issue-131884.ym9BJN.rst
new file mode 100644
index 00000000000..d9e2eae02dc
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-04-07-06-41-54.gh-issue-131884.ym9BJN.rst
@@ -0,0 +1 @@
+Fix formatting issues in :func:`json.dump` when both *indent* and *skipkeys* are used.
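
For context, the combination this entry covers is just a pretty-printed dump that also skips unserializable keys, e.g.:

    import json

    # Tuple keys are not JSON-serializable; skipkeys=True drops them and indent=2
    # pretty-prints the rest. The interaction of these two options is what the
    # fix above addresses.
    print(json.dumps({(1, 2): 'dropped', 'kept': 1}, skipkeys=True, indent=2))
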
diff --git a/Misc/NEWS.d/next/Library/2025-04-21-01-03-15.gh-issue-127081.WXRliX.rst b/Misc/NEWS.d/next/Library/2025-04-21-01-03-15.gh-issue-127081.WXRliX.rst
new file mode 100644
index 00000000000..63fed60ced0
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-04-21-01-03-15.gh-issue-127081.WXRliX.rst
@@ -0,0 +1,2 @@
+Fix libc thread safety issues with :mod:`os` by replacing ``getlogin`` with
+the re-entrant ``getlogin_r`` variant.
diff --git a/Misc/NEWS.d/next/Library/2025-04-25-16-06-53.gh-issue-132908.wV5rja.rst b/Misc/NEWS.d/next/Library/2025-04-25-16-06-53.gh-issue-132908.wV5rja.rst
new file mode 100644
index 00000000000..e33b061bb9b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-04-25-16-06-53.gh-issue-132908.wV5rja.rst
@@ -0,0 +1,2 @@
+Add :func:`math.isnormal` and :func:`math.issubnormal` functions. Patch by
+Sergey B Kirpichev.
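A minimal sketch of the new predicates, assuming they follow the usual C99 ``isnormal()``/subnormal classification (zero, infinities and NaNs are not normal; subnormals are the tiny values below the smallest normal float)::

    import math
    import sys

    print(math.isnormal(1.0))                        # True
    print(math.isnormal(0.0))                        # False: zero is not a normal float
    print(math.issubnormal(sys.float_info.min))      # False: smallest *normal* positive float
    print(math.issubnormal(sys.float_info.min / 2))  # True: the result is subnormal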
diff --git a/Misc/NEWS.d/next/Library/2025-04-29-11-48-46.gh-issue-132876.lyTQGZ.rst b/Misc/NEWS.d/next/Library/2025-04-29-11-48-46.gh-issue-132876.lyTQGZ.rst
new file mode 100644
index 00000000000..cb3ca3321e3
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-04-29-11-48-46.gh-issue-132876.lyTQGZ.rst
@@ -0,0 +1,4 @@
+On Windows versions before Windows 11, the C ``ldexp()`` function does not
+round subnormal results, although it should. Python's :func:`math.ldexp`
+wrapper now rounds them itself, so results may change slightly, in rare
+cases of very small results, on those Windows versions.
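As a rough illustration of the affected range (a sketch only; exact pre-Windows-11 results depended on the C runtime)::

    import math

    # 2**-1074 is the smallest positive subnormal double; results this small
    # are the ones that are now rounded consistently on all Windows versions.
    print(math.ldexp(1.0, -1074))   # 5e-324
    print(math.ldexp(3.0, -1075))   # halfway case that must round into the subnormal range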
diff --git a/Misc/NEWS.d/next/Library/2025-05-01-10-56-44.gh-issue-132813.rKurvp.rst b/Misc/NEWS.d/next/Library/2025-05-01-10-56-44.gh-issue-132813.rKurvp.rst
new file mode 100644
index 00000000000..55608528a45
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-01-10-56-44.gh-issue-132813.rKurvp.rst
@@ -0,0 +1,2 @@
+Improve error messages for incorrect types and values of :class:`csv.Dialect`
+attributes.
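A small sketch of the improved diagnostics; the exact message text is illustrative rather than a guaranteed format::

    import csv

    try:
        csv.reader([], delimiter=0)   # delimiter must be a one-character string
    except TypeError as exc:
        print(exc)                    # the message now names the offending type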
diff --git a/Misc/NEWS.d/next/Library/2025-05-08-13-43-19.gh-issue-133489.9eGS1Z.rst b/Misc/NEWS.d/next/Library/2025-05-08-13-43-19.gh-issue-133489.9eGS1Z.rst
new file mode 100644
index 00000000000..0c07beb7693
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-08-13-43-19.gh-issue-133489.9eGS1Z.rst
@@ -0,0 +1,2 @@
+:func:`random.getrandbits` can now generate more than 2\ :sup:`31` bits.
+:func:`random.randbytes` can now generate more than 256 MiB.
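A hedged sketch of the lifted limits (note that both calls allocate hundreds of megabytes)::

    import random

    bits = random.getrandbits(2**31 + 1)        # previously limited to fewer bits
    assert bits.bit_length() <= 2**31 + 1

    blob = random.randbytes(300 * 1024 * 1024)  # > 256 MiB now works
    assert len(blob) == 300 * 1024 * 1024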
diff --git a/Misc/NEWS.d/next/Library/2025-05-11-10-01-48.gh-issue-133866.g3dHP_.rst b/Misc/NEWS.d/next/Library/2025-05-11-10-01-48.gh-issue-133866.g3dHP_.rst
new file mode 100644
index 00000000000..00f13c9a305
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-11-10-01-48.gh-issue-133866.g3dHP_.rst
@@ -0,0 +1,3 @@
+Remove the undocumented function :func:`!ctypes.SetPointerType`,
+which has been deprecated since Python 3.13.
+Patch by Bénédikt Tran.
diff --git a/Misc/NEWS.d/next/Library/2025-05-15-00-27-09.gh-issue-134004.e8k4-R.rst b/Misc/NEWS.d/next/Library/2025-05-15-00-27-09.gh-issue-134004.e8k4-R.rst
new file mode 100644
index 00000000000..a9a56d9239b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-15-00-27-09.gh-issue-134004.e8k4-R.rst
@@ -0,0 +1,2 @@
+:mod:`shelve` as well as the underlying :mod:`!dbm.dumb` and :mod:`!dbm.sqlite3` now have :meth:`!reorganize` methods
+to reclaim unused free space previously occupied by deleted entries.
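A short usage sketch; ``inventory.db`` is a made-up filename, and the method is only useful when the underlying backend supports compaction::

    import shelve

    with shelve.open("inventory.db") as db:
        db["obsolete"] = list(range(100_000))
        del db["obsolete"]
        db.reorganize()   # compact the file, reclaiming space left by deletions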
diff --git a/Misc/NEWS.d/next/Library/2025-05-23-20-01-52.gh-issue-134580.xnaJ70.rst b/Misc/NEWS.d/next/Library/2025-05-23-20-01-52.gh-issue-134580.xnaJ70.rst
new file mode 100644
index 00000000000..979d310d3ce
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-23-20-01-52.gh-issue-134580.xnaJ70.rst
@@ -0,0 +1,3 @@
+Improved the styling of HTML diff pages generated by the
+:class:`difflib.HtmlDiff` class, and migrated the output to the HTML5
+standard.
diff --git a/Misc/NEWS.d/next/Library/2025-05-24-13-10-35.gh-issue-134210.0IuMY2.rst b/Misc/NEWS.d/next/Library/2025-05-24-13-10-35.gh-issue-134210.0IuMY2.rst
new file mode 100644
index 00000000000..b440e8308db
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-24-13-10-35.gh-issue-134210.0IuMY2.rst
@@ -0,0 +1,2 @@
+:func:`curses.window.getch` now correctly handles signals. Patch by Bénédikt
+Tran.
diff --git a/Misc/NEWS.d/next/Library/2025-05-25-13-46-37.gh-issue-134635.ZlPrlX.rst b/Misc/NEWS.d/next/Library/2025-05-25-13-46-37.gh-issue-134635.ZlPrlX.rst
new file mode 100644
index 00000000000..4cabbf2f896
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-25-13-46-37.gh-issue-134635.ZlPrlX.rst
@@ -0,0 +1,3 @@
+:mod:`zlib`: Allow combining Adler-32 and CRC-32 checksums via
+:func:`~zlib.adler32_combine` and :func:`~zlib.crc32_combine`. Patch by
+Callum Attryde and Bénédikt Tran.
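A quick check of the new combiners, assuming they follow zlib's ``crc32_combine(crc1, crc2, len2)`` convention of taking the second block's length::

    import zlib

    part1, part2 = b"spam", b"eggs"

    crc = zlib.crc32_combine(zlib.crc32(part1), zlib.crc32(part2), len(part2))
    assert crc == zlib.crc32(part1 + part2)

    adler = zlib.adler32_combine(zlib.adler32(part1), zlib.adler32(part2), len(part2))
    assert adler == zlib.adler32(part1 + part2)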
diff --git a/Misc/NEWS.d/next/Library/2025-05-26-11-01-54.gh-issue-134531.my1Fzt.rst b/Misc/NEWS.d/next/Library/2025-05-26-11-01-54.gh-issue-134531.my1Fzt.rst
new file mode 100644
index 00000000000..ee5690df5c4
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-26-11-01-54.gh-issue-134531.my1Fzt.rst
@@ -0,0 +1,2 @@
+:mod:`!_hashlib`: Rename internal C functions for :class:`!_hashlib.HASH`
+and :class:`!_hashlib.HASHXOF` objects. Patch by Bénédikt Tran.
diff --git a/Misc/NEWS.d/next/Library/2025-05-26-12-31-08.gh-issue-132710.ApU3TZ.rst b/Misc/NEWS.d/next/Library/2025-05-26-12-31-08.gh-issue-132710.ApU3TZ.rst
new file mode 100644
index 00000000000..b7011517aa9
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-26-12-31-08.gh-issue-132710.ApU3TZ.rst
@@ -0,0 +1,3 @@
+If possible, ensure that :func:`uuid.getnode` returns the same result even
+across different processes. Previously, the result was constant only within
+the same process. Patch by Bénédikt Tran.
diff --git a/Misc/NEWS.d/next/Library/2025-05-26-14-04-39.gh-issue-134696.P04xUa.rst b/Misc/NEWS.d/next/Library/2025-05-26-14-04-39.gh-issue-134696.P04xUa.rst
new file mode 100644
index 00000000000..282eb088b89
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-26-14-04-39.gh-issue-134696.P04xUa.rst
@@ -0,0 +1,5 @@
+Built-in HACL* and OpenSSL implementations of hash function constructors
+now correctly accept the same *documented* named arguments. For instance,
+:func:`~hashlib.md5` could previously be invoked as ``md5(data=data)``
+or ``md5(string=string)`` depending on the underlying implementation,
+but these calls were not compatible. Patch by Bénédikt Tran.
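A minimal sanity check: with this change, a given construction spelling no longer depends on which implementation backs the constructor (the keyword details are as described above; the positional form shown here works either way)::

    import hashlib

    data = b"abc"
    assert hashlib.md5(data).hexdigest() == hashlib.new("md5", data).hexdigest()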
diff --git a/Misc/NEWS.d/next/Library/2025-05-26-17-06-40.gh-issue-134637.9-3zRL.rst b/Misc/NEWS.d/next/Library/2025-05-26-17-06-40.gh-issue-134637.9-3zRL.rst
new file mode 100644
index 00000000000..2a4d8725210
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-26-17-06-40.gh-issue-134637.9-3zRL.rst
@@ -0,0 +1 @@
+Fix a performance regression when calling a :mod:`ctypes` function pointer on the :term:`free threaded <free threading>` build.
diff --git a/Misc/NEWS.d/next/Library/2025-05-26-22-18-32.gh-issue-134771.RKXpLT.rst b/Misc/NEWS.d/next/Library/2025-05-26-22-18-32.gh-issue-134771.RKXpLT.rst
new file mode 100644
index 00000000000..4b70c6ef398
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-26-22-18-32.gh-issue-134771.RKXpLT.rst
@@ -0,0 +1,2 @@
+The ``time_clockid_converter()`` function now selects the correct type for
+``clockid_t`` on Cygwin, which fixes a build error.
diff --git a/Misc/NEWS.d/next/Library/2025-05-27-11-13-51.gh-issue-133579.KY9M6S.rst b/Misc/NEWS.d/next/Library/2025-05-27-11-13-51.gh-issue-133579.KY9M6S.rst
new file mode 100644
index 00000000000..129d5d98425
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-27-11-13-51.gh-issue-133579.KY9M6S.rst
@@ -0,0 +1,8 @@
+:ref:`curses.window <curses-window-objects>`: Consistently report failures
+of curses C API calls in Window methods by raising a :exc:`curses.error`.
+This affects :meth:`~curses.window.addch`, :meth:`~curses.window.addnstr`,
+:meth:`~curses.window.addstr`, :meth:`~curses.window.border`,
+:meth:`~curses.window.box`, :meth:`~curses.window.chgat`,
+:meth:`~curses.window.getbkgd`, :meth:`~curses.window.inch`,
+:meth:`~curses.window.insstr` and :meth:`~curses.window.insnstr`.
+Patch by Bénédikt Tran.
diff --git a/Misc/NEWS.d/next/Library/2025-05-27-11-18-13.gh-issue-133579.ohtgdC.rst b/Misc/NEWS.d/next/Library/2025-05-27-11-18-13.gh-issue-133579.ohtgdC.rst
new file mode 100644
index 00000000000..e0ef959f125
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-27-11-18-13.gh-issue-133579.ohtgdC.rst
@@ -0,0 +1,3 @@
+:meth:`curses.window.refresh` and :meth:`curses.window.noutrefresh` now raise
+a :exc:`TypeError` instead of :exc:`curses.error` when called with an incorrect
+number of arguments for :ref:`pads <windows-and-pads>`. Patch by Bénédikt Tran.
diff --git a/Misc/NEWS.d/next/Library/2025-05-27-11-24-38.gh-issue-133579.WGPUC1.rst b/Misc/NEWS.d/next/Library/2025-05-27-11-24-38.gh-issue-133579.WGPUC1.rst
new file mode 100644
index 00000000000..552b7ca1a71
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-27-11-24-38.gh-issue-133579.WGPUC1.rst
@@ -0,0 +1,7 @@
+:mod:`curses`: Consistently report failures of curses C API calls in
+module-level methods by raising a :exc:`curses.error`. This affects
+:func:`~curses.assume_default_colors`, :func:`~curses.baudrate`,
+:func:`~curses.cbreak`, :func:`~curses.echo`, :func:`~curses.longname`,
+:func:`~curses.initscr`, :func:`~curses.nl`, :func:`~curses.raw`,
+:func:`~curses.termattrs`, :func:`~curses.termname` and :func:`~curses.unctrl`.
+Patch by Bénédikt Tran.
diff --git a/Misc/NEWS.d/next/Library/2025-05-28-15-53-27.gh-issue-128840.Nur2pB.rst b/Misc/NEWS.d/next/Library/2025-05-28-15-53-27.gh-issue-128840.Nur2pB.rst
new file mode 100644
index 00000000000..faff433aa4b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-28-15-53-27.gh-issue-128840.Nur2pB.rst
@@ -0,0 +1 @@
+Fix parsing of long IPv6 addresses with an embedded IPv4 address.
diff --git a/Misc/NEWS.d/next/Library/2025-05-28-20-49-29.gh-issue-134857.dVYXVO.rst b/Misc/NEWS.d/next/Library/2025-05-28-20-49-29.gh-issue-134857.dVYXVO.rst
new file mode 100644
index 00000000000..92e38c0bb5a
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-28-20-49-29.gh-issue-134857.dVYXVO.rst
@@ -0,0 +1,3 @@
+Improve error reports for :mod:`doctest`\ s run with :mod:`unittest`. Remove
+:mod:`!doctest` module frames from tracebacks and the redundant newline
+character from failure messages.
diff --git a/Misc/NEWS.d/next/Library/2025-05-29-06-53-40.gh-issue-134885.-_L22o.rst b/Misc/NEWS.d/next/Library/2025-05-29-06-53-40.gh-issue-134885.-_L22o.rst
new file mode 100644
index 00000000000..4b05d42c109
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-29-06-53-40.gh-issue-134885.-_L22o.rst
@@ -0,0 +1,2 @@
+Fix possible crash in the :mod:`compression.zstd` module related to setting
+parameter types. Patch by Jelle Zijlstra.
diff --git a/Misc/NEWS.d/next/Library/2025-05-29-17-39-13.gh-issue-108885.MegCRA.rst b/Misc/NEWS.d/next/Library/2025-05-29-17-39-13.gh-issue-108885.MegCRA.rst
new file mode 100644
index 00000000000..e37cf121f5f
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-29-17-39-13.gh-issue-108885.MegCRA.rst
@@ -0,0 +1,3 @@
+Run each example as a subtest in unit tests synthesized by
+:func:`doctest.DocFileSuite` and :func:`doctest.DocTestSuite`.
+Add the :meth:`doctest.DocTestRunner.report_skip` method.
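A brief sketch of how the synthesized suites are typically driven; ``my_module`` is a placeholder name::

    import doctest
    import unittest

    # Each doctest example now runs as its own subtest, so one failing
    # example no longer masks the remaining examples in the same doctest.
    suite = doctest.DocTestSuite("my_module")
    unittest.TextTestRunner().run(suite)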
diff --git a/Misc/NEWS.d/next/Library/2025-05-30-13-07-29.gh-issue-134718.9Qvhxn.rst b/Misc/NEWS.d/next/Library/2025-05-30-13-07-29.gh-issue-134718.9Qvhxn.rst
new file mode 100644
index 00000000000..922ab168fdd
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-30-13-07-29.gh-issue-134718.9Qvhxn.rst
@@ -0,0 +1,2 @@
+:func:`ast.dump` now omits ``None`` and ``[]`` values only if they are
+default values.
diff --git a/Misc/NEWS.d/next/Library/2025-05-30-18-13-48.gh-issue-134718.5FEspx.rst b/Misc/NEWS.d/next/Library/2025-05-30-18-13-48.gh-issue-134718.5FEspx.rst
new file mode 100644
index 00000000000..06c1d5583be
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-30-18-13-48.gh-issue-134718.5FEspx.rst
@@ -0,0 +1 @@
+By default, omit optional ``Load()`` values in :func:`ast.dump`.
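A small sketch covering both :func:`ast.dump` changes above (the output shown is indicative rather than exact)::

    import ast

    tree = ast.parse("x = 1")
    # Fields equal to their defaults (None, [], and now the optional Load()
    # contexts) are omitted; non-default values are still shown.
    print(ast.dump(tree))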
diff --git a/Misc/NEWS.d/next/Library/2025-05-31-12-08-12.gh-issue-134970.lgSaxq.rst b/Misc/NEWS.d/next/Library/2025-05-31-12-08-12.gh-issue-134970.lgSaxq.rst
new file mode 100644
index 00000000000..20f53569ef4
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-31-12-08-12.gh-issue-134970.lgSaxq.rst
@@ -0,0 +1,3 @@
+Fix the "unknown action" exception in
+:meth:`argparse.ArgumentParser.add_argument_group` to correctly replace the
+action class.
diff --git a/Misc/NEWS.d/next/Library/2025-05-31-15-49-46.gh-issue-134978.mXXuvW.rst b/Misc/NEWS.d/next/Library/2025-05-31-15-49-46.gh-issue-134978.mXXuvW.rst
new file mode 100644
index 00000000000..e75ce1622d6
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-31-15-49-46.gh-issue-134978.mXXuvW.rst
@@ -0,0 +1,7 @@
+:mod:`hashlib`: Support for the ``string`` keyword parameter in hash function
+constructors such as :func:`~hashlib.new` and the direct hash-named constructors
+such as :func:`~hashlib.md5` and :func:`~hashlib.sha256` is now deprecated and
+slated for removal in Python 3.19.
+Prefer passing the initial data as a positional argument for maximum backwards
+compatibility.
+Patch by Bénédikt Tran.
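A sketch of the preferred spelling going forward::

    import hashlib

    h = hashlib.sha256(b"hello")          # preferred: positional initial data
    print(h.hexdigest())

    # Deprecated, slated for removal in Python 3.19:
    # hashlib.sha256(string=b"hello")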
diff --git a/Misc/NEWS.d/next/Library/2025-06-01-15-13-07.gh-issue-66234.Jw7OdC.rst b/Misc/NEWS.d/next/Library/2025-06-01-15-13-07.gh-issue-66234.Jw7OdC.rst
new file mode 100644
index 00000000000..1defb9a72e0
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-01-15-13-07.gh-issue-66234.Jw7OdC.rst
@@ -0,0 +1,3 @@
+Add the ``'m'`` flag for :func:`dbm.gnu.open`, which allows disabling the
+use of :manpage:`mmap(2)`. This may harm performance, but improves crash
+tolerance.
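A usage sketch; ``cache.gdbm`` is a made-up path, and :mod:`!dbm.gnu` is only available where the GNU gdbm library is installed::

    import dbm.gnu

    # 'c' = create the file if needed; 'm' = do not use mmap(2) for I/O.
    with dbm.gnu.open("cache.gdbm", "cm") as db:
        db[b"key"] = b"value"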
diff --git a/Misc/NEWS.d/next/Library/2025-06-02-14-28-30.gh-issue-130662.EIgIR8.rst b/Misc/NEWS.d/next/Library/2025-06-02-14-28-30.gh-issue-130662.EIgIR8.rst
new file mode 100644
index 00000000000..e07200f9a3f
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-02-14-28-30.gh-issue-130662.EIgIR8.rst
@@ -0,0 +1,3 @@
+Accept leading zeros in precision and width fields for
+:class:`~fractions.Fraction` formatting, for example ``format(Fraction(1,
+3), '.016f')``.
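For example::

    from fractions import Fraction

    # A leading zero in the precision field is now accepted and means '.16f'.
    print(format(Fraction(1, 3), '.016f'))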
diff --git a/Misc/NEWS.d/next/Library/2025-06-02-14-36-28.gh-issue-130662.Gpr2GB.rst b/Misc/NEWS.d/next/Library/2025-06-02-14-36-28.gh-issue-130662.Gpr2GB.rst
new file mode 100644
index 00000000000..d97d937376a
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-02-14-36-28.gh-issue-130662.Gpr2GB.rst
@@ -0,0 +1,3 @@
+Accept leading zeros in precision and width fields for
+:class:`~decimal.Decimal` formatting, for example ``format(Decimal(1.25),
+'.016f')``.
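And likewise for :class:`~decimal.Decimal`::

    from decimal import Decimal

    print(format(Decimal("1.25"), '.016f'))   # equivalent to '.16f'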
diff --git a/Misc/NEWS.d/next/Security/2025-06-02-11-32-23.gh-issue-135034.RLGjbp.rst b/Misc/NEWS.d/next/Security/2025-06-02-11-32-23.gh-issue-135034.RLGjbp.rst
new file mode 100644
index 00000000000..08a0087e203
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2025-06-02-11-32-23.gh-issue-135034.RLGjbp.rst
@@ -0,0 +1,6 @@
+Fixes multiple issues that allowed ``tarfile`` extraction filters
+(``filter="data"`` and ``filter="tar"``) to be bypassed using crafted
+symlinks and hard links.
+
+Addresses :cve:`2024-12718`, :cve:`2025-4138`, :cve:`2025-4330`, and :cve:`2025-4517`.
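A hedged sketch of the hardened extraction path; ``archive.tar`` and ``dest`` are placeholder names::

    import tarfile

    with tarfile.open("archive.tar") as tf:
        # The "data" filter rejects members that escape the destination,
        # including the crafted symlink/hardlink tricks addressed here.
        tf.extractall(path="dest", filter="data")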
+
diff --git a/Misc/NEWS.d/next/Windows/2025-06-03-18-26-54.gh-issue-135099.Q9usKm.rst b/Misc/NEWS.d/next/Windows/2025-06-03-18-26-54.gh-issue-135099.Q9usKm.rst
new file mode 100644
index 00000000000..36e70b1c0d8
--- /dev/null
+++ b/Misc/NEWS.d/next/Windows/2025-06-03-18-26-54.gh-issue-135099.Q9usKm.rst
@@ -0,0 +1,2 @@
+Fix a crash that could occur on Windows when a background thread waits on a
+:c:type:`PyMutex` while the main thread is shutting down the interpreter.
diff --git a/Misc/stable_abi.toml b/Misc/stable_abi.toml
index d38919a8ea9..1f323cc0397 100644
--- a/Misc/stable_abi.toml
+++ b/Misc/stable_abi.toml
@@ -2575,3 +2575,11 @@
added = '3.14'
[function.Py_PACK_VERSION]
added = '3.14'
+[function.PySys_GetAttr]
+ added = '3.15'
+[function.PySys_GetAttrString]
+ added = '3.15'
+[function.PySys_GetOptionalAttr]
+ added = '3.15'
+[function.PySys_GetOptionalAttrString]
+ added = '3.15'
diff --git a/Modules/_csv.c b/Modules/_csv.c
index e5ae853590b..2e04136e0ac 100644
--- a/Modules/_csv.c
+++ b/Modules/_csv.c
@@ -237,7 +237,7 @@ _set_int(const char *name, int *target, PyObject *src, int dflt)
int value;
if (!PyLong_CheckExact(src)) {
PyErr_Format(PyExc_TypeError,
- "\"%s\" must be an integer", name);
+ "\"%s\" must be an integer, not %T", name, src);
return -1;
}
value = PyLong_AsInt(src);
@@ -255,27 +255,29 @@ _set_char_or_none(const char *name, Py_UCS4 *target, PyObject *src, Py_UCS4 dflt
if (src == NULL) {
*target = dflt;
}
- else {
+ else if (src == Py_None) {
*target = NOT_SET;
- if (src != Py_None) {
- if (!PyUnicode_Check(src)) {
- PyErr_Format(PyExc_TypeError,
- "\"%s\" must be string or None, not %.200s", name,
- Py_TYPE(src)->tp_name);
- return -1;
- }
- Py_ssize_t len = PyUnicode_GetLength(src);
- if (len < 0) {
- return -1;
- }
- if (len != 1) {
- PyErr_Format(PyExc_TypeError,
- "\"%s\" must be a 1-character string",
- name);
- return -1;
- }
- *target = PyUnicode_READ_CHAR(src, 0);
+ }
+ else {
+ // similar to PyArg_Parse("C?")
+ if (!PyUnicode_Check(src)) {
+ PyErr_Format(PyExc_TypeError,
+ "\"%s\" must be a unicode character or None, not %T",
+ name, src);
+ return -1;
+ }
+ Py_ssize_t len = PyUnicode_GetLength(src);
+ if (len < 0) {
+ return -1;
}
+ if (len != 1) {
+ PyErr_Format(PyExc_TypeError,
+ "\"%s\" must be a unicode character or None, "
+ "not a string of length %zd",
+ name, len);
+ return -1;
+ }
+ *target = PyUnicode_READ_CHAR(src, 0);
}
return 0;
}
@@ -287,11 +289,12 @@ _set_char(const char *name, Py_UCS4 *target, PyObject *src, Py_UCS4 dflt)
*target = dflt;
}
else {
+ // similar to PyArg_Parse("C")
if (!PyUnicode_Check(src)) {
PyErr_Format(PyExc_TypeError,
- "\"%s\" must be string, not %.200s", name,
- Py_TYPE(src)->tp_name);
- return -1;
+ "\"%s\" must be a unicode character, not %T",
+ name, src);
+ return -1;
}
Py_ssize_t len = PyUnicode_GetLength(src);
if (len < 0) {
@@ -299,8 +302,9 @@ _set_char(const char *name, Py_UCS4 *target, PyObject *src, Py_UCS4 dflt)
}
if (len != 1) {
PyErr_Format(PyExc_TypeError,
- "\"%s\" must be a 1-character string",
- name);
+ "\"%s\" must be a unicode character, "
+ "not a string of length %zd",
+ name, len);
return -1;
}
*target = PyUnicode_READ_CHAR(src, 0);
@@ -314,16 +318,12 @@ _set_str(const char *name, PyObject **target, PyObject *src, const char *dflt)
if (src == NULL)
*target = PyUnicode_DecodeASCII(dflt, strlen(dflt), NULL);
else {
- if (src == Py_None)
- *target = NULL;
- else if (!PyUnicode_Check(src)) {
+ if (!PyUnicode_Check(src)) {
PyErr_Format(PyExc_TypeError,
- "\"%s\" must be a string", name);
+ "\"%s\" must be a string, not %T", name, src);
return -1;
}
- else {
- Py_XSETREF(*target, Py_NewRef(src));
- }
+ Py_XSETREF(*target, Py_NewRef(src));
}
return 0;
}
@@ -533,11 +533,6 @@ dialect_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
/* validate options */
if (dialect_check_quoting(self->quoting))
goto err;
- if (self->delimiter == NOT_SET) {
- PyErr_SetString(PyExc_TypeError,
- "\"delimiter\" must be a 1-character string");
- goto err;
- }
if (quotechar == Py_None && quoting == NULL)
self->quoting = QUOTE_NONE;
if (self->quoting != QUOTE_NONE && self->quotechar == NOT_SET) {
@@ -545,10 +540,6 @@ dialect_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
"quotechar must be set if quoting enabled");
goto err;
}
- if (self->lineterminator == NULL) {
- PyErr_SetString(PyExc_TypeError, "lineterminator must be set");
- goto err;
- }
if (dialect_check_char("delimiter", self->delimiter, self, true) ||
dialect_check_char("escapechar", self->escapechar, self,
!self->skipinitialspace) ||
diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c
index 7de6bb396b0..7e8a133caa7 100644
--- a/Modules/_ctypes/_ctypes.c
+++ b/Modules/_ctypes/_ctypes.c
@@ -3610,6 +3610,45 @@ generic_pycdata_new(ctypes_state *st,
PyCFuncPtr_Type
*/
+static inline void
+atomic_xsetref(PyObject **field, PyObject *value)
+{
+#ifdef Py_GIL_DISABLED
+ PyObject *old = *field;
+ _Py_atomic_store_ptr(field, value);
+ Py_XDECREF(old);
+#else
+ Py_XSETREF(*field, value);
+#endif
+}
+/*
+  This function atomically loads the reference from *field and tries to
+  acquire a new reference to it. If the incref fails, it acquires the
+  critical section of obj and returns a new reference to *field. In the
+  general case, this avoids contention on acquiring the critical section.
+*/
+static inline PyObject *
+atomic_xgetref(PyObject *obj, PyObject **field)
+{
+#ifdef Py_GIL_DISABLED
+ PyObject *value = _Py_atomic_load_ptr(field);
+ if (value == NULL) {
+ return NULL;
+ }
+ if (_Py_TryIncrefCompare(field, value)) {
+ return value;
+ }
+ Py_BEGIN_CRITICAL_SECTION(obj);
+ value = Py_XNewRef(*field);
+ Py_END_CRITICAL_SECTION();
+ return value;
+#else
+ return Py_XNewRef(*field);
+#endif
+}
+
+
+
/*[clinic input]
@critical_section
@setter
@@ -3626,7 +3665,7 @@ _ctypes_CFuncPtr_errcheck_set_impl(PyCFuncPtrObject *self, PyObject *value)
return -1;
}
Py_XINCREF(value);
- Py_XSETREF(self->errcheck, value);
+ atomic_xsetref(&self->errcheck, value);
return 0;
}
@@ -3658,12 +3697,10 @@ static int
_ctypes_CFuncPtr_restype_set_impl(PyCFuncPtrObject *self, PyObject *value)
/*[clinic end generated code: output=0be0a086abbabf18 input=683c3bef4562ccc6]*/
{
- PyObject *checker, *oldchecker;
+ PyObject *checker;
if (value == NULL) {
- oldchecker = self->checker;
- self->checker = NULL;
- Py_CLEAR(self->restype);
- Py_XDECREF(oldchecker);
+ atomic_xsetref(&self->restype, NULL);
+ atomic_xsetref(&self->checker, NULL);
return 0;
}
ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self)));
@@ -3679,11 +3716,9 @@ _ctypes_CFuncPtr_restype_set_impl(PyCFuncPtrObject *self, PyObject *value)
if (PyObject_GetOptionalAttr(value, &_Py_ID(_check_retval_), &checker) < 0) {
return -1;
}
- oldchecker = self->checker;
- self->checker = checker;
Py_INCREF(value);
- Py_XSETREF(self->restype, value);
- Py_XDECREF(oldchecker);
+ atomic_xsetref(&self->checker, checker);
+ atomic_xsetref(&self->restype, value);
return 0;
}
@@ -3728,16 +3763,16 @@ _ctypes_CFuncPtr_argtypes_set_impl(PyCFuncPtrObject *self, PyObject *value)
PyObject *converters;
if (value == NULL || value == Py_None) {
- Py_CLEAR(self->converters);
- Py_CLEAR(self->argtypes);
+ atomic_xsetref(&self->argtypes, NULL);
+ atomic_xsetref(&self->converters, NULL);
} else {
ctypes_state *st = get_module_state_by_def(Py_TYPE(Py_TYPE(self)));
converters = converters_from_argtypes(st, value);
if (!converters)
return -1;
- Py_XSETREF(self->converters, converters);
+ atomic_xsetref(&self->converters, converters);
Py_INCREF(value);
- Py_XSETREF(self->argtypes, value);
+ atomic_xsetref(&self->argtypes, value);
}
return 0;
}
@@ -4533,16 +4568,11 @@ _build_result(PyObject *result, PyObject *callargs,
}
static PyObject *
-PyCFuncPtr_call_lock_held(PyObject *op, PyObject *inargs, PyObject *kwds)
+PyCFuncPtr_call(PyObject *op, PyObject *inargs, PyObject *kwds)
{
- _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(op);
- PyObject *restype;
- PyObject *converters;
- PyObject *checker;
- PyObject *argtypes;
- PyObject *result;
- PyObject *callargs;
- PyObject *errcheck;
+ PyObject *result = NULL;
+ PyObject *callargs = NULL;
+ PyObject *ret = NULL;
#ifdef MS_WIN32
IUnknown *piunk = NULL;
#endif
@@ -4560,13 +4590,24 @@ PyCFuncPtr_call_lock_held(PyObject *op, PyObject *inargs, PyObject *kwds)
}
assert(info); /* Cannot be NULL for PyCFuncPtrObject instances */
- restype = self->restype ? self->restype : info->restype;
- converters = self->converters ? self->converters : info->converters;
- checker = self->checker ? self->checker : info->checker;
- argtypes = self->argtypes ? self->argtypes : info->argtypes;
-/* later, we probably want to have an errcheck field in stginfo */
- errcheck = self->errcheck /* ? self->errcheck : info->errcheck */;
-
+ PyObject *restype = atomic_xgetref(op, &self->restype);
+ if (restype == NULL) {
+ restype = Py_XNewRef(info->restype);
+ }
+ PyObject *converters = atomic_xgetref(op, &self->converters);
+ if (converters == NULL) {
+ converters = Py_XNewRef(info->converters);
+ }
+ PyObject *checker = atomic_xgetref(op, &self->checker);
+ if (checker == NULL) {
+ checker = Py_XNewRef(info->checker);
+ }
+ PyObject *argtypes = atomic_xgetref(op, &self->argtypes);
+ if (argtypes == NULL) {
+ argtypes = Py_XNewRef(info->argtypes);
+ }
+ /* later, we probably want to have an errcheck field in stginfo */
+ PyObject *errcheck = atomic_xgetref(op, &self->errcheck);
pProc = *(void **)self->b_ptr;
#ifdef MS_WIN32
@@ -4577,25 +4618,25 @@ PyCFuncPtr_call_lock_held(PyObject *op, PyObject *inargs, PyObject *kwds)
if (!this) {
PyErr_SetString(PyExc_ValueError,
"native com method call without 'this' parameter");
- return NULL;
+ goto finally;
}
if (!CDataObject_Check(st, this)) {
PyErr_SetString(PyExc_TypeError,
"Expected a COM this pointer as first argument");
- return NULL;
+ goto finally;
}
/* there should be more checks? No, in Python */
/* First arg is a pointer to an interface instance */
if (!this->b_ptr || *(void **)this->b_ptr == NULL) {
PyErr_SetString(PyExc_ValueError,
"NULL COM pointer access");
- return NULL;
+ goto finally;
}
piunk = *(IUnknown **)this->b_ptr;
if (NULL == piunk->lpVtbl) {
PyErr_SetString(PyExc_ValueError,
"COM method call without VTable");
- return NULL;
+ goto finally;
}
pProc = ((void **)piunk->lpVtbl)[self->index - 0x1000];
}
@@ -4603,8 +4644,9 @@ PyCFuncPtr_call_lock_held(PyObject *op, PyObject *inargs, PyObject *kwds)
callargs = _build_callargs(st, self, argtypes,
inargs, kwds,
&outmask, &inoutmask, &numretvals);
- if (callargs == NULL)
- return NULL;
+ if (callargs == NULL) {
+ goto finally;
+ }
if (converters) {
int required = Py_SAFE_DOWNCAST(PyTuple_GET_SIZE(converters),
@@ -4623,7 +4665,7 @@ PyCFuncPtr_call_lock_held(PyObject *op, PyObject *inargs, PyObject *kwds)
required,
required == 1 ? "" : "s",
actual);
- return NULL;
+ goto finally;
}
} else if (required != actual) {
Py_DECREF(callargs);
@@ -4632,7 +4674,7 @@ PyCFuncPtr_call_lock_held(PyObject *op, PyObject *inargs, PyObject *kwds)
required,
required == 1 ? "" : "s",
actual);
- return NULL;
+ goto finally;
}
}
@@ -4663,23 +4705,19 @@ PyCFuncPtr_call_lock_held(PyObject *op, PyObject *inargs, PyObject *kwds)
if (v == NULL || v != callargs) {
Py_DECREF(result);
Py_DECREF(callargs);
- return v;
+ ret = v;
+ goto finally;
}
Py_DECREF(v);
}
-
- return _build_result(result, callargs,
- outmask, inoutmask, numretvals);
-}
-
-static PyObject *
-PyCFuncPtr_call(PyObject *op, PyObject *inargs, PyObject *kwds)
-{
- PyObject *result;
- Py_BEGIN_CRITICAL_SECTION(op);
- result = PyCFuncPtr_call_lock_held(op, inargs, kwds);
- Py_END_CRITICAL_SECTION();
- return result;
+ ret = _build_result(result, callargs, outmask, inoutmask, numretvals);
+finally:
+ Py_XDECREF(restype);
+ Py_XDECREF(converters);
+ Py_XDECREF(checker);
+ Py_XDECREF(argtypes);
+ Py_XDECREF(errcheck);
+ return ret;
}
static int
diff --git a/Modules/_cursesmodule.c b/Modules/_cursesmodule.c
index 5e1eccee3e4..d7788ef7a58 100644
--- a/Modules/_cursesmodule.c
+++ b/Modules/_cursesmodule.c
@@ -108,7 +108,6 @@ static const char PyCursesVersion[] = "2.2";
#include "pycore_capsule.h" // _PyCapsule_SetTraverse()
#include "pycore_long.h" // _PyLong_GetZero()
#include "pycore_structseq.h" // _PyStructSequence_NewType()
-#include "pycore_sysmodule.h" // _PySys_GetOptionalAttrString()
#include "pycore_fileutils.h" // _Py_set_inheritable
#ifdef __hpux
@@ -932,8 +931,10 @@ PyCursesWindow_dealloc(PyObject *self)
PyObject_GC_UnTrack(self);
PyCursesWindowObject *wo = (PyCursesWindowObject *)self;
if (wo->win != stdscr && wo->win != NULL) {
- // silently ignore errors in delwin(3)
- (void)delwin(wo->win);
+ if (delwin(wo->win) == ERR) {
+ curses_window_set_error(wo, "delwin", "__del__");
+ PyErr_FormatUnraisable("Exception ignored in delwin()");
+ }
}
if (wo->encoding != NULL) {
PyMem_Free(wo->encoding);
@@ -1001,7 +1002,11 @@ _curses_window_addch_impl(PyCursesWindowObject *self, int group_left_1,
type = PyCurses_ConvertToCchar_t(self, ch, &cch, wstr);
if (type == 2) {
wstr[1] = L'\0';
- setcchar(&wcval, wstr, attr, PAIR_NUMBER(attr), NULL);
+ rtn = setcchar(&wcval, wstr, attr, PAIR_NUMBER(attr), NULL);
+ if (rtn == ERR) {
+ curses_window_set_error(self, "setcchar", "addch");
+ return NULL;
+ }
if (coordinates_group) {
rtn = mvwadd_wch(self->win,y,x, &wcval);
funcname = "mvwadd_wch";
@@ -1031,6 +1036,27 @@ _curses_window_addch_impl(PyCursesWindowObject *self, int group_left_1,
return curses_window_check_err(self, rtn, funcname, "addch");
}
+#ifdef HAVE_NCURSESW
+#define curses_release_wstr(STRTYPE, WSTR) \
+ do { \
+ if ((STRTYPE) == 2) { \
+ PyMem_Free((WSTR)); \
+ } \
+ } while (0)
+#else
+#define curses_release_wstr(_STRTYPE, _WSTR)
+#endif
+
+static int
+curses_wattrset(PyCursesWindowObject *self, long attr, const char *funcname)
+{
+ if (wattrset(self->win, attr) == ERR) {
+ curses_window_set_error(self, "wattrset", funcname);
+ return -1;
+ }
+ return 0;
+}
+
/*[clinic input]
_curses.window.addstr
@@ -1084,7 +1110,10 @@ _curses_window_addstr_impl(PyCursesWindowObject *self, int group_left_1,
}
if (use_attr) {
attr_old = getattrs(self->win);
- (void)wattrset(self->win,attr);
+ if (curses_wattrset(self, attr, "addstr") < 0) {
+ curses_release_wstr(strtype, wstr);
+ return NULL;
+ }
}
#ifdef HAVE_NCURSESW
if (strtype == 2) {
@@ -1112,9 +1141,15 @@ _curses_window_addstr_impl(PyCursesWindowObject *self, int group_left_1,
}
Py_DECREF(bytesobj);
}
- if (use_attr)
- (void)wattrset(self->win,attr_old);
- return curses_window_check_err(self, rtn, funcname, "addstr");
+ if (rtn == ERR) {
+ curses_window_set_error(self, funcname, "addstr");
+ return NULL;
+ }
+ if (use_attr) {
+ rtn = wattrset(self->win, attr_old);
+ return curses_window_check_err(self, rtn, "wattrset", "addstr");
+ }
+ Py_RETURN_NONE;
}
/*[clinic input]
@@ -1173,7 +1208,10 @@ _curses_window_addnstr_impl(PyCursesWindowObject *self, int group_left_1,
if (use_attr) {
attr_old = getattrs(self->win);
- (void)wattrset(self->win,attr);
+ if (curses_wattrset(self, attr, "addnstr") < 0) {
+ curses_release_wstr(strtype, wstr);
+ return NULL;
+ }
}
#ifdef HAVE_NCURSESW
if (strtype == 2) {
@@ -1201,9 +1239,15 @@ _curses_window_addnstr_impl(PyCursesWindowObject *self, int group_left_1,
}
Py_DECREF(bytesobj);
}
- if (use_attr)
- (void)wattrset(self->win,attr_old);
- return curses_window_check_err(self, rtn, funcname, "addnstr");
+ if (rtn == ERR) {
+ curses_window_set_error(self, funcname, "addnstr");
+ return NULL;
+ }
+ if (use_attr) {
+ rtn = wattrset(self->win, attr_old);
+ return curses_window_check_err(self, rtn, "wattrset", "addnstr");
+ }
+ Py_RETURN_NONE;
}
/*[clinic input]
@@ -1345,7 +1389,7 @@ _curses_window_border_impl(PyCursesWindowObject *self, PyObject *ls,
/*[clinic end generated code: output=670ef38d3d7c2aa3 input=e015f735d67a240b]*/
{
chtype ch[8];
- int i;
+ int i, rtn;
/* Clear the array of parameters */
for(i=0; i<8; i++)
@@ -1366,10 +1410,10 @@ _curses_window_border_impl(PyCursesWindowObject *self, PyObject *ls,
#undef CONVERTTOCHTYPE
- wborder(self->win,
- ch[0], ch[1], ch[2], ch[3],
- ch[4], ch[5], ch[6], ch[7]);
- Py_RETURN_NONE;
+ rtn = wborder(self->win,
+ ch[0], ch[1], ch[2], ch[3],
+ ch[4], ch[5], ch[6], ch[7]);
+ return curses_window_check_err(self, rtn, "wborder", "border");
}
/*[clinic input]
@@ -1403,8 +1447,7 @@ _curses_window_box_impl(PyCursesWindowObject *self, int group_right_1,
return NULL;
}
}
- box(self->win,ch1,ch2);
- Py_RETURN_NONE;
+ return curses_window_check_err(self, box(self->win, ch1, ch2), "box", NULL);
}
#if defined(HAVE_NCURSES_H) || defined(MVWDELCH_IS_EXPRESSION)
@@ -1429,32 +1472,27 @@ int py_mvwdelch(WINDOW *w, int y, int x)
/* chgat, added by Fabian Kreutz <fabian.kreutz at gmx.net> */
#ifdef HAVE_CURSES_WCHGAT
-/*[-clinic input]
-_curses.window.chgat
-
- [
- y: int
- Y-coordinate.
- x: int
- X-coordinate.
- ]
- n: int = -1
- Number of characters.
+PyDoc_STRVAR(_curses_window_chgat__doc__,
+"chgat([y, x,] [n=-1,] attr)\n"
+"Set the attributes of characters.\n"
+"\n"
+" y\n"
+" Y-coordinate.\n"
+" x\n"
+" X-coordinate.\n"
+" n\n"
+" Number of characters.\n"
+" attr\n"
+" Attributes for characters.\n"
+"\n"
+"Set the attributes of num characters at the current cursor position, or at\n"
+"position (y, x) if supplied. If no value of num is given or num = -1, the\n"
+"attribute will be set on all the characters to the end of the line. This\n"
+"function does not move the cursor. The changed line will be touched using\n"
+"the touchline() method so that the contents will be redisplayed by the next\n"
+"window refresh.");
- attr: long
- Attributes for characters.
- /
-
-Set the attributes of characters.
-
-Set the attributes of num characters at the current cursor position, or at
-position (y, x) if supplied. If no value of num is given or num = -1, the
-attribute will be set on all the characters to the end of the line. This
-function does not move the cursor. The changed line will be touched using
-the touchline() method so that the contents will be redisplayed by the next
-window refresh.
-[-clinic start generated code]*/
static PyObject *
PyCursesWindow_ChgAt(PyObject *op, PyObject *args)
{
@@ -1481,19 +1519,20 @@ PyCursesWindow_ChgAt(PyObject *op, PyObject *args)
attr = lattr;
break;
case 3:
- if (!PyArg_ParseTuple(args,"iil;int,int,attr", &y, &x, &lattr))
+ if (!PyArg_ParseTuple(args,"iil;y,x,attr", &y, &x, &lattr))
return NULL;
attr = lattr;
use_xy = TRUE;
break;
case 4:
- if (!PyArg_ParseTuple(args,"iiil;int,int,n,attr", &y, &x, &num, &lattr))
+ if (!PyArg_ParseTuple(args,"iiil;y,x,n,attr", &y, &x, &num, &lattr))
return NULL;
attr = lattr;
use_xy = TRUE;
break;
default:
- PyErr_SetString(PyExc_TypeError, "chgat requires 1 to 4 arguments");
+ PyErr_SetString(PyExc_TypeError,
+ "_curses.window.chgat requires 1 to 4 arguments");
return NULL;
}
@@ -1502,15 +1541,18 @@ PyCursesWindow_ChgAt(PyObject *op, PyObject *args)
if (use_xy) {
rtn = mvwchgat(self->win,y,x,num,attr,color,NULL);
- touchline(self->win,y,1);
funcname = "mvwchgat";
} else {
getyx(self->win,y,x);
rtn = wchgat(self->win,num,attr,color,NULL);
- touchline(self->win,y,1);
funcname = "wchgat";
}
- return curses_window_check_err(self, rtn, funcname, "chgat");
+ if (rtn == ERR) {
+ curses_window_set_error(self, funcname, "chgat");
+ return NULL;
+ }
+ rtn = touchline(self->win,y,1);
+ return curses_window_check_err(self, rtn, "touchline", "chgat");
}
#endif
@@ -1647,20 +1689,40 @@ _curses_window_enclose_impl(PyCursesWindowObject *self, int y, int x)
#endif
/*[clinic input]
-_curses.window.getbkgd -> long
+_curses.window.getbkgd
Return the window's current background character/attribute pair.
[clinic start generated code]*/
-static long
+static PyObject *
_curses_window_getbkgd_impl(PyCursesWindowObject *self)
-/*[clinic end generated code: output=c52b25dc16b215c3 input=a69db882fa35426c]*/
+/*[clinic end generated code: output=3ff953412b0e6028 input=7cf1f59a31f89df4]*/
+{
+ chtype rtn = getbkgd(self->win);
+ if (rtn == (chtype)ERR) {
+ curses_window_set_error(self, "getbkgd", NULL);
+ return NULL;
+ }
+ return PyLong_FromLong(rtn);
+}
+
+static PyObject *
+curses_check_signals_on_input_error(PyCursesWindowObject *self,
+ const char *curses_funcname,
+ const char *python_funcname)
{
- return (long) getbkgd(self->win);
+ assert(!PyErr_Occurred());
+ if (PyErr_CheckSignals()) {
+ return NULL;
+ }
+ cursesmodule_state *state = get_cursesmodule_state_by_win(self);
+ PyErr_Format(state->error, "%s() (called by %s()): no input",
+ curses_funcname, python_funcname);
+ return NULL;
}
/*[clinic input]
-_curses.window.getch -> int
+_curses.window.getch
[
y: int
@@ -1677,10 +1739,10 @@ keypad keys and so on return numbers higher than 256. In no-delay mode, -1
is returned if there is no input, else getch() waits until a key is pressed.
[clinic start generated code]*/
-static int
+static PyObject *
_curses_window_getch_impl(PyCursesWindowObject *self, int group_right_1,
int y, int x)
-/*[clinic end generated code: output=980aa6af0c0ca387 input=bb24ebfb379f991f]*/
+/*[clinic end generated code: output=e1639e87d545e676 input=73f350336b1ee8c8]*/
{
int rtn;
@@ -1693,7 +1755,17 @@ _curses_window_getch_impl(PyCursesWindowObject *self, int group_right_1,
}
Py_END_ALLOW_THREADS
- return rtn;
+ if (rtn == ERR) {
+ // We suppress ERR returned by wgetch() in nodelay mode
+ // after we handled possible interruption signals.
+ if (PyErr_CheckSignals()) {
+ return NULL;
+ }
+ // ERR is an implementation detail, so to be on the safe side,
+ // we forcibly set the return value to -1 as documented above.
+ rtn = -1;
+ }
+ return PyLong_FromLong(rtn);
}
/*[clinic input]
@@ -1731,14 +1803,9 @@ _curses_window_getkey_impl(PyCursesWindowObject *self, int group_right_1,
Py_END_ALLOW_THREADS
if (rtn == ERR) {
- /* getch() returns ERR in nodelay mode */
- PyErr_CheckSignals();
- if (!PyErr_Occurred()) {
- cursesmodule_state *state = get_cursesmodule_state_by_win(self);
- const char *funcname = group_right_1 ? "mvwgetch" : "wgetch";
- PyErr_Format(state->error, "getkey(): %s(): no input", funcname);
- }
- return NULL;
+ /* wgetch() returns ERR in nodelay mode */
+ const char *funcname = group_right_1 ? "mvwgetch" : "wgetch";
+ return curses_check_signals_on_input_error(self, funcname, "getkey");
} else if (rtn <= 255) {
#ifdef NCURSES_VERSION_MAJOR
#if NCURSES_VERSION_MAJOR*100+NCURSES_VERSION_MINOR <= 507
@@ -1791,14 +1858,9 @@ _curses_window_get_wch_impl(PyCursesWindowObject *self, int group_right_1,
Py_END_ALLOW_THREADS
if (ct == ERR) {
- if (PyErr_CheckSignals())
- return NULL;
-
- /* get_wch() returns ERR in nodelay mode */
- cursesmodule_state *state = get_cursesmodule_state_by_win(self);
+ /* wget_wch() returns ERR in nodelay mode */
const char *funcname = group_right_1 ? "mvwget_wch" : "wget_wch";
- PyErr_Format(state->error, "get_wch(): %s(): no input", funcname);
- return NULL;
+ return curses_check_signals_on_input_error(self, funcname, "get_wch");
}
if (ct == KEY_CODE_YES)
return PyLong_FromLong(rtn);
@@ -1807,102 +1869,102 @@ _curses_window_get_wch_impl(PyCursesWindowObject *self, int group_right_1,
}
#endif
-/*[-clinic input]
-_curses.window.getstr
-
- [
- y: int
- Y-coordinate.
- x: int
- X-coordinate.
- ]
- n: int = 2047
- Maximal number of characters.
- /
+/*
+ * Helper function for parsing parameters from getstr() and instr().
+ * This function is necessary because Argument Clinic does not know
+ * how to handle nested optional groups with default values inside.
+ *
+ * Return 1 on success and 0 on failure, similar to PyArg_ParseTuple().
+ */
+static int
+curses_clinic_parse_optional_xy_n(PyObject *args,
+ int *y, int *x, unsigned int *n, int *use_xy,
+ const char *qualname)
+{
+ switch (PyTuple_GET_SIZE(args)) {
+ case 0: {
+ *use_xy = 0;
+ return 1;
+ }
+ case 1: {
+ *use_xy = 0;
+ return PyArg_ParseTuple(args, "O&;n",
+ _PyLong_UnsignedInt_Converter, n);
+ }
+ case 2: {
+ *use_xy = 1;
+ return PyArg_ParseTuple(args, "ii;y,x", y, x);
+ }
+ case 3: {
+ *use_xy = 1;
+ return PyArg_ParseTuple(args, "iiO&;y,x,n", y, x,
+ _PyLong_UnsignedInt_Converter, n);
+ }
+ default: {
+ *use_xy = 0;
+ PyErr_Format(PyExc_TypeError, "%s requires 0 to 3 arguments",
+ qualname);
+ return 0;
+ }
+ }
+}
-Read a string from the user, with primitive line editing capacity.
-[-clinic start generated code]*/
+PyDoc_STRVAR(_curses_window_getstr__doc__,
+"getstr([[y, x,] n=2047])\n"
+"Read a string from the user, with primitive line editing capacity.\n"
+"\n"
+" y\n"
+" Y-coordinate.\n"
+" x\n"
+" X-coordinate.\n"
+" n\n"
+" Maximal number of characters.");
static PyObject *
-PyCursesWindow_GetStr(PyObject *op, PyObject *args)
+PyCursesWindow_getstr(PyObject *op, PyObject *args)
{
PyCursesWindowObject *self = _PyCursesWindowObject_CAST(op);
+ int rtn, use_xy = 0, y = 0, x = 0;
+ unsigned int max_buf_size = 2048;
+ unsigned int n = max_buf_size - 1;
+ PyObject *res;
- int x, y, n;
- int rtn;
+    if (!curses_clinic_parse_optional_xy_n(args, &y, &x, &n, &use_xy,
+                                           "_curses.window.getstr"))
+ {
+ return NULL;
+ }
- /* could make the buffer size larger/dynamic */
- Py_ssize_t max_buf_size = 2048;
- PyObject *result = PyBytes_FromStringAndSize(NULL, max_buf_size);
- if (result == NULL)
+ n = Py_MIN(n, max_buf_size - 1);
+ res = PyBytes_FromStringAndSize(NULL, n + 1);
+ if (res == NULL) {
return NULL;
- char *buf = PyBytes_AS_STRING(result);
+ }
+ char *buf = PyBytes_AS_STRING(res);
- switch (PyTuple_Size(args)) {
- case 0:
- Py_BEGIN_ALLOW_THREADS
- rtn = wgetnstr(self->win, buf, max_buf_size - 1);
- Py_END_ALLOW_THREADS
- break;
- case 1:
- if (!PyArg_ParseTuple(args,"i;n", &n))
- goto error;
- if (n < 0) {
- PyErr_SetString(PyExc_ValueError, "'n' must be nonnegative");
- goto error;
- }
- Py_BEGIN_ALLOW_THREADS
- rtn = wgetnstr(self->win, buf, Py_MIN(n, max_buf_size - 1));
- Py_END_ALLOW_THREADS
- break;
- case 2:
- if (!PyArg_ParseTuple(args,"ii;y,x",&y,&x))
- goto error;
+ if (use_xy) {
Py_BEGIN_ALLOW_THREADS
#ifdef STRICT_SYSV_CURSES
- rtn = wmove(self->win,y,x)==ERR ? ERR : wgetnstr(self->win, rtn, max_buf_size - 1);
+ rtn = wmove(self->win, y, x) == ERR
+ ? ERR
+ : wgetnstr(self->win, buf, n);
#else
- rtn = mvwgetnstr(self->win,y,x,buf, max_buf_size - 1);
+ rtn = mvwgetnstr(self->win, y, x, buf, n);
#endif
Py_END_ALLOW_THREADS
- break;
- case 3:
- if (!PyArg_ParseTuple(args,"iii;y,x,n", &y, &x, &n))
- goto error;
- if (n < 0) {
- PyErr_SetString(PyExc_ValueError, "'n' must be nonnegative");
- goto error;
- }
-#ifdef STRICT_SYSV_CURSES
- Py_BEGIN_ALLOW_THREADS
- rtn = wmove(self->win,y,x)==ERR ? ERR :
- wgetnstr(self->win, rtn, Py_MIN(n, max_buf_size - 1));
- Py_END_ALLOW_THREADS
-#else
+ }
+ else {
Py_BEGIN_ALLOW_THREADS
- rtn = mvwgetnstr(self->win, y, x, buf, Py_MIN(n, max_buf_size - 1));
+ rtn = wgetnstr(self->win, buf, n);
Py_END_ALLOW_THREADS
-#endif
- break;
- default:
- PyErr_SetString(PyExc_TypeError, "getstr requires 0 to 3 arguments");
- goto error;
}
if (rtn == ERR) {
- Py_DECREF(result);
+ Py_DECREF(res);
return Py_GetConstant(Py_CONSTANT_EMPTY_BYTES);
}
-
- if (_PyBytes_Resize(&result, strlen(buf)) < 0) {
- return NULL;
- }
-
- return result;
-
-error:
- Py_DECREF(result);
- return NULL;
+ _PyBytes_Resize(&res, strlen(buf)); // 'res' is set to NULL on failure
+ return res;
}
/*[clinic input]
@@ -2000,7 +2062,7 @@ _curses_window_insch_impl(PyCursesWindowObject *self, int group_left_1,
}
/*[clinic input]
-_curses.window.inch -> unsigned_long
+_curses.window.inch
[
y: int
@@ -2015,104 +2077,80 @@ Return the character at the given position in the window.
The bottom 8 bits are the character proper, and upper bits are the attributes.
[clinic start generated code]*/
-static unsigned long
+static PyObject *
_curses_window_inch_impl(PyCursesWindowObject *self, int group_right_1,
int y, int x)
-/*[clinic end generated code: output=6c4719fe978fe86a input=fac23ee11e3b3a66]*/
+/*[clinic end generated code: output=97ca8581baaafd06 input=4b4fb43d85b177c3]*/
{
- unsigned long rtn;
+ chtype rtn;
+ const char *funcname;
if (!group_right_1) {
rtn = winch(self->win);
+ funcname = "winch";
}
else {
rtn = mvwinch(self->win, y, x);
+ funcname = "mvwinch";
}
+ if (rtn == (chtype)ERR) {
+ curses_window_set_error(self, funcname, "inch");
+ return NULL;
+ }
+ return PyLong_FromUnsignedLong(rtn);
+}
+
+PyDoc_STRVAR(_curses_window_instr__doc__,
+"instr([y, x,] n=2047)\n"
+"Return a string of characters, extracted from the window.\n"
+"\n"
+" y\n"
+" Y-coordinate.\n"
+" x\n"
+" X-coordinate.\n"
+" n\n"
+" Maximal number of characters.\n"
+"\n"
+"Return a string of characters, extracted from the window starting at the\n"
+"current cursor position, or at y, x if specified. Attributes are stripped\n"
+"from the characters. If n is specified, instr() returns a string at most\n"
+"n characters long (exclusive of the trailing NUL).");
- return rtn;
-}
-
-/*[-clinic input]
-_curses.window.instr
-
- [
- y: int
- Y-coordinate.
- x: int
- X-coordinate.
- ]
- n: int = 2047
- Maximal number of characters.
- /
-
-Return a string of characters, extracted from the window.
-
-Return a string of characters, extracted from the window starting at the
-current cursor position, or at y, x if specified. Attributes are stripped
-from the characters. If n is specified, instr() returns a string at most
-n characters long (exclusive of the trailing NUL).
-[-clinic start generated code]*/
static PyObject *
-PyCursesWindow_InStr(PyObject *op, PyObject *args)
+PyCursesWindow_instr(PyObject *op, PyObject *args)
{
PyCursesWindowObject *self = _PyCursesWindowObject_CAST(op);
+ int rtn, use_xy = 0, y = 0, x = 0;
+ unsigned int max_buf_size = 2048;
+ unsigned int n = max_buf_size - 1;
+ PyObject *res;
- int x, y, n;
- int rtn;
+ if (!curses_clinic_parse_optional_xy_n(args, &y, &x, &n, &use_xy,
+ "_curses.window.instr"))
+ {
+ return NULL;
+ }
- /* could make the buffer size larger/dynamic */
- Py_ssize_t max_buf_size = 2048;
- PyObject *result = PyBytes_FromStringAndSize(NULL, max_buf_size);
- if (result == NULL)
+ n = Py_MIN(n, max_buf_size - 1);
+ res = PyBytes_FromStringAndSize(NULL, n + 1);
+ if (res == NULL) {
return NULL;
- char *buf = PyBytes_AS_STRING(result);
+ }
+ char *buf = PyBytes_AS_STRING(res);
- switch (PyTuple_Size(args)) {
- case 0:
- rtn = winnstr(self->win, buf, max_buf_size - 1);
- break;
- case 1:
- if (!PyArg_ParseTuple(args,"i;n", &n))
- goto error;
- if (n < 0) {
- PyErr_SetString(PyExc_ValueError, "'n' must be nonnegative");
- goto error;
- }
- rtn = winnstr(self->win, buf, Py_MIN(n, max_buf_size - 1));
- break;
- case 2:
- if (!PyArg_ParseTuple(args,"ii;y,x",&y,&x))
- goto error;
- rtn = mvwinnstr(self->win, y, x, buf, max_buf_size - 1);
- break;
- case 3:
- if (!PyArg_ParseTuple(args, "iii;y,x,n", &y, &x, &n))
- goto error;
- if (n < 0) {
- PyErr_SetString(PyExc_ValueError, "'n' must be nonnegative");
- goto error;
- }
- rtn = mvwinnstr(self->win, y, x, buf, Py_MIN(n, max_buf_size - 1));
- break;
- default:
- PyErr_SetString(PyExc_TypeError, "instr requires 0 or 3 arguments");
- goto error;
+ if (use_xy) {
+ rtn = mvwinnstr(self->win, y, x, buf, n);
+ }
+ else {
+ rtn = winnstr(self->win, buf, n);
}
if (rtn == ERR) {
- Py_DECREF(result);
+ Py_DECREF(res);
return Py_GetConstant(Py_CONSTANT_EMPTY_BYTES);
}
-
- if (_PyBytes_Resize(&result, strlen(buf)) < 0) {
- return NULL;
- }
-
- return result;
-
-error:
- Py_DECREF(result);
- return NULL;
+ _PyBytes_Resize(&res, strlen(buf)); // 'res' is set to NULL on failure
+ return res;
}
/*[clinic input]
@@ -2169,7 +2207,10 @@ _curses_window_insstr_impl(PyCursesWindowObject *self, int group_left_1,
if (use_attr) {
attr_old = getattrs(self->win);
- (void)wattrset(self->win, (attr_t)attr);
+ if (curses_wattrset(self, attr, "insstr") < 0) {
+ curses_release_wstr(strtype, wstr);
+ return NULL;
+ }
}
#ifdef HAVE_NCURSESW
if (strtype == 2) {
@@ -2197,9 +2238,15 @@ _curses_window_insstr_impl(PyCursesWindowObject *self, int group_left_1,
}
Py_DECREF(bytesobj);
}
- if (use_attr)
- (void)wattrset(self->win,attr_old);
- return curses_window_check_err(self, rtn, funcname, "insstr");
+ if (rtn == ERR) {
+ curses_window_set_error(self, funcname, "insstr");
+ return NULL;
+ }
+ if (use_attr) {
+ rtn = wattrset(self->win, attr_old);
+ return curses_window_check_err(self, rtn, "wattrset", "insstr");
+ }
+ Py_RETURN_NONE;
}
/*[clinic input]
@@ -2260,7 +2307,10 @@ _curses_window_insnstr_impl(PyCursesWindowObject *self, int group_left_1,
if (use_attr) {
attr_old = getattrs(self->win);
- (void)wattrset(self->win, (attr_t)attr);
+ if (curses_wattrset(self, attr, "insnstr") < 0) {
+ curses_release_wstr(strtype, wstr);
+ return NULL;
+ }
}
#ifdef HAVE_NCURSESW
if (strtype == 2) {
@@ -2288,9 +2338,15 @@ _curses_window_insnstr_impl(PyCursesWindowObject *self, int group_left_1,
}
Py_DECREF(bytesobj);
}
- if (use_attr)
- (void)wattrset(self->win,attr_old);
- return curses_window_check_err(self, rtn, funcname, "insnstr");
+ if (rtn == ERR) {
+ curses_window_set_error(self, funcname, "insnstr");
+ return NULL;
+ }
+ if (use_attr) {
+ rtn = wattrset(self->win, attr_old);
+ return curses_window_check_err(self, rtn, "wattrset", "insnstr");
+ }
+ Py_RETURN_NONE;
}
/*[clinic input]
@@ -2366,8 +2422,7 @@ _curses_window_noutrefresh_impl(PyCursesWindowObject *self)
#ifdef py_is_pad
if (py_is_pad(self->win)) {
if (!group_right_1) {
- cursesmodule_state *state = get_cursesmodule_state_by_win(self);
- PyErr_SetString(state->error,
+ PyErr_SetString(PyExc_TypeError,
"noutrefresh() called for a pad "
"requires 6 arguments");
return NULL;
@@ -2593,8 +2648,7 @@ _curses_window_refresh_impl(PyCursesWindowObject *self, int group_right_1,
#ifdef py_is_pad
if (py_is_pad(self->win)) {
if (!group_right_1) {
- cursesmodule_state *state = get_cursesmodule_state_by_win(self);
- PyErr_SetString(state->error,
+ PyErr_SetString(PyExc_TypeError,
"refresh() for a pad requires 6 arguments");
return NULL;
}
@@ -2854,7 +2908,10 @@ static PyMethodDef PyCursesWindow_methods[] = {
_CURSES_WINDOW_ATTRSET_METHODDEF
_CURSES_WINDOW_BKGD_METHODDEF
#ifdef HAVE_CURSES_WCHGAT
- {"chgat", PyCursesWindow_ChgAt, METH_VARARGS},
+ {
+ "chgat", PyCursesWindow_ChgAt, METH_VARARGS,
+ _curses_window_chgat__doc__
+ },
#endif
_CURSES_WINDOW_BKGDSET_METHODDEF
_CURSES_WINDOW_BORDER_METHODDEF
@@ -2877,7 +2934,10 @@ static PyMethodDef PyCursesWindow_methods[] = {
_CURSES_WINDOW_GET_WCH_METHODDEF
{"getmaxyx", PyCursesWindow_getmaxyx, METH_NOARGS},
{"getparyx", PyCursesWindow_getparyx, METH_NOARGS},
- {"getstr", PyCursesWindow_GetStr, METH_VARARGS},
+ {
+ "getstr", PyCursesWindow_getstr, METH_VARARGS,
+ _curses_window_getstr__doc__
+ },
{"getyx", PyCursesWindow_getyx, METH_NOARGS},
_CURSES_WINDOW_HLINE_METHODDEF
{"idcok", PyCursesWindow_idcok, METH_VARARGS},
@@ -2891,7 +2951,10 @@ static PyMethodDef PyCursesWindow_methods[] = {
{"insertln", PyCursesWindow_winsertln, METH_NOARGS},
_CURSES_WINDOW_INSNSTR_METHODDEF
_CURSES_WINDOW_INSSTR_METHODDEF
- {"instr", PyCursesWindow_InStr, METH_VARARGS},
+ {
+ "instr", PyCursesWindow_instr, METH_VARARGS,
+ _curses_window_instr__doc__
+ },
_CURSES_WINDOW_IS_LINETOUCHED_METHODDEF
{"is_wintouched", PyCursesWindow_is_wintouched, METH_NOARGS},
{"keypad", PyCursesWindow_keypad, METH_VARARGS},
@@ -2974,12 +3037,15 @@ static PyType_Spec PyCursesWindow_Type_spec = {
*
* These macros should only be used for generating the body of
* the module's methods since they need a module reference.
+ *
+ * The Python function name must be the same as the curses function name (X).
*/
-#define NoArgNoReturnFunctionBody(X) \
-{ \
- PyCursesStatefulInitialised(module); \
- return curses_check_err(module, X(), # X, NULL); }
+#define NoArgNoReturnFunctionBody(X) \
+{ \
+ PyCursesStatefulInitialised(module); \
+ return curses_check_err(module, X(), # X, NULL); \
+}
#define NoArgOrFlagNoReturnFunctionBody(X, FLAG) \
{ \
@@ -2997,26 +3063,40 @@ static PyType_Spec PyCursesWindow_Type_spec = {
return curses_check_err(module, rtn, funcname, # X); \
}
-#define NoArgReturnIntFunctionBody(X) \
-{ \
- PyCursesStatefulInitialised(module); \
- return PyLong_FromLong((long) X()); }
+#define NoArgReturnIntFunctionBody(X) \
+{ \
+ PyCursesStatefulInitialised(module); \
+ int rtn = X(); \
+ if (rtn == ERR) { \
+ curses_set_error(module, # X, NULL); \
+ return NULL; \
+ } \
+ return PyLong_FromLong(rtn); \
+}
-#define NoArgReturnStringFunctionBody(X) \
-{ \
- PyCursesStatefulInitialised(module); \
- return PyBytes_FromString(X()); }
+#define NoArgReturnStringFunctionBody(X) \
+{ \
+ PyCursesStatefulInitialised(module); \
+ const char *res = X(); \
+ if (res == NULL) { \
+ curses_set_null_error(module, # X, NULL); \
+ return NULL; \
+ } \
+ return PyBytes_FromString(res); \
+}
-#define NoArgTrueFalseFunctionBody(X) \
-{ \
- PyCursesStatefulInitialised(module); \
- return PyBool_FromLong(X()); }
+#define NoArgTrueFalseFunctionBody(X) \
+{ \
+ PyCursesStatefulInitialised(module); \
+ return PyBool_FromLong(X()); \
+}
-#define NoArgNoReturnVoidFunctionBody(X) \
-{ \
- PyCursesStatefulInitialised(module); \
- X(); \
- Py_RETURN_NONE; }
+#define NoArgNoReturnVoidFunctionBody(X) \
+{ \
+ PyCursesStatefulInitialised(module); \
+ X(); \
+ Py_RETURN_NONE; \
+}
/*********************************************************************
Global Functions
@@ -3627,8 +3707,12 @@ _curses_initscr_impl(PyObject *module)
WINDOW *win;
if (curses_initscr_called) {
- wrefresh(stdscr);
cursesmodule_state *state = get_cursesmodule_state(module);
+ int code = wrefresh(stdscr);
+ if (code == ERR) {
+ _curses_set_null_error(state, "wrefresh", "initscr");
+ return NULL;
+ }
return PyCursesWindow_New(state, stdscr, NULL, NULL);
}
@@ -3762,7 +3846,7 @@ _curses_setupterm_impl(PyObject *module, const char *term, int fd)
if (fd == -1) {
PyObject* sys_stdout;
- if (_PySys_GetOptionalAttrString("stdout", &sys_stdout) < 0) {
+ if (PySys_GetOptionalAttrString("stdout", &sys_stdout) < 0) {
return NULL;
}
@@ -4812,7 +4896,12 @@ _curses_unctrl(PyObject *module, PyObject *ch)
if (!PyCurses_ConvertToChtype(NULL, ch, &ch_))
return NULL;
- return PyBytes_FromString(unctrl(ch_));
+ const char *res = unctrl(ch_);
+ if (res == NULL) {
+ curses_set_null_error(module, "unctrl", NULL);
+ return NULL;
+ }
+ return PyBytes_FromString(res);
}
/*[clinic input]
@@ -4981,13 +5070,7 @@ _curses_assume_default_colors_impl(PyObject *module, int fg, int bg)
PyCursesStatefulInitialisedColor(module);
code = assume_default_colors(fg, bg);
- if (code != ERR) {
- Py_RETURN_NONE;
- } else {
- cursesmodule_state *state = get_cursesmodule_state(module);
- PyErr_SetString(state->error, "assume_default_colors() returned ERR");
- return NULL;
- }
+ return curses_check_err(module, code, "assume_default_colors", NULL);
}
#endif /* STRICT_SYSV_CURSES */
diff --git a/Modules/_gdbmmodule.c b/Modules/_gdbmmodule.c
index 9c402e20e51..6a4939512b2 100644
--- a/Modules/_gdbmmodule.c
+++ b/Modules/_gdbmmodule.c
@@ -814,6 +814,11 @@ dbmopen_impl(PyObject *module, PyObject *filename, const char *flags,
iflags |= GDBM_NOLOCK;
break;
#endif
+#ifdef GDBM_NOMMAP
+ case 'm':
+ iflags |= GDBM_NOMMAP;
+ break;
+#endif
default:
PyErr_Format(state->gdbm_error,
"Flag '%c' is not supported.", (unsigned char)*flags);
@@ -847,6 +852,9 @@ static const char gdbmmodule_open_flags[] = "rwcn"
#ifdef GDBM_NOLOCK
"u"
#endif
+#ifdef GDBM_NOMMAP
+ "m"
+#endif
;
static PyMethodDef _gdbm_module_methods[] = {
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c
index 48eed5eac97..331275076d7 100644
--- a/Modules/_hashopenssl.c
+++ b/Modules/_hashopenssl.c
@@ -38,6 +38,10 @@
#include <stdbool.h>
+#if OPENSSL_VERSION_NUMBER >= 0x30000000L
+# define Py_HAS_OPENSSL3_SUPPORT
+#endif
+
#ifndef OPENSSL_THREADS
# error "OPENSSL_THREADS is not defined, Python requires thread-safe OpenSSL"
#endif
@@ -55,7 +59,7 @@
#define PY_OPENSSL_HAS_BLAKE2 1
#endif
-#if OPENSSL_VERSION_NUMBER >= 0x30000000L
+#ifdef Py_HAS_OPENSSL3_SUPPORT
#define PY_EVP_MD EVP_MD
#define PY_EVP_MD_fetch(algorithm, properties) EVP_MD_fetch(NULL, algorithm, properties)
#define PY_EVP_MD_up_ref(md) EVP_MD_up_ref(md)
@@ -77,12 +81,12 @@
* py_alias as keys.
*/
-enum Py_hash_type {
- Py_ht_evp, // usedforsecurity=True / default
- Py_ht_evp_nosecurity, // usedforsecurity=False
- Py_ht_mac, // HMAC
- Py_ht_pbkdf2, // PKBDF2
-};
+typedef enum Py_hash_type {
+ Py_ht_evp, // usedforsecurity=True / default
+ Py_ht_evp_nosecurity, // usedforsecurity=False
+ Py_ht_mac, // HMAC
+ Py_ht_pbkdf2, // PKBDF2
+} Py_hash_type;
typedef struct {
const char *py_name;
@@ -255,10 +259,10 @@ py_hashentry_table_new(void) {
static PyModuleDef _hashlibmodule;
typedef struct {
- PyTypeObject *EVPtype;
+ PyTypeObject *HASH_type; // based on EVP_MD
PyTypeObject *HMACtype;
#ifdef PY_OPENSSL_HAS_SHAKE
- PyTypeObject *EVPXOFtype;
+ PyTypeObject *HASHXOF_type; // based on EVP_MD
#endif
PyObject *constructs;
PyObject *unsupported_digestmod_error;
@@ -275,13 +279,13 @@ get_hashlib_state(PyObject *module)
typedef struct {
PyObject_HEAD
- EVP_MD_CTX *ctx; /* OpenSSL message digest context */
+ EVP_MD_CTX *ctx; /* OpenSSL message digest context */
// Prevents undefined behavior via multiple threads entering the C API.
bool use_mutex;
- PyMutex mutex; /* OpenSSL context lock */
-} EVPobject;
+ PyMutex mutex; /* OpenSSL context lock */
+} HASHobject;
-#define EVPobject_CAST(op) ((EVPobject *)(op))
+#define HASHobject_CAST(op) ((HASHobject *)(op))
typedef struct {
PyObject_HEAD
@@ -296,11 +300,11 @@ typedef struct {
#include "clinic/_hashopenssl.c.h"
/*[clinic input]
module _hashlib
-class _hashlib.HASH "EVPobject *" "((_hashlibstate *)PyModule_GetState(module))->EVPtype"
-class _hashlib.HASHXOF "EVPobject *" "((_hashlibstate *)PyModule_GetState(module))->EVPXOFtype"
+class _hashlib.HASH "HASHobject *" "((_hashlibstate *)PyModule_GetState(module))->EVPtype"
+class _hashlib.HASHXOF "HASHobject *" "((_hashlibstate *)PyModule_GetState(module))->EVPXOFtype"
class _hashlib.HMAC "HMACobject *" "((_hashlibstate *)PyModule_GetState(module))->HMACtype"
[clinic start generated code]*/
-/*[clinic end generated code: output=da39a3ee5e6b4b0d input=7df1bcf6f75cb8ef]*/
+/*[clinic end generated code: output=da39a3ee5e6b4b0d input=4f6b8873ed13d1ff]*/
/* LCOV_EXCL_START */
@@ -364,8 +368,8 @@ notify_ssl_error_occurred(void)
}
/* LCOV_EXCL_STOP */
-static PyObject*
-py_digest_name(const EVP_MD *md)
+static const char *
+get_openssl_evp_md_utf8name(const EVP_MD *md)
{
assert(md != NULL);
int nid = EVP_MD_nid(md);
@@ -388,13 +392,20 @@ py_digest_name(const EVP_MD *md)
if (name == NULL)
name = OBJ_nid2sn(nid);
}
+ return name;
+}
+static PyObject *
+get_openssl_evp_md_name(const EVP_MD *md)
+{
+ const char *name = get_openssl_evp_md_utf8name(md);
return PyUnicode_FromString(name);
}
/* Get EVP_MD by HID and purpose */
-static PY_EVP_MD*
-py_digest_by_name(PyObject *module, const char *name, enum Py_hash_type py_ht)
+static PY_EVP_MD *
+get_openssl_evp_md_by_utf8name(PyObject *module, const char *name,
+ Py_hash_type py_ht)
{
PY_EVP_MD *digest = NULL;
PY_EVP_MD *other_digest = NULL;
@@ -460,15 +471,17 @@ py_digest_by_name(PyObject *module, const char *name, enum Py_hash_type py_ht)
return digest;
}
-/* Get digest EVP from object
+/* Get digest EVP_MD from object
*
* * string
* * _hashopenssl builtin function
*
* on error returns NULL with exception set.
*/
-static PY_EVP_MD*
-py_digest_by_digestmod(PyObject *module, PyObject *digestmod, enum Py_hash_type py_ht) {
+static PY_EVP_MD *
+get_openssl_evp_md(PyObject *module, PyObject *digestmod,
+ Py_hash_type py_ht)
+{
PyObject *name_obj = NULL;
const char *name;
@@ -494,13 +507,13 @@ py_digest_by_digestmod(PyObject *module, PyObject *digestmod, enum Py_hash_type
return NULL;
}
- return py_digest_by_name(module, name, py_ht);
+ return get_openssl_evp_md_by_utf8name(module, name, py_ht);
}
-static EVPobject *
-newEVPobject(PyTypeObject *type)
+static HASHobject *
+new_hash_object(PyTypeObject *type)
{
- EVPobject *retval = PyObject_New(EVPobject, type);
+ HASHobject *retval = PyObject_New(HASHobject, type);
if (retval == NULL) {
return NULL;
}
@@ -517,7 +530,7 @@ newEVPobject(PyTypeObject *type)
}
static int
-EVP_hash(EVPobject *self, const void *vp, Py_ssize_t len)
+_hashlib_HASH_hash(HASHobject *self, const void *vp, Py_ssize_t len)
{
unsigned int process;
const unsigned char *cp = (const unsigned char *)vp;
@@ -539,9 +552,9 @@ EVP_hash(EVPobject *self, const void *vp, Py_ssize_t len)
/* Internal methods for a hash object */
static void
-EVP_dealloc(PyObject *op)
+_hashlib_HASH_dealloc(PyObject *op)
{
- EVPobject *self = EVPobject_CAST(op);
+ HASHobject *self = HASHobject_CAST(op);
PyTypeObject *tp = Py_TYPE(self);
EVP_MD_CTX_free(self->ctx);
PyObject_Free(self);
@@ -549,7 +562,7 @@ EVP_dealloc(PyObject *op)
}
static int
-locked_EVP_MD_CTX_copy(EVP_MD_CTX *new_ctx_p, EVPobject *self)
+_hashlib_HASH_copy_locked(HASHobject *self, EVP_MD_CTX *new_ctx_p)
{
int result;
ENTER_HASHLIB(self);
@@ -561,21 +574,21 @@ locked_EVP_MD_CTX_copy(EVP_MD_CTX *new_ctx_p, EVPobject *self)
/* External methods for a hash object */
/*[clinic input]
-_hashlib.HASH.copy as EVP_copy
+_hashlib.HASH.copy
Return a copy of the hash object.
[clinic start generated code]*/
static PyObject *
-EVP_copy_impl(EVPobject *self)
-/*[clinic end generated code: output=b370c21cdb8ca0b4 input=31455b6a3e638069]*/
+_hashlib_HASH_copy_impl(HASHobject *self)
+/*[clinic end generated code: output=2545541af18d53d7 input=814b19202cd08a26]*/
{
- EVPobject *newobj;
+ HASHobject *newobj;
- if ((newobj = newEVPobject(Py_TYPE(self))) == NULL)
+ if ((newobj = new_hash_object(Py_TYPE(self))) == NULL)
return NULL;
- if (!locked_EVP_MD_CTX_copy(newobj->ctx, self)) {
+ if (!_hashlib_HASH_copy_locked(self, newobj->ctx)) {
Py_DECREF(newobj);
notify_ssl_error_occurred();
return NULL;
@@ -584,14 +597,14 @@ EVP_copy_impl(EVPobject *self)
}
/*[clinic input]
-_hashlib.HASH.digest as EVP_digest
+_hashlib.HASH.digest
Return the digest value as a bytes object.
[clinic start generated code]*/
static PyObject *
-EVP_digest_impl(EVPobject *self)
-/*[clinic end generated code: output=0f6a3a0da46dc12d input=03561809a419bf00]*/
+_hashlib_HASH_digest_impl(HASHobject *self)
+/*[clinic end generated code: output=3fc6f9671d712850 input=d8d528d6e50af0de]*/
{
unsigned char digest[EVP_MAX_MD_SIZE];
EVP_MD_CTX *temp_ctx;
@@ -604,7 +617,7 @@ EVP_digest_impl(EVPobject *self)
return NULL;
}
- if (!locked_EVP_MD_CTX_copy(temp_ctx, self)) {
+ if (!_hashlib_HASH_copy_locked(self, temp_ctx)) {
goto error;
}
digest_size = EVP_MD_CTX_size(temp_ctx);
@@ -623,14 +636,14 @@ error:
}
/*[clinic input]
-_hashlib.HASH.hexdigest as EVP_hexdigest
+_hashlib.HASH.hexdigest
Return the digest value as a string of hexadecimal digits.
[clinic start generated code]*/
static PyObject *
-EVP_hexdigest_impl(EVPobject *self)
-/*[clinic end generated code: output=18e6decbaf197296 input=aff9cf0e4c741a9a]*/
+_hashlib_HASH_hexdigest_impl(HASHobject *self)
+/*[clinic end generated code: output=1b8e60d9711e7f4d input=ae7553f78f8372d8]*/
{
unsigned char digest[EVP_MAX_MD_SIZE];
EVP_MD_CTX *temp_ctx;
@@ -643,7 +656,7 @@ EVP_hexdigest_impl(EVPobject *self)
}
/* Get the raw (binary) digest value */
- if (!locked_EVP_MD_CTX_copy(temp_ctx, self)) {
+ if (!_hashlib_HASH_copy_locked(self, temp_ctx)) {
goto error;
}
digest_size = EVP_MD_CTX_size(temp_ctx);
@@ -662,7 +675,7 @@ error:
}
/*[clinic input]
-_hashlib.HASH.update as EVP_update
+_hashlib.HASH.update
obj: object
/
@@ -671,8 +684,8 @@ Update this hash object's state with the provided string.
[clinic start generated code]*/
static PyObject *
-EVP_update_impl(EVPobject *self, PyObject *obj)
-/*[clinic end generated code: output=d56f91c68348f95f input=9b30ec848f015501]*/
+_hashlib_HASH_update_impl(HASHobject *self, PyObject *obj)
+/*[clinic end generated code: output=62ad989754946b86 input=aa1ce20e3f92ceb6]*/
{
int result;
Py_buffer view;
@@ -685,11 +698,11 @@ EVP_update_impl(EVPobject *self, PyObject *obj)
if (self->use_mutex) {
Py_BEGIN_ALLOW_THREADS
PyMutex_Lock(&self->mutex);
- result = EVP_hash(self, view.buf, view.len);
+ result = _hashlib_HASH_hash(self, view.buf, view.len);
PyMutex_Unlock(&self->mutex);
Py_END_ALLOW_THREADS
} else {
- result = EVP_hash(self, view.buf, view.len);
+ result = _hashlib_HASH_hash(self, view.buf, view.len);
}
PyBuffer_Release(&view);
@@ -699,54 +712,54 @@ EVP_update_impl(EVPobject *self, PyObject *obj)
Py_RETURN_NONE;
}
-static PyMethodDef EVP_methods[] = {
- EVP_UPDATE_METHODDEF
- EVP_DIGEST_METHODDEF
- EVP_HEXDIGEST_METHODDEF
- EVP_COPY_METHODDEF
+static PyMethodDef HASH_methods[] = {
+ _HASHLIB_HASH_COPY_METHODDEF
+ _HASHLIB_HASH_DIGEST_METHODDEF
+ _HASHLIB_HASH_HEXDIGEST_METHODDEF
+ _HASHLIB_HASH_UPDATE_METHODDEF
{NULL, NULL} /* sentinel */
};
static PyObject *
-EVP_get_block_size(PyObject *op, void *Py_UNUSED(closure))
+_hashlib_HASH_get_blocksize(PyObject *op, void *Py_UNUSED(closure))
{
- EVPobject *self = EVPobject_CAST(op);
+ HASHobject *self = HASHobject_CAST(op);
long block_size = EVP_MD_CTX_block_size(self->ctx);
return PyLong_FromLong(block_size);
}
static PyObject *
-EVP_get_digest_size(PyObject *op, void *Py_UNUSED(closure))
+_hashlib_HASH_get_digestsize(PyObject *op, void *Py_UNUSED(closure))
{
- EVPobject *self = EVPobject_CAST(op);
+ HASHobject *self = HASHobject_CAST(op);
long size = EVP_MD_CTX_size(self->ctx);
return PyLong_FromLong(size);
}
static PyObject *
-EVP_get_name(PyObject *op, void *Py_UNUSED(closure))
+_hashlib_HASH_get_name(PyObject *op, void *Py_UNUSED(closure))
{
- EVPobject *self = EVPobject_CAST(op);
+ HASHobject *self = HASHobject_CAST(op);
const EVP_MD *md = EVP_MD_CTX_md(self->ctx);
if (md == NULL) {
notify_ssl_error_occurred();
return NULL;
}
- return py_digest_name(md);
+ return get_openssl_evp_md_name(md);
}
-static PyGetSetDef EVP_getseters[] = {
- {"digest_size", EVP_get_digest_size, NULL, NULL, NULL},
- {"block_size", EVP_get_block_size, NULL, NULL, NULL},
- {"name", EVP_get_name, NULL, NULL, PyDoc_STR("algorithm name.")},
+static PyGetSetDef HASH_getsets[] = {
+ {"digest_size", _hashlib_HASH_get_digestsize, NULL, NULL, NULL},
+ {"block_size", _hashlib_HASH_get_blocksize, NULL, NULL, NULL},
+ {"name", _hashlib_HASH_get_name, NULL, NULL, PyDoc_STR("algorithm name.")},
{NULL} /* Sentinel */
};
static PyObject *
-EVP_repr(PyObject *self)
+_hashlib_HASH_repr(PyObject *self)
{
- PyObject *name = EVP_get_name(self, NULL);
+ PyObject *name = _hashlib_HASH_get_name(self, NULL);
if (name == NULL) {
return NULL;
}
@@ -756,7 +769,7 @@ EVP_repr(PyObject *self)
return repr;
}
-PyDoc_STRVAR(hashtype_doc,
+PyDoc_STRVAR(HASHobject_type_doc,
"HASH(name, string=b\'\')\n"
"--\n"
"\n"
@@ -774,27 +787,31 @@ PyDoc_STRVAR(hashtype_doc,
"name -- the hash algorithm being used by this object\n"
"digest_size -- number of bytes in this hashes output");
-static PyType_Slot EVPtype_slots[] = {
- {Py_tp_dealloc, EVP_dealloc},
- {Py_tp_repr, EVP_repr},
- {Py_tp_doc, (char *)hashtype_doc},
- {Py_tp_methods, EVP_methods},
- {Py_tp_getset, EVP_getseters},
+static PyType_Slot HASHobject_type_slots[] = {
+ {Py_tp_dealloc, _hashlib_HASH_dealloc},
+ {Py_tp_repr, _hashlib_HASH_repr},
+ {Py_tp_doc, (char *)HASHobject_type_doc},
+ {Py_tp_methods, HASH_methods},
+ {Py_tp_getset, HASH_getsets},
{0, 0},
};
-static PyType_Spec EVPtype_spec = {
- "_hashlib.HASH", /*tp_name*/
- sizeof(EVPobject), /*tp_basicsize*/
- 0, /*tp_itemsize*/
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_DISALLOW_INSTANTIATION | Py_TPFLAGS_IMMUTABLETYPE,
- EVPtype_slots
+static PyType_Spec HASHobject_type_spec = {
+ .name = "_hashlib.HASH",
+ .basicsize = sizeof(HASHobject),
+ .flags = (
+ Py_TPFLAGS_DEFAULT
+ | Py_TPFLAGS_BASETYPE
+ | Py_TPFLAGS_DISALLOW_INSTANTIATION
+ | Py_TPFLAGS_IMMUTABLETYPE
+ ),
+ .slots = HASHobject_type_slots
};
#ifdef PY_OPENSSL_HAS_SHAKE
/*[clinic input]
-_hashlib.HASHXOF.digest as EVPXOF_digest
+_hashlib.HASHXOF.digest
length: Py_ssize_t
@@ -802,8 +819,8 @@ Return the digest value as a bytes object.
[clinic start generated code]*/
static PyObject *
-EVPXOF_digest_impl(EVPobject *self, Py_ssize_t length)
-/*[clinic end generated code: output=ef9320c23280efad input=816a6537cea3d1db]*/
+_hashlib_HASHXOF_digest_impl(HASHobject *self, Py_ssize_t length)
+/*[clinic end generated code: output=dcb09335dd2fe908 input=3eb034ce03c55b21]*/
{
EVP_MD_CTX *temp_ctx;
PyObject *retval = PyBytes_FromStringAndSize(NULL, length);
@@ -819,7 +836,7 @@ EVPXOF_digest_impl(EVPobject *self, Py_ssize_t length)
return NULL;
}
- if (!locked_EVP_MD_CTX_copy(temp_ctx, self)) {
+ if (!_hashlib_HASH_copy_locked(self, temp_ctx)) {
goto error;
}
if (!EVP_DigestFinalXOF(temp_ctx,
@@ -840,7 +857,7 @@ error:
}
/*[clinic input]
-_hashlib.HASHXOF.hexdigest as EVPXOF_hexdigest
+_hashlib.HASHXOF.hexdigest
length: Py_ssize_t
@@ -848,8 +865,8 @@ Return the digest value as a string of hexadecimal digits.
[clinic start generated code]*/
static PyObject *
-EVPXOF_hexdigest_impl(EVPobject *self, Py_ssize_t length)
-/*[clinic end generated code: output=eb3e6ee7788bf5b2 input=5f9d6a8f269e34df]*/
+_hashlib_HASHXOF_hexdigest_impl(HASHobject *self, Py_ssize_t length)
+/*[clinic end generated code: output=519431cafa014f39 input=0e58f7238adb7ab8]*/
{
unsigned char *digest;
EVP_MD_CTX *temp_ctx;
@@ -869,7 +886,7 @@ EVPXOF_hexdigest_impl(EVPobject *self, Py_ssize_t length)
}
/* Get the raw (binary) digest value */
- if (!locked_EVP_MD_CTX_copy(temp_ctx, self)) {
+ if (!_hashlib_HASH_copy_locked(self, temp_ctx)) {
goto error;
}
if (!EVP_DigestFinalXOF(temp_ctx, digest, length)) {
@@ -889,25 +906,26 @@ error:
return NULL;
}
-static PyMethodDef EVPXOF_methods[] = {
- EVPXOF_DIGEST_METHODDEF
- EVPXOF_HEXDIGEST_METHODDEF
+static PyMethodDef HASHXOFobject_methods[] = {
+ _HASHLIB_HASHXOF_DIGEST_METHODDEF
+ _HASHLIB_HASHXOF_HEXDIGEST_METHODDEF
{NULL, NULL} /* sentinel */
};
static PyObject *
-EVPXOF_get_digest_size(PyObject *Py_UNUSED(self), void *Py_UNUSED(closure))
+_hashlib_HASHXOF_digest_size(PyObject *Py_UNUSED(self),
+ void *Py_UNUSED(closure))
{
return PyLong_FromLong(0);
}
-static PyGetSetDef EVPXOF_getseters[] = {
- {"digest_size", EVPXOF_get_digest_size, NULL, NULL, NULL},
+static PyGetSetDef HASHXOFobject_getsets[] = {
+ {"digest_size", _hashlib_HASHXOF_digest_size, NULL, NULL, NULL},
{NULL} /* Sentinel */
};
-PyDoc_STRVAR(hashxoftype_doc,
+PyDoc_STRVAR(HASHXOFobject_type_doc,
"HASHXOF(name, string=b\'\')\n"
"--\n"
"\n"
@@ -925,38 +943,42 @@ PyDoc_STRVAR(hashxoftype_doc,
"name -- the hash algorithm being used by this object\n"
"digest_size -- number of bytes in this hashes output");
-static PyType_Slot EVPXOFtype_slots[] = {
- {Py_tp_doc, (char *)hashxoftype_doc},
- {Py_tp_methods, EVPXOF_methods},
- {Py_tp_getset, EVPXOF_getseters},
+static PyType_Slot HASHXOFobject_type_slots[] = {
+ {Py_tp_doc, (char *)HASHXOFobject_type_doc},
+ {Py_tp_methods, HASHXOFobject_methods},
+ {Py_tp_getset, HASHXOFobject_getsets},
{0, 0},
};
-static PyType_Spec EVPXOFtype_spec = {
- "_hashlib.HASHXOF", /*tp_name*/
- sizeof(EVPobject), /*tp_basicsize*/
- 0, /*tp_itemsize*/
- Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_DISALLOW_INSTANTIATION | Py_TPFLAGS_IMMUTABLETYPE,
- EVPXOFtype_slots
+static PyType_Spec HASHXOFobject_type_spec = {
+ .name = "_hashlib.HASHXOF",
+ .basicsize = sizeof(HASHobject),
+ .flags = (
+ Py_TPFLAGS_DEFAULT
+ | Py_TPFLAGS_BASETYPE
+ | Py_TPFLAGS_DISALLOW_INSTANTIATION
+ | Py_TPFLAGS_IMMUTABLETYPE
+ ),
+ .slots = HASHXOFobject_type_slots
};
#endif
-static PyObject*
-py_evp_fromname(PyObject *module, const char *digestname, PyObject *data_obj,
- int usedforsecurity)
+static PyObject *
+_hashlib_HASH(PyObject *module, const char *digestname, PyObject *data_obj,
+ int usedforsecurity)
{
Py_buffer view = { 0 };
PY_EVP_MD *digest = NULL;
PyTypeObject *type;
- EVPobject *self = NULL;
+ HASHobject *self = NULL;
if (data_obj != NULL) {
GET_BUFFER_VIEW_OR_ERROUT(data_obj, &view);
}
- digest = py_digest_by_name(
+ digest = get_openssl_evp_md_by_utf8name(
module, digestname, usedforsecurity ? Py_ht_evp : Py_ht_evp_nosecurity
);
if (digest == NULL) {
@@ -964,12 +986,12 @@ py_evp_fromname(PyObject *module, const char *digestname, PyObject *data_obj,
}
if ((EVP_MD_flags(digest) & EVP_MD_FLAG_XOF) == EVP_MD_FLAG_XOF) {
- type = get_hashlib_state(module)->EVPXOFtype;
+ type = get_hashlib_state(module)->HASHXOF_type;
} else {
- type = get_hashlib_state(module)->EVPtype;
+ type = get_hashlib_state(module)->HASH_type;
}
- self = newEVPobject(type);
+ self = new_hash_object(type);
if (self == NULL) {
goto exit;
}
@@ -994,10 +1016,10 @@ py_evp_fromname(PyObject *module, const char *digestname, PyObject *data_obj,
/* We do not initialize self->lock here as this is the constructor
* where it is not yet possible to have concurrent access. */
Py_BEGIN_ALLOW_THREADS
- result = EVP_hash(self, view.buf, view.len);
+ result = _hashlib_HASH_hash(self, view.buf, view.len);
Py_END_ALLOW_THREADS
} else {
- result = EVP_hash(self, view.buf, view.len);
+ result = _hashlib_HASH_hash(self, view.buf, view.len);
}
if (result == -1) {
assert(PyErr_Occurred());
@@ -1017,16 +1039,25 @@ exit:
return (PyObject *)self;
}
+#define CALL_HASHLIB_NEW(MODULE, NAME, DATA, STRING, USEDFORSECURITY) \
+ do { \
+ PyObject *data_obj; \
+ if (_Py_hashlib_data_argument(&data_obj, DATA, STRING) < 0) { \
+ return NULL; \
+ } \
+ return _hashlib_HASH(MODULE, NAME, data_obj, USEDFORSECURITY); \
+ } while (0)
/* The module-level function: new() */
/*[clinic input]
-_hashlib.new as EVP_new
+_hashlib.new as _hashlib_HASH_new
- name as name_obj: object
- string as data_obj: object(c_default="NULL") = b''
+ name: str
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Return a new hash object using the named algorithm.
@@ -1037,136 +1068,137 @@ The MD5 and SHA1 algorithms are always supported.
[clinic start generated code]*/
static PyObject *
-EVP_new_impl(PyObject *module, PyObject *name_obj, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=ddd5053f92dffe90 input=c24554d0337be1b0]*/
+_hashlib_HASH_new_impl(PyObject *module, const char *name, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=b905aaf9840c1bbd input=c34af6c6e696d44e]*/
{
- char *name;
- if (!PyArg_Parse(name_obj, "s", &name)) {
- PyErr_SetString(PyExc_TypeError, "name must be a string");
- return NULL;
- }
- return py_evp_fromname(module, name, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, name, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_md5
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a md5 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_md5_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=87b0186440a44f8c input=990e36d5e689b16e]*/
+_hashlib_openssl_md5_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=ca8cf184d90f7432 input=e7c0adbd6a867db1]*/
{
- return py_evp_fromname(module, Py_hash_md5, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_md5, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_sha1
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha1 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha1_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=6813024cf690670d input=948f2f4b6deabc10]*/
+_hashlib_openssl_sha1_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=1736fb7b310d64be input=f7e5bb1711e952d8]*/
{
- return py_evp_fromname(module, Py_hash_sha1, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha1, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_sha224
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha224 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha224_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=a2dfe7cc4eb14ebb input=f9272821fadca505]*/
+_hashlib_openssl_sha224_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=0d6ff57be5e5c140 input=3820fff7ed3a53b8]*/
{
- return py_evp_fromname(module, Py_hash_sha224, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha224, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_sha256
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha256 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha256_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=1f874a34870f0a68 input=549fad9d2930d4c5]*/
+_hashlib_openssl_sha256_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=412ea7111555b6e7 input=9a2f115cf1f7e0eb]*/
{
- return py_evp_fromname(module, Py_hash_sha256, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha256, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_sha384
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha384 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha384_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=58529eff9ca457b2 input=48601a6e3bf14ad7]*/
+_hashlib_openssl_sha384_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=2e0dc395b59ed726 input=1ea48f6f01e77cfb]*/
{
- return py_evp_fromname(module, Py_hash_sha384, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha384, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_sha512
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha512 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha512_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=2c744c9e4a40d5f6 input=c5c46a2a817aa98f]*/
+_hashlib_openssl_sha512_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=4bdd760388dbfc0f input=3cf56903e07d1f5c]*/
{
- return py_evp_fromname(module, Py_hash_sha512, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha512, data, string, usedforsecurity);
}
@@ -1175,77 +1207,81 @@ _hashlib_openssl_sha512_impl(PyObject *module, PyObject *data_obj,
/*[clinic input]
_hashlib.openssl_sha3_224
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha3-224 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha3_224_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=144641c1d144b974 input=e3a01b2888916157]*/
+_hashlib_openssl_sha3_224_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=6d8dc2a924f3ba35 input=7f14f16a9f6a3158]*/
{
- return py_evp_fromname(module, Py_hash_sha3_224, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha3_224, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_sha3_256
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha3-256 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha3_256_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=c61f1ab772d06668 input=e2908126c1b6deed]*/
+_hashlib_openssl_sha3_256_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=9e520f537b3a4622 input=7987150939d5e352]*/
{
- return py_evp_fromname(module, Py_hash_sha3_256, data_obj , usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha3_256, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_sha3_384
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha3-384 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha3_384_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=f68e4846858cf0ee input=ec0edf5c792f8252]*/
+_hashlib_openssl_sha3_384_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=d239ba0463fd6138 input=fc943401f67e3b81]*/
{
- return py_evp_fromname(module, Py_hash_sha3_384, data_obj , usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha3_384, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_sha3_512
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha3-512 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha3_512_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=2eede478c159354a input=64e2cc0c094d56f4]*/
+_hashlib_openssl_sha3_512_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=17662f21038c2278 input=6601ddd2c6c1516d]*/
{
- return py_evp_fromname(module, Py_hash_sha3_512, data_obj , usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha3_512, data, string, usedforsecurity);
}
#endif /* PY_OPENSSL_HAS_SHA3 */
@@ -1253,42 +1289,46 @@ _hashlib_openssl_sha3_512_impl(PyObject *module, PyObject *data_obj,
/*[clinic input]
_hashlib.openssl_shake_128
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a shake-128 variable hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_shake_128_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=bc49cdd8ada1fa97 input=6c9d67440eb33ec8]*/
+_hashlib_openssl_shake_128_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=4e6afed8d18980ad input=373c3f1c93d87b37]*/
{
- return py_evp_fromname(module, Py_hash_shake_128, data_obj , usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_shake_128, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_shake_256
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a shake-256 variable hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_shake_256_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=358d213be8852df7 input=479cbe9fefd4a9f8]*/
+_hashlib_openssl_shake_256_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=62481bce4a77d16c input=101c139ea2ddfcbf]*/
{
- return py_evp_fromname(module, Py_hash_shake_256, data_obj , usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_shake_256, data, string, usedforsecurity);
}
#endif /* PY_OPENSSL_HAS_SHAKE */
+#undef CALL_HASHLIB_NEW
+
/*[clinic input]
_hashlib.pbkdf2_hmac as pbkdf2_hmac
@@ -1312,7 +1352,7 @@ pbkdf2_hmac_impl(PyObject *module, const char *hash_name,
long dklen;
int retval;
- PY_EVP_MD *digest = py_digest_by_name(module, hash_name, Py_ht_pbkdf2);
+ PY_EVP_MD *digest = get_openssl_evp_md_by_utf8name(module, hash_name, Py_ht_pbkdf2);
if (digest == NULL) {
goto end;
}
@@ -1514,7 +1554,7 @@ _hashlib_hmac_singleshot_impl(PyObject *module, Py_buffer *key,
return NULL;
}
- evp = py_digest_by_digestmod(module, digest, Py_ht_mac);
+ evp = get_openssl_evp_md(module, digest, Py_ht_mac);
if (evp == NULL) {
return NULL;
}
@@ -1583,7 +1623,7 @@ _hashlib_hmac_new_impl(PyObject *module, Py_buffer *key, PyObject *msg_obj,
return NULL;
}
- digest = py_digest_by_digestmod(module, digestmod, Py_ht_mac);
+ digest = get_openssl_evp_md(module, digestmod, Py_ht_mac);
if (digest == NULL) {
return NULL;
}
@@ -1740,7 +1780,7 @@ _hmac_repr(PyObject *op)
if (md == NULL) {
return NULL;
}
- PyObject *digest_name = py_digest_name(md);
+ PyObject *digest_name = get_openssl_evp_md_name(md);
if (digest_name == NULL) {
return NULL;
}
@@ -1860,7 +1900,7 @@ _hashlib_hmac_get_name(PyObject *op, void *Py_UNUSED(closure))
if (md == NULL) {
return NULL;
}
- PyObject *digest_name = py_digest_name(md);
+ PyObject *digest_name = get_openssl_evp_md_name(md);
if (digest_name == NULL) {
return NULL;
}
@@ -1926,7 +1966,7 @@ typedef struct _internal_name_mapper_state {
/* A callback function to pass to OpenSSL's OBJ_NAME_do_all(...) */
static void
-#if OPENSSL_VERSION_NUMBER >= 0x30000000L
+#ifdef Py_HAS_OPENSSL3_SUPPORT
_openssl_hash_name_mapper(EVP_MD *md, void *arg)
#else
_openssl_hash_name_mapper(const EVP_MD *md, const char *from,
@@ -1942,7 +1982,7 @@ _openssl_hash_name_mapper(const EVP_MD *md, const char *from,
return;
}
- py_name = py_digest_name(md);
+ py_name = get_openssl_evp_md_name(md);
if (py_name == NULL) {
state->error = 1;
} else {
@@ -1966,7 +2006,7 @@ hashlib_md_meth_names(PyObject *module)
return -1;
}
-#if OPENSSL_VERSION_NUMBER >= 0x30000000L
+#ifdef Py_HAS_OPENSSL3_SUPPORT
// get algorithms from all activated providers in default context
EVP_MD_do_all_provided(NULL, &_openssl_hash_name_mapper, &state);
#else
@@ -1999,7 +2039,7 @@ _hashlib_get_fips_mode_impl(PyObject *module)
/*[clinic end generated code: output=87eece1bab4d3fa9 input=2db61538c41c6fef]*/
{
-#if OPENSSL_VERSION_NUMBER >= 0x30000000L
+#ifdef Py_HAS_OPENSSL3_SUPPORT
return EVP_default_properties_is_fips_enabled(NULL);
#else
ERR_clear_error();
@@ -2134,7 +2174,7 @@ _hashlib_compare_digest_impl(PyObject *module, PyObject *a, PyObject *b)
/* List of functions exported by this module */
static struct PyMethodDef EVP_functions[] = {
- EVP_NEW_METHODDEF
+ _HASHLIB_HASH_NEW_METHODDEF
PBKDF2_HMAC_METHODDEF
_HASHLIB_SCRYPT_METHODDEF
_HASHLIB_GET_FIPS_MODE_METHODDEF
@@ -2163,10 +2203,10 @@ static int
hashlib_traverse(PyObject *m, visitproc visit, void *arg)
{
_hashlibstate *state = get_hashlib_state(m);
- Py_VISIT(state->EVPtype);
+ Py_VISIT(state->HASH_type);
Py_VISIT(state->HMACtype);
#ifdef PY_OPENSSL_HAS_SHAKE
- Py_VISIT(state->EVPXOFtype);
+ Py_VISIT(state->HASHXOF_type);
#endif
Py_VISIT(state->constructs);
Py_VISIT(state->unsupported_digestmod_error);
@@ -2177,10 +2217,10 @@ static int
hashlib_clear(PyObject *m)
{
_hashlibstate *state = get_hashlib_state(m);
- Py_CLEAR(state->EVPtype);
+ Py_CLEAR(state->HASH_type);
Py_CLEAR(state->HMACtype);
#ifdef PY_OPENSSL_HAS_SHAKE
- Py_CLEAR(state->EVPXOFtype);
+ Py_CLEAR(state->HASHXOF_type);
#endif
Py_CLEAR(state->constructs);
Py_CLEAR(state->unsupported_digestmod_error);
@@ -2214,37 +2254,37 @@ hashlib_init_hashtable(PyObject *module)
}
static int
-hashlib_init_evptype(PyObject *module)
+hashlib_init_HASH_type(PyObject *module)
{
_hashlibstate *state = get_hashlib_state(module);
- state->EVPtype = (PyTypeObject *)PyType_FromSpec(&EVPtype_spec);
- if (state->EVPtype == NULL) {
+ state->HASH_type = (PyTypeObject *)PyType_FromSpec(&HASHobject_type_spec);
+ if (state->HASH_type == NULL) {
return -1;
}
- if (PyModule_AddType(module, state->EVPtype) < 0) {
+ if (PyModule_AddType(module, state->HASH_type) < 0) {
return -1;
}
return 0;
}
static int
-hashlib_init_evpxoftype(PyObject *module)
+hashlib_init_HASHXOF_type(PyObject *module)
{
#ifdef PY_OPENSSL_HAS_SHAKE
_hashlibstate *state = get_hashlib_state(module);
- if (state->EVPtype == NULL) {
+ if (state->HASH_type == NULL) {
return -1;
}
- state->EVPXOFtype = (PyTypeObject *)PyType_FromSpecWithBases(
- &EVPXOFtype_spec, (PyObject *)state->EVPtype
+ state->HASHXOF_type = (PyTypeObject *)PyType_FromSpecWithBases(
+ &HASHXOFobject_type_spec, (PyObject *)state->HASH_type
);
- if (state->EVPXOFtype == NULL) {
+ if (state->HASHXOF_type == NULL) {
return -1;
}
- if (PyModule_AddType(module, state->EVPXOFtype) < 0) {
+ if (PyModule_AddType(module, state->HASHXOF_type) < 0) {
return -1;
}
#endif
@@ -2341,8 +2381,8 @@ hashlib_constants(PyObject *module)
static PyModuleDef_Slot hashlib_slots[] = {
{Py_mod_exec, hashlib_init_hashtable},
- {Py_mod_exec, hashlib_init_evptype},
- {Py_mod_exec, hashlib_init_evpxoftype},
+ {Py_mod_exec, hashlib_init_HASH_type},
+ {Py_mod_exec, hashlib_init_HASHXOF_type},
{Py_mod_exec, hashlib_init_hmactype},
{Py_mod_exec, hashlib_md_meth_names},
{Py_mod_exec, hashlib_init_constructors},
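Usage note for the constructor changes above: _hashlib.new() and the openssl_* constructors now take the input buffer as "data" (positionally or by keyword) while keeping "string" as a keyword-only alias, and CALL_HASHLIB_NEW routes both through _Py_hashlib_data_argument() before building the hash object. A rough sketch of the resulting call patterns, assuming an OpenSSL-backed build (how conflicting data/string arguments are reported is not shown in this diff):

    import _hashlib

    h1 = _hashlib.new("sha256", b"hello")         # buffer passed as "data"
    h2 = _hashlib.openssl_sha256(data=b"hello")   # same for per-algorithm constructors
    h3 = _hashlib.new("sha256", string=b"hello")  # legacy keyword, resolved by the helper
    assert h1.hexdigest() == h2.hexdigest() == h3.hexdigest()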
diff --git a/Modules/_interpchannelsmodule.c b/Modules/_interpchannelsmodule.c
index bfd805bf5e4..ea2e5f99dfa 100644
--- a/Modules/_interpchannelsmodule.c
+++ b/Modules/_interpchannelsmodule.c
@@ -254,10 +254,10 @@ _get_current_module_state(void)
{
PyObject *mod = _get_current_module();
if (mod == NULL) {
- // XXX import it?
- PyErr_SetString(PyExc_RuntimeError,
- MODULE_NAME_STR " module not imported yet");
- return NULL;
+ mod = PyImport_ImportModule(MODULE_NAME_STR);
+ if (mod == NULL) {
+ return NULL;
+ }
}
module_state *state = get_module_state(mod);
Py_DECREF(mod);
diff --git a/Modules/_interpqueuesmodule.c b/Modules/_interpqueuesmodule.c
index ffc52c8ee74..71d8fd8716c 100644
--- a/Modules/_interpqueuesmodule.c
+++ b/Modules/_interpqueuesmodule.c
@@ -1356,10 +1356,10 @@ _queueobj_from_xid(_PyXIData_t *data)
PyObject *mod = _get_current_module();
if (mod == NULL) {
- // XXX import it?
- PyErr_SetString(PyExc_RuntimeError,
- MODULE_NAME_STR " module not imported yet");
- return NULL;
+ mod = PyImport_ImportModule(MODULE_NAME_STR);
+ if (mod == NULL) {
+ return NULL;
+ }
}
PyTypeObject *cls = get_external_queue_type(mod);
diff --git a/Modules/_interpretersmodule.c b/Modules/_interpretersmodule.c
index f4807fd214b..037e9544543 100644
--- a/Modules/_interpretersmodule.c
+++ b/Modules/_interpretersmodule.c
@@ -9,7 +9,6 @@
#include "pycore_code.h" // _PyCode_HAS_EXECUTORS()
#include "pycore_crossinterp.h" // _PyXIData_t
#include "pycore_pyerrors.h" // _PyErr_GetRaisedException()
-#include "pycore_function.h" // _PyFunction_VerifyStateless()
#include "pycore_interp.h" // _PyInterpreterState_IDIncref()
#include "pycore_modsupport.h" // _PyArg_BadArgument()
#include "pycore_namespace.h" // _PyNamespace_New()
@@ -73,6 +72,32 @@ is_running_main(PyInterpreterState *interp)
}
+static inline int
+is_notshareable_raised(PyThreadState *tstate)
+{
+ PyObject *exctype = _PyXIData_GetNotShareableErrorType(tstate);
+ return _PyErr_ExceptionMatches(tstate, exctype);
+}
+
+static void
+unwrap_not_shareable(PyThreadState *tstate)
+{
+ if (!is_notshareable_raised(tstate)) {
+ return;
+ }
+ PyObject *exc = _PyErr_GetRaisedException(tstate);
+ PyObject *cause = PyException_GetCause(exc);
+ if (cause != NULL) {
+ Py_DECREF(exc);
+ exc = cause;
+ }
+ else {
+ assert(PyException_GetContext(exc) == NULL);
+ }
+ _PyErr_SetRaisedException(tstate, exc);
+}
+
+
/* Cross-interpreter Buffer Views *******************************************/
/* When a memoryview object is "shared" between interpreters,
@@ -321,10 +346,10 @@ _get_current_module_state(void)
{
PyObject *mod = _get_current_module();
if (mod == NULL) {
- // XXX import it?
- PyErr_SetString(PyExc_RuntimeError,
- MODULE_NAME_STR " module not imported yet");
- return NULL;
+ mod = PyImport_ImportModule(MODULE_NAME_STR);
+ if (mod == NULL) {
+ return NULL;
+ }
}
module_state *state = get_module_state(mod);
Py_DECREF(mod);
@@ -361,81 +386,6 @@ _get_current_xibufferview_type(void)
}
-/* Python code **************************************************************/
-
-static const char *
-check_code_str(PyUnicodeObject *text)
-{
- assert(text != NULL);
- if (PyUnicode_GET_LENGTH(text) == 0) {
- return "too short";
- }
-
- // XXX Verify that it parses?
-
- return NULL;
-}
-
-#ifndef NDEBUG
-static int
-code_has_args(PyCodeObject *code)
-{
- assert(code != NULL);
- return (code->co_argcount > 0
- || code->co_posonlyargcount > 0
- || code->co_kwonlyargcount > 0
- || code->co_flags & (CO_VARARGS | CO_VARKEYWORDS));
-}
-#endif
-
-#define RUN_TEXT 1
-#define RUN_CODE 2
-
-static const char *
-get_code_str(PyObject *arg, Py_ssize_t *len_p, PyObject **bytes_p, int *flags_p)
-{
- const char *codestr = NULL;
- Py_ssize_t len = -1;
- PyObject *bytes_obj = NULL;
- int flags = 0;
-
- if (PyUnicode_Check(arg)) {
- assert(PyUnicode_Check(arg)
- && (check_code_str((PyUnicodeObject *)arg) == NULL));
- codestr = PyUnicode_AsUTF8AndSize(arg, &len);
- if (codestr == NULL) {
- return NULL;
- }
- if (strlen(codestr) != (size_t)len) {
- PyErr_SetString(PyExc_ValueError,
- "source code string cannot contain null bytes");
- return NULL;
- }
- flags = RUN_TEXT;
- }
- else {
- assert(PyCode_Check(arg));
- assert(_PyCode_VerifyStateless(
- PyThreadState_Get(), (PyCodeObject *)arg, NULL, NULL, NULL) == 0);
- assert(!code_has_args((PyCodeObject *)arg));
- flags = RUN_CODE;
-
- // Serialize the code object.
- bytes_obj = PyMarshal_WriteObjectToString(arg, Py_MARSHAL_VERSION);
- if (bytes_obj == NULL) {
- return NULL;
- }
- codestr = PyBytes_AS_STRING(bytes_obj);
- len = PyBytes_GET_SIZE(bytes_obj);
- }
-
- *flags_p = flags;
- *bytes_p = bytes_obj;
- *len_p = len;
- return codestr;
-}
-
-
/* interpreter-specific code ************************************************/
static int
@@ -498,85 +448,265 @@ config_from_object(PyObject *configobj, PyInterpreterConfig *config)
}
+struct interp_call {
+ _PyXIData_t *func;
+ _PyXIData_t *args;
+ _PyXIData_t *kwargs;
+ struct {
+ _PyXIData_t func;
+ _PyXIData_t args;
+ _PyXIData_t kwargs;
+ } _preallocated;
+};
+
+static void
+_interp_call_clear(struct interp_call *call)
+{
+ if (call->func != NULL) {
+ _PyXIData_Clear(NULL, call->func);
+ }
+ if (call->args != NULL) {
+ _PyXIData_Clear(NULL, call->args);
+ }
+ if (call->kwargs != NULL) {
+ _PyXIData_Clear(NULL, call->kwargs);
+ }
+ *call = (struct interp_call){0};
+}
+
static int
-_run_script(PyObject *ns, const char *codestr, Py_ssize_t codestrlen, int flags)
+_interp_call_pack(PyThreadState *tstate, struct interp_call *call,
+ PyObject *func, PyObject *args, PyObject *kwargs)
{
- PyObject *result = NULL;
- if (flags & RUN_TEXT) {
- result = PyRun_StringFlags(codestr, Py_file_input, ns, ns, NULL);
- }
- else if (flags & RUN_CODE) {
- PyObject *code = PyMarshal_ReadObjectFromString(codestr, codestrlen);
- if (code != NULL) {
- result = PyEval_EvalCode(code, ns, ns);
- Py_DECREF(code);
+ xidata_fallback_t fallback = _PyXIDATA_FULL_FALLBACK;
+ assert(call->func == NULL);
+ assert(call->args == NULL);
+ assert(call->kwargs == NULL);
+ // Handle the func.
+ if (!PyCallable_Check(func)) {
+ _PyErr_Format(tstate, PyExc_TypeError,
+ "expected a callable, got %R", func);
+ return -1;
+ }
+ if (_PyFunction_GetXIData(tstate, func, &call->_preallocated.func) < 0) {
+ PyObject *exc = _PyErr_GetRaisedException(tstate);
+ if (_PyPickle_GetXIData(tstate, func, &call->_preallocated.func) < 0) {
+ _PyErr_SetRaisedException(tstate, exc);
+ return -1;
+ }
+ Py_DECREF(exc);
+ }
+ call->func = &call->_preallocated.func;
+ // Handle the args.
+ if (args == NULL || args == Py_None) {
+ // Leave it empty.
+ }
+ else {
+ assert(PyTuple_Check(args));
+ if (PyTuple_GET_SIZE(args) > 0) {
+ if (_PyObject_GetXIData(
+ tstate, args, fallback, &call->_preallocated.args) < 0)
+ {
+ _interp_call_clear(call);
+ return -1;
+ }
+ call->args = &call->_preallocated.args;
}
}
+ // Handle the kwargs.
+ if (kwargs == NULL || kwargs == Py_None) {
+ // Leave it empty.
+ }
else {
- Py_UNREACHABLE();
+ assert(PyDict_Check(kwargs));
+ if (PyDict_GET_SIZE(kwargs) > 0) {
+ if (_PyObject_GetXIData(
+ tstate, kwargs, fallback, &call->_preallocated.kwargs) < 0)
+ {
+ _interp_call_clear(call);
+ return -1;
+ }
+ call->kwargs = &call->_preallocated.kwargs;
+ }
}
+ return 0;
+}
+
+static int
+_interp_call_unpack(struct interp_call *call,
+ PyObject **p_func, PyObject **p_args, PyObject **p_kwargs)
+{
+ // Unpack the func.
+ PyObject *func = _PyXIData_NewObject(call->func);
+ if (func == NULL) {
+ return -1;
+ }
+ // Unpack the args.
+ PyObject *args;
+ if (call->args == NULL) {
+ args = PyTuple_New(0);
+ if (args == NULL) {
+ Py_DECREF(func);
+ return -1;
+ }
+ }
+ else {
+ args = _PyXIData_NewObject(call->args);
+ if (args == NULL) {
+ Py_DECREF(func);
+ return -1;
+ }
+ assert(PyTuple_Check(args));
+ }
+ // Unpack the kwargs.
+ PyObject *kwargs = NULL;
+ if (call->kwargs != NULL) {
+ kwargs = _PyXIData_NewObject(call->kwargs);
+ if (kwargs == NULL) {
+ Py_DECREF(func);
+ Py_DECREF(args);
+ return -1;
+ }
+ assert(PyDict_Check(kwargs));
+ }
+ *p_func = func;
+ *p_args = args;
+ *p_kwargs = kwargs;
+ return 0;
+}
+
+static int
+_make_call(struct interp_call *call,
+ PyObject **p_result, _PyXI_errcode *p_errcode)
+{
+ assert(call != NULL && call->func != NULL);
+ PyThreadState *tstate = _PyThreadState_GET();
+
+ // Get the func and args.
+ PyObject *func = NULL, *args = NULL, *kwargs = NULL;
+ if (_interp_call_unpack(call, &func, &args, &kwargs) < 0) {
+ assert(func == NULL);
+ assert(args == NULL);
+ assert(kwargs == NULL);
+ *p_errcode = is_notshareable_raised(tstate)
+ ? _PyXI_ERR_NOT_SHAREABLE
+ : _PyXI_ERR_OTHER;
+ return -1;
+ }
+ *p_errcode = _PyXI_ERR_NO_ERROR;
+
+ // Make the call.
+ PyObject *resobj = PyObject_Call(func, args, kwargs);
+ Py_DECREF(func);
+ Py_XDECREF(args);
+ Py_XDECREF(kwargs);
+ if (resobj == NULL) {
+ return -1;
+ }
+ *p_result = resobj;
+ return 0;
+}
+
+static int
+_run_script(_PyXIData_t *script, PyObject *ns, _PyXI_errcode *p_errcode)
+{
+ PyObject *code = _PyXIData_NewObject(script);
+ if (code == NULL) {
+ *p_errcode = _PyXI_ERR_NOT_SHAREABLE;
+ return -1;
+ }
+ PyObject *result = PyEval_EvalCode(code, ns, ns);
+ Py_DECREF(code);
if (result == NULL) {
+ *p_errcode = _PyXI_ERR_UNCAUGHT_EXCEPTION;
return -1;
}
+ assert(result == Py_None);
Py_DECREF(result); // We throw away the result.
return 0;
}
+struct run_result {
+ PyObject *result;
+ PyObject *excinfo;
+};
+
+static void
+_run_result_clear(struct run_result *runres)
+{
+ Py_CLEAR(runres->result);
+ Py_CLEAR(runres->excinfo);
+}
+
static int
-_run_in_interpreter(PyInterpreterState *interp,
- const char *codestr, Py_ssize_t codestrlen,
- PyObject *shareables, int flags,
- PyObject **p_excinfo)
+_run_in_interpreter(PyThreadState *tstate, PyInterpreterState *interp,
+ _PyXIData_t *script, struct interp_call *call,
+ PyObject *shareables, struct run_result *runres)
{
- assert(!PyErr_Occurred());
+ assert(!_PyErr_Occurred(tstate));
_PyXI_session *session = _PyXI_NewSession();
if (session == NULL) {
return -1;
}
+ _PyXI_session_result result = {0};
// Prep and switch interpreters.
- if (_PyXI_Enter(session, interp, shareables) < 0) {
- if (PyErr_Occurred()) {
- // If an error occured at this step, it means that interp
- // was not prepared and switched.
- _PyXI_FreeSession(session);
- return -1;
- }
- // Now, apply the error from another interpreter:
- PyObject *excinfo = _PyXI_ApplyCapturedException(session);
- if (excinfo != NULL) {
- *p_excinfo = excinfo;
- }
- assert(PyErr_Occurred());
+ if (_PyXI_Enter(session, interp, shareables, &result) < 0) {
+        // If an error occurred at this step, it means that interp
+ // was not prepared and switched.
_PyXI_FreeSession(session);
+ assert(result.excinfo == NULL);
return -1;
}
- // Run the script.
+ // Run in the interpreter.
int res = -1;
- PyObject *mainns = _PyXI_GetMainNamespace(session);
- if (mainns == NULL) {
- goto finally;
+ _PyXI_errcode errcode = _PyXI_ERR_NO_ERROR;
+ if (script != NULL) {
+ assert(call == NULL);
+ PyObject *mainns = _PyXI_GetMainNamespace(session, &errcode);
+ if (mainns == NULL) {
+ goto finally;
+ }
+ res = _run_script(script, mainns, &errcode);
}
- res = _run_script(mainns, codestr, codestrlen, flags);
+ else {
+ assert(call != NULL);
+ PyObject *resobj;
+ res = _make_call(call, &resobj, &errcode);
+ if (res == 0) {
+ res = _PyXI_Preserve(session, "resobj", resobj, &errcode);
+ Py_DECREF(resobj);
+ if (res < 0) {
+ goto finally;
+ }
+ }
+ }
+ int exitres;
finally:
// Clean up and switch back.
- _PyXI_Exit(session);
+ exitres = _PyXI_Exit(session, errcode, &result);
+ assert(res == 0 || exitres != 0);
+ _PyXI_FreeSession(session);
- // Propagate any exception out to the caller.
- assert(!PyErr_Occurred());
- if (res < 0) {
- PyObject *excinfo = _PyXI_ApplyCapturedException(session);
- if (excinfo != NULL) {
- *p_excinfo = excinfo;
- }
+ res = exitres;
+ if (_PyErr_Occurred(tstate)) {
+ assert(res < 0);
+ }
+ else if (res < 0) {
+ assert(result.excinfo != NULL);
+ runres->excinfo = Py_NewRef(result.excinfo);
+ res = -1;
}
else {
- assert(!_PyXI_HasCapturedException(session));
+ assert(result.excinfo == NULL);
+ runres->result = _PyXI_GetPreserved(&result, "resobj");
+ if (_PyErr_Occurred(tstate)) {
+ res = -1;
+ }
}
-
- _PyXI_FreeSession(session);
+ _PyXI_ClearResult(&result);
return res;
}
@@ -927,21 +1057,23 @@ interp_set___main___attrs(PyObject *self, PyObject *args, PyObject *kwargs)
}
// Prep and switch interpreters, including apply the updates.
- if (_PyXI_Enter(session, interp, updates) < 0) {
- if (!PyErr_Occurred()) {
- _PyXI_ApplyCapturedException(session);
- assert(PyErr_Occurred());
- }
- else {
- assert(!_PyXI_HasCapturedException(session));
- }
+ if (_PyXI_Enter(session, interp, updates, NULL) < 0) {
_PyXI_FreeSession(session);
return NULL;
}
// Clean up and switch back.
- _PyXI_Exit(session);
+ assert(!PyErr_Occurred());
+ int res = _PyXI_Exit(session, _PyXI_ERR_NO_ERROR, NULL);
_PyXI_FreeSession(session);
+ assert(res == 0);
+ if (res < 0) {
+ // unreachable
+ if (!PyErr_Occurred()) {
+ PyErr_SetString(PyExc_RuntimeError, "unresolved error");
+ }
+ return NULL;
+ }
Py_RETURN_NONE;
}
@@ -952,104 +1084,16 @@ PyDoc_STRVAR(set___main___attrs_doc,
Bind the given attributes in the interpreter's __main__ module.");
-static PyUnicodeObject *
-convert_script_arg(PyThreadState *tstate,
- PyObject *arg, const char *fname, const char *displayname,
- const char *expected)
-{
- PyUnicodeObject *str = NULL;
- if (PyUnicode_CheckExact(arg)) {
- str = (PyUnicodeObject *)Py_NewRef(arg);
- }
- else if (PyUnicode_Check(arg)) {
- // XXX str = PyUnicode_FromObject(arg);
- str = (PyUnicodeObject *)Py_NewRef(arg);
- }
- else {
- _PyArg_BadArgument(fname, displayname, expected, arg);
- return NULL;
- }
-
- const char *err = check_code_str(str);
- if (err != NULL) {
- Py_DECREF(str);
- _PyErr_Format(tstate, PyExc_ValueError,
- "%.200s(): bad script text (%s)", fname, err);
- return NULL;
- }
-
- return str;
-}
-
-static PyCodeObject *
-convert_code_arg(PyThreadState *tstate,
- PyObject *arg, const char *fname, const char *displayname,
- const char *expected)
+static PyObject *
+_handle_script_error(struct run_result *runres)
{
- PyObject *cause;
- PyCodeObject *code = NULL;
- if (PyFunction_Check(arg)) {
- // For now we allow globals, so we can't use
- // _PyFunction_VerifyStateless().
- PyObject *codeobj = PyFunction_GetCode(arg);
- if (_PyCode_VerifyStateless(
- tstate, (PyCodeObject *)codeobj, NULL, NULL, NULL) < 0) {
- goto chained;
- }
- code = (PyCodeObject *)Py_NewRef(codeobj);
- }
- else if (PyCode_Check(arg)) {
- if (_PyCode_VerifyStateless(
- tstate, (PyCodeObject *)arg, NULL, NULL, NULL) < 0) {
- goto chained;
- }
- code = (PyCodeObject *)Py_NewRef(arg);
- }
- else {
- _PyArg_BadArgument(fname, displayname, expected, arg);
+ assert(runres->result == NULL);
+ if (runres->excinfo == NULL) {
+ assert(PyErr_Occurred());
return NULL;
}
-
- return code;
-
-chained:
- cause = _PyErr_GetRaisedException(tstate);
- assert(cause != NULL);
- _PyArg_BadArgument(fname, displayname, expected, arg);
- PyObject *exc = _PyErr_GetRaisedException(tstate);
- PyException_SetCause(exc, cause);
- _PyErr_SetRaisedException(tstate, exc);
- return NULL;
-}
-
-static int
-_interp_exec(PyObject *self, PyInterpreterState *interp,
- PyObject *code_arg, PyObject *shared_arg, PyObject **p_excinfo)
-{
- if (shared_arg != NULL && !PyDict_CheckExact(shared_arg)) {
- PyErr_SetString(PyExc_TypeError, "expected 'shared' to be a dict");
- return -1;
- }
-
- // Extract code.
- Py_ssize_t codestrlen = -1;
- PyObject *bytes_obj = NULL;
- int flags = 0;
- const char *codestr = get_code_str(code_arg,
- &codestrlen, &bytes_obj, &flags);
- if (codestr == NULL) {
- return -1;
- }
-
- // Run the code in the interpreter.
- int res = _run_in_interpreter(interp, codestr, codestrlen,
- shared_arg, flags, p_excinfo);
- Py_XDECREF(bytes_obj);
- if (res < 0) {
- return -1;
- }
-
- return 0;
+ assert(!PyErr_Occurred());
+ return runres->excinfo;
}
static PyObject *
@@ -1062,8 +1106,9 @@ interp_exec(PyObject *self, PyObject *args, PyObject *kwds)
PyObject *shared = NULL;
int restricted = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwds,
- "OO|O$p:" FUNCNAME, kwlist,
- &id, &code, &shared, &restricted))
+ "OO|O!$p:" FUNCNAME, kwlist,
+ &id, &code, &PyDict_Type, &shared,
+ &restricted))
{
return NULL;
}
@@ -1075,26 +1120,22 @@ interp_exec(PyObject *self, PyObject *args, PyObject *kwds)
return NULL;
}
- const char *expected = "a string, a function, or a code object";
- if (PyUnicode_Check(code)) {
- code = (PyObject *)convert_script_arg(tstate, code, FUNCNAME,
- "argument 2", expected);
- }
- else {
- code = (PyObject *)convert_code_arg(tstate, code, FUNCNAME,
- "argument 2", expected);
- }
- if (code == NULL) {
+ // We don't need the script to be "pure", which means it can use
+ // global variables. They will be resolved against __main__.
+ _PyXIData_t xidata = {0};
+ if (_PyCode_GetScriptXIData(tstate, code, &xidata) < 0) {
+ unwrap_not_shareable(tstate);
return NULL;
}
- PyObject *excinfo = NULL;
- int res = _interp_exec(self, interp, code, shared, &excinfo);
- Py_DECREF(code);
+ struct run_result runres = {0};
+ int res = _run_in_interpreter(
+ tstate, interp, &xidata, NULL, shared, &runres);
+ _PyXIData_Release(&xidata);
if (res < 0) {
- assert((excinfo == NULL) != (PyErr_Occurred() == NULL));
- return excinfo;
+ return _handle_script_error(&runres);
}
+ assert(runres.result == NULL);
Py_RETURN_NONE;
#undef FUNCNAME
}
@@ -1126,8 +1167,9 @@ interp_run_string(PyObject *self, PyObject *args, PyObject *kwds)
PyObject *shared = NULL;
int restricted = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwds,
- "OU|O$p:" FUNCNAME, kwlist,
- &id, &script, &shared, &restricted))
+ "OU|O!$p:" FUNCNAME, kwlist,
+ &id, &script, &PyDict_Type, &shared,
+ &restricted))
{
return NULL;
}
@@ -1139,19 +1181,25 @@ interp_run_string(PyObject *self, PyObject *args, PyObject *kwds)
return NULL;
}
- script = (PyObject *)convert_script_arg(tstate, script, FUNCNAME,
- "argument 2", "a string");
- if (script == NULL) {
+ if (PyFunction_Check(script) || PyCode_Check(script)) {
+ _PyArg_BadArgument(FUNCNAME, "argument 2", "a string", script);
return NULL;
}
- PyObject *excinfo = NULL;
- int res = _interp_exec(self, interp, script, shared, &excinfo);
- Py_DECREF(script);
+ _PyXIData_t xidata = {0};
+ if (_PyCode_GetScriptXIData(tstate, script, &xidata) < 0) {
+ unwrap_not_shareable(tstate);
+ return NULL;
+ }
+
+ struct run_result runres = {0};
+ int res = _run_in_interpreter(
+ tstate, interp, &xidata, NULL, shared, &runres);
+ _PyXIData_Release(&xidata);
if (res < 0) {
- assert((excinfo == NULL) != (PyErr_Occurred() == NULL));
- return excinfo;
+ return _handle_script_error(&runres);
}
+ assert(runres.result == NULL);
Py_RETURN_NONE;
#undef FUNCNAME
}
@@ -1173,8 +1221,9 @@ interp_run_func(PyObject *self, PyObject *args, PyObject *kwds)
PyObject *shared = NULL;
int restricted = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwds,
- "OO|O$p:" FUNCNAME, kwlist,
- &id, &func, &shared, &restricted))
+ "OO|O!$p:" FUNCNAME, kwlist,
+ &id, &func, &PyDict_Type, &shared,
+ &restricted))
{
return NULL;
}
@@ -1186,20 +1235,34 @@ interp_run_func(PyObject *self, PyObject *args, PyObject *kwds)
return NULL;
}
- PyCodeObject *code = convert_code_arg(tstate, func, FUNCNAME,
- "argument 2",
- "a function or a code object");
- if (code == NULL) {
+ // We don't worry about checking globals. They will be resolved
+ // against __main__.
+ PyObject *code;
+ if (PyFunction_Check(func)) {
+ code = PyFunction_GET_CODE(func);
+ }
+ else if (PyCode_Check(func)) {
+ code = func;
+ }
+ else {
+ _PyArg_BadArgument(FUNCNAME, "argument 2", "a function", func);
return NULL;
}
- PyObject *excinfo = NULL;
- int res = _interp_exec(self, interp, (PyObject *)code, shared, &excinfo);
- Py_DECREF(code);
+ _PyXIData_t xidata = {0};
+ if (_PyCode_GetScriptXIData(tstate, code, &xidata) < 0) {
+ unwrap_not_shareable(tstate);
+ return NULL;
+ }
+
+ struct run_result runres = {0};
+ int res = _run_in_interpreter(
+ tstate, interp, &xidata, NULL, shared, &runres);
+ _PyXIData_Release(&xidata);
if (res < 0) {
- assert((excinfo == NULL) != (PyErr_Occurred() == NULL));
- return excinfo;
+ return _handle_script_error(&runres);
}
+ assert(runres.result == NULL);
Py_RETURN_NONE;
#undef FUNCNAME
}
@@ -1219,15 +1282,18 @@ interp_call(PyObject *self, PyObject *args, PyObject *kwds)
#define FUNCNAME MODULE_NAME_STR ".call"
PyThreadState *tstate = _PyThreadState_GET();
static char *kwlist[] = {"id", "callable", "args", "kwargs",
- "restrict", NULL};
+ "preserve_exc", "restrict", NULL};
PyObject *id, *callable;
PyObject *args_obj = NULL;
PyObject *kwargs_obj = NULL;
+ int preserve_exc = 0;
int restricted = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwds,
- "OO|OO$p:" FUNCNAME, kwlist,
- &id, &callable, &args_obj, &kwargs_obj,
- &restricted))
+ "OO|O!O!$pp:" FUNCNAME, kwlist,
+ &id, &callable,
+ &PyTuple_Type, &args_obj,
+ &PyDict_Type, &kwargs_obj,
+ &preserve_exc, &restricted))
{
return NULL;
}
@@ -1239,29 +1305,29 @@ interp_call(PyObject *self, PyObject *args, PyObject *kwds)
return NULL;
}
- if (args_obj != NULL) {
- _PyErr_SetString(tstate, PyExc_ValueError, "got unexpected args");
- return NULL;
- }
- if (kwargs_obj != NULL) {
- _PyErr_SetString(tstate, PyExc_ValueError, "got unexpected kwargs");
+ struct interp_call call = {0};
+ if (_interp_call_pack(tstate, &call, callable, args_obj, kwargs_obj) < 0) {
return NULL;
}
- PyObject *code = (PyObject *)convert_code_arg(tstate, callable, FUNCNAME,
- "argument 2", "a function");
- if (code == NULL) {
- return NULL;
+ PyObject *res_and_exc = NULL;
+ struct run_result runres = {0};
+ if (_run_in_interpreter(tstate, interp, NULL, &call, NULL, &runres) < 0) {
+ if (runres.excinfo == NULL) {
+ assert(_PyErr_Occurred(tstate));
+ goto finally;
+ }
+ assert(!_PyErr_Occurred(tstate));
}
+ assert(runres.result == NULL || runres.excinfo == NULL);
+ res_and_exc = Py_BuildValue("OO",
+ (runres.result ? runres.result : Py_None),
+ (runres.excinfo ? runres.excinfo : Py_None));
- PyObject *excinfo = NULL;
- int res = _interp_exec(self, interp, code, NULL, &excinfo);
- Py_DECREF(code);
- if (res < 0) {
- assert((excinfo == NULL) != (PyErr_Occurred() == NULL));
- return excinfo;
- }
- Py_RETURN_NONE;
+finally:
+ _interp_call_clear(&call);
+ _run_result_clear(&runres);
+ return res_and_exc;
#undef FUNCNAME
}
@@ -1269,13 +1335,7 @@ PyDoc_STRVAR(call_doc,
"call(id, callable, args=None, kwargs=None, *, restrict=False)\n\
\n\
Call the provided object in the identified interpreter.\n\
-Pass the given args and kwargs, if possible.\n\
-\n\
-\"callable\" may be a plain function with no free vars that takes\n\
-no arguments.\n\
-\n\
-The function's code object is used and all its state\n\
-is ignored, including its __globals__ dict.");
+Pass the given args and kwargs, if possible.");
static PyObject *
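With the rewrite above, interp_call() packs the callable and the optional args/kwargs as cross-interpreter data (falling back to pickle for the callable), performs the call via _make_call(), and returns a (result, excinfo) pair built with Py_BuildValue. A rough illustration of the low-level API, assuming _interpreters.create() returns a plain interpreter id and that a builtin callable round-trips through the pickle fallback:

    import _interpreters

    interp_id = _interpreters.create()
    # Run pow(2, 10) in the target interpreter; the preserved result comes
    # back as the first tuple item, and excinfo stays None on success.
    res, excinfo = _interpreters.call(interp_id, pow, (2, 10))
    print(res, excinfo)  # expected: 1024 None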
diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c
index 86328e46a7b..3808ecdceb9 100644
--- a/Modules/_io/textio.c
+++ b/Modules/_io/textio.c
@@ -1578,6 +1578,8 @@ _io_TextIOWrapper_detach_impl(textio *self)
static int
_textiowrapper_writeflush(textio *self)
{
+ _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(self);
+
if (self->pending_bytes == NULL)
return 0;
@@ -3173,8 +3175,9 @@ _io_TextIOWrapper_close_impl(textio *self)
}
static PyObject *
-textiowrapper_iternext(PyObject *op)
+textiowrapper_iternext_lock_held(PyObject *op)
{
+ _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(op);
PyObject *line;
textio *self = textio_CAST(op);
@@ -3210,6 +3213,16 @@ textiowrapper_iternext(PyObject *op)
return line;
}
+static PyObject *
+textiowrapper_iternext(PyObject *op)
+{
+ PyObject *result;
+ Py_BEGIN_CRITICAL_SECTION(op);
+ result = textiowrapper_iternext_lock_held(op);
+ Py_END_CRITICAL_SECTION();
+ return result;
+}
+
/*[clinic input]
@critical_section
@getter
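The critical-section changes above follow the usual free-threading pattern: the real work moves into a *_lock_held() helper that asserts the per-object lock, and the public slot wraps it in Py_BEGIN/END_CRITICAL_SECTION. A Python-level sketch of the concurrent use this is meant to keep consistent on free-threaded builds (how lines are distributed between threads is arbitrary; only internal-state integrity is the point):

    import io
    import threading

    f = io.TextIOWrapper(io.BytesIO(b"line\n" * 10_000), encoding="utf-8")

    def consume():
        for _ in f:   # each next() now runs under the object's critical section
            pass

    threads = [threading.Thread(target=consume) for _ in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()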
diff --git a/Modules/_json.c b/Modules/_json.c
index 89b0a41dd10..6b5f6ea42df 100644
--- a/Modules/_json.c
+++ b/Modules/_json.c
@@ -360,13 +360,6 @@ _build_rval_index_tuple(PyObject *rval, Py_ssize_t idx) {
return tpl;
}
-static inline int
-_PyUnicodeWriter_IsEmpty(PyUnicodeWriter *writer_pub)
-{
- _PyUnicodeWriter *writer = (_PyUnicodeWriter*)writer_pub;
- return (writer->pos == 0);
-}
-
static PyObject *
scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next_end_ptr)
{
@@ -385,10 +378,7 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next
const void *buf;
int kind;
- PyUnicodeWriter *writer = PyUnicodeWriter_Create(0);
- if (writer == NULL) {
- goto bail;
- }
+ PyUnicodeWriter *writer = NULL;
len = PyUnicode_GET_LENGTH(pystr);
buf = PyUnicode_DATA(pystr);
@@ -419,12 +409,11 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next
if (c == '"') {
// Fast path for simple case.
- if (_PyUnicodeWriter_IsEmpty(writer)) {
+ if (writer == NULL) {
PyObject *ret = PyUnicode_Substring(pystr, end, next);
if (ret == NULL) {
goto bail;
}
- PyUnicodeWriter_Discard(writer);
*next_end_ptr = next + 1;;
return ret;
}
@@ -432,6 +421,11 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next
else if (c != '\\') {
raise_errmsg("Unterminated string starting at", pystr, begin);
goto bail;
+ } else if (writer == NULL) {
+ writer = PyUnicodeWriter_Create(0);
+ if (writer == NULL) {
+ goto bail;
+ }
}
/* Pick up this chunk if it's not zero length */
@@ -1476,13 +1470,13 @@ encoder_listencode_obj(PyEncoderObject *s, PyUnicodeWriter *writer,
int rv;
if (obj == Py_None) {
- return PyUnicodeWriter_WriteUTF8(writer, "null", 4);
+ return PyUnicodeWriter_WriteASCII(writer, "null", 4);
}
else if (obj == Py_True) {
- return PyUnicodeWriter_WriteUTF8(writer, "true", 4);
+ return PyUnicodeWriter_WriteASCII(writer, "true", 4);
}
else if (obj == Py_False) {
- return PyUnicodeWriter_WriteUTF8(writer, "false", 5);
+ return PyUnicodeWriter_WriteASCII(writer, "false", 5);
}
else if (PyUnicode_Check(obj)) {
PyObject *encoded = encoder_encode_string(s, obj);
@@ -1609,6 +1603,12 @@ encoder_encode_key_value(PyEncoderObject *s, PyUnicodeWriter *writer, bool *firs
if (*first) {
*first = false;
+ if (s->indent != Py_None) {
+ if (write_newline_indent(writer, indent_level, indent_cache) < 0) {
+ Py_DECREF(keystr);
+ return -1;
+ }
+ }
}
else {
if (PyUnicodeWriter_WriteStr(writer, item_separator) < 0) {
@@ -1649,7 +1649,7 @@ encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer,
if (PyDict_GET_SIZE(dct) == 0) {
/* Fast path */
- return PyUnicodeWriter_WriteUTF8(writer, "{}", 2);
+ return PyUnicodeWriter_WriteASCII(writer, "{}", 2);
}
if (s->markers != Py_None) {
@@ -1676,11 +1676,8 @@ encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer,
if (s->indent != Py_None) {
indent_level++;
separator = get_item_separator(s, indent_level, indent_cache);
- if (separator == NULL ||
- write_newline_indent(writer, indent_level, indent_cache) < 0)
- {
+ if (separator == NULL)
goto bail;
- }
}
if (s->sort_keys || !PyDict_CheckExact(dct)) {
@@ -1720,7 +1717,7 @@ encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer,
goto bail;
Py_CLEAR(ident);
}
- if (s->indent != Py_None) {
+ if (s->indent != Py_None && !first) {
indent_level--;
if (write_newline_indent(writer, indent_level, indent_cache) < 0) {
goto bail;
@@ -1753,7 +1750,7 @@ encoder_listencode_list(PyEncoderObject *s, PyUnicodeWriter *writer,
return -1;
if (PySequence_Fast_GET_SIZE(s_fast) == 0) {
Py_DECREF(s_fast);
- return PyUnicodeWriter_WriteUTF8(writer, "[]", 2);
+ return PyUnicodeWriter_WriteASCII(writer, "[]", 2);
}
if (s->markers != Py_None) {
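
Two ideas recur in the _json.c hunks above: known-ASCII literals ("null", "true", "{}", "[]") now go through PyUnicodeWriter_WriteASCII() rather than PyUnicodeWriter_WriteUTF8(), and scanstring_unicode() creates its writer lazily, on the first backslash escape, so the common escape-free string is returned as a plain substring with no writer allocated at all. A rough sketch of that lazy-writer shape, assuming the public PyUnicodeWriter API from 3.14 (hypothetical helper, not code from the patch):

    #include <Python.h>

    /* Return s[start:end]; only build a writer when a copy is actually needed. */
    static PyObject *
    substring_or_copy(PyObject *s, Py_ssize_t start, Py_ssize_t end, int must_copy)
    {
        if (!must_copy) {
            /* Fast path: no PyUnicodeWriter is ever created. */
            return PyUnicode_Substring(s, start, end);
        }
        PyUnicodeWriter *writer = PyUnicodeWriter_Create(0);
        if (writer == NULL) {
            return NULL;
        }
        if (PyUnicodeWriter_WriteSubstring(writer, s, start, end) < 0) {
            PyUnicodeWriter_Discard(writer);
            return NULL;
        }
        return PyUnicodeWriter_Finish(writer);
    }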
diff --git a/Modules/_lsprof.c b/Modules/_lsprof.c
index 626c176715b..bbad5eb6903 100644
--- a/Modules/_lsprof.c
+++ b/Modules/_lsprof.c
@@ -782,7 +782,7 @@ _lsprof_Profiler_enable_impl(ProfilerObject *self, int subcalls,
return NULL;
}
- PyObject* monitoring = PyImport_ImportModuleAttrString("sys", "monitoring");
+ PyObject* monitoring = PySys_GetAttrString("monitoring");
if (!monitoring) {
return NULL;
}
@@ -864,7 +864,7 @@ _lsprof_Profiler_disable_impl(ProfilerObject *self)
}
if (self->flags & POF_ENABLED) {
PyObject* result = NULL;
- PyObject* monitoring = PyImport_ImportModuleAttrString("sys", "monitoring");
+ PyObject* monitoring = PySys_GetAttrString("monitoring");
if (!monitoring) {
return NULL;
@@ -983,7 +983,7 @@ profiler_init_impl(ProfilerObject *self, PyObject *timer, double timeunit,
Py_XSETREF(self->externalTimer, Py_XNewRef(timer));
self->tool_id = PY_MONITORING_PROFILER_ID;
- PyObject* monitoring = PyImport_ImportModuleAttrString("sys", "monitoring");
+ PyObject* monitoring = PySys_GetAttrString("monitoring");
if (!monitoring) {
return -1;
}
diff --git a/Modules/_pickle.c b/Modules/_pickle.c
index 29ef0cb0c2e..86d8b38620c 100644
--- a/Modules/_pickle.c
+++ b/Modules/_pickle.c
@@ -1915,7 +1915,7 @@ whichmodule(PickleState *st, PyObject *global, PyObject *global_name, PyObject *
__module__ can be None. If it is so, then search sys.modules for
the module of global. */
Py_CLEAR(module_name);
- modules = _PySys_GetRequiredAttr(&_Py_ID(modules));
+ modules = PySys_GetAttr(&_Py_ID(modules));
if (modules == NULL) {
return NULL;
}
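
The _lsprof.c and _pickle.c hunks replace ad-hoc lookups (PyImport_ImportModuleAttrString("sys", ...), the internal _PySys_GetRequiredAttr()) with the new PySys_GetAttr()/PySys_GetAttrString() helpers. As I understand the new API, it returns a strong reference to the named sys attribute and sets an exception if the attribute is missing, so the call sites keep their existing error handling and Py_DECREF. Typical usage:

    PyObject *monitoring = PySys_GetAttrString("monitoring");
    if (monitoring == NULL) {
        return NULL;        /* exception already set */
    }
    /* ... use sys.monitoring ... */
    Py_DECREF(monitoring);  /* strong reference: release when done */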
diff --git a/Modules/_randommodule.c b/Modules/_randommodule.c
index d5bac2f5b78..2f4f388ce11 100644
--- a/Modules/_randommodule.c
+++ b/Modules/_randommodule.c
@@ -497,34 +497,32 @@ _random_Random_setstate_impl(RandomObject *self, PyObject *state)
_random.Random.getrandbits
self: self(type="RandomObject *")
- k: int
+ k: uint64
/
getrandbits(k) -> x. Generates an int with k random bits.
[clinic start generated code]*/
static PyObject *
-_random_Random_getrandbits_impl(RandomObject *self, int k)
-/*[clinic end generated code: output=b402f82a2158887f input=87603cd60f79f730]*/
+_random_Random_getrandbits_impl(RandomObject *self, uint64_t k)
+/*[clinic end generated code: output=c30ef8435f3433cf input=64226ac13bb4d2a3]*/
{
- int i, words;
+ Py_ssize_t i, words;
uint32_t r;
uint32_t *wordarray;
PyObject *result;
- if (k < 0) {
- PyErr_SetString(PyExc_ValueError,
- "number of bits must be non-negative");
- return NULL;
- }
-
if (k == 0)
return PyLong_FromLong(0);
if (k <= 32) /* Fast path */
return PyLong_FromUnsignedLong(genrand_uint32(self) >> (32 - k));
- words = (k - 1) / 32 + 1;
+ if ((k - 1u) / 32u + 1u > PY_SSIZE_T_MAX / 4u) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+ words = (k - 1u) / 32u + 1u;
wordarray = (uint32_t *)PyMem_Malloc(words * 4);
if (wordarray == NULL) {
PyErr_NoMemory();
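
With k widened to an unsigned 64-bit count, the getrandbits() hunk above computes the number of 32-bit output words as ceil(k / 32) = (k - 1) / 32 + 1 and refuses any k for which words * 4 bytes would not fit in Py_ssize_t, before passing words * 4 to PyMem_Malloc(). A standalone, hypothetical version of that guard, just to spell out the arithmetic:

    #include <stdint.h>

    /* Number of 32-bit words needed for k random bits (k > 0), or 0 if
     * words * 4 bytes would exceed max_bytes (e.g. PY_SSIZE_T_MAX). */
    static uint64_t
    words_for_bits(uint64_t k, uint64_t max_bytes)
    {
        uint64_t words = (k - 1u) / 32u + 1u;   /* ceil(k / 32), no overflow */
        if (words > max_bytes / 4u) {
            return 0;                           /* caller reports MemoryError */
        }
        return words;
    }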
diff --git a/Modules/_remote_debugging_module.c b/Modules/_remote_debugging_module.c
index a13cbd63ad3..ea58f38006e 100644
--- a/Modules/_remote_debugging_module.c
+++ b/Modules/_remote_debugging_module.c
@@ -108,6 +108,7 @@ typedef struct {
uintptr_t tstate_addr;
uint64_t code_object_generation;
_Py_hashtable_t *code_object_cache;
+ int debug;
#ifdef Py_GIL_DISABLED
// TLBC cache invalidation tracking
uint32_t tlbc_generation; // Track TLBC index pool changes
@@ -194,6 +195,11 @@ static int parse_frame_object(
* UTILITY FUNCTIONS AND HELPERS
* ============================================================================ */
+#define set_exception_cause(unwinder, exc_type, message) \
+ if (unwinder->debug) { \
+ _set_debug_exception_cause(exc_type, message); \
+ }
+
static void
cached_code_metadata_destroy(void *ptr)
{
@@ -218,13 +224,70 @@ RemoteDebugging_InitState(RemoteDebuggingState *st)
return 0;
}
-// Helper to chain exceptions and avoid repetitions
-static void
-chain_exceptions(PyObject *exception, const char *string)
+static int
+is_prerelease_version(uint64_t version)
+{
+ return (version & 0xF0) != 0xF0;
+}
+
+static inline int
+validate_debug_offsets(struct _Py_DebugOffsets *debug_offsets)
{
- PyObject *exc = PyErr_GetRaisedException();
- PyErr_SetString(exception, string);
- _PyErr_ChainExceptions1(exc);
+ if (memcmp(debug_offsets->cookie, _Py_Debug_Cookie, sizeof(debug_offsets->cookie)) != 0) {
+ // The remote is probably running a Python version predating debug offsets.
+ PyErr_SetString(
+ PyExc_RuntimeError,
+ "Can't determine the Python version of the remote process");
+ return -1;
+ }
+
+ // Assume debug offsets could change from one pre-release version to another,
+ // or one minor version to another, but are stable across patch versions.
+ if (is_prerelease_version(Py_Version) && Py_Version != debug_offsets->version) {
+ PyErr_SetString(
+ PyExc_RuntimeError,
+ "Can't attach from a pre-release Python interpreter"
+ " to a process running a different Python version");
+ return -1;
+ }
+
+ if (is_prerelease_version(debug_offsets->version) && Py_Version != debug_offsets->version) {
+ PyErr_SetString(
+ PyExc_RuntimeError,
+ "Can't attach to a pre-release Python interpreter"
+ " from a process running a different Python version");
+ return -1;
+ }
+
+ unsigned int remote_major = (debug_offsets->version >> 24) & 0xFF;
+ unsigned int remote_minor = (debug_offsets->version >> 16) & 0xFF;
+
+ if (PY_MAJOR_VERSION != remote_major || PY_MINOR_VERSION != remote_minor) {
+ PyErr_Format(
+ PyExc_RuntimeError,
+ "Can't attach from a Python %d.%d process to a Python %d.%d process",
+ PY_MAJOR_VERSION, PY_MINOR_VERSION, remote_major, remote_minor);
+ return -1;
+ }
+
+ // The debug offsets differ between free threaded and non-free threaded builds.
+ if (_Py_Debug_Free_Threaded && !debug_offsets->free_threaded) {
+ PyErr_SetString(
+ PyExc_RuntimeError,
+ "Cannot attach from a free-threaded Python process"
+ " to a process running a non-free-threaded version");
+ return -1;
+ }
+
+ if (!_Py_Debug_Free_Threaded && debug_offsets->free_threaded) {
+ PyErr_SetString(
+ PyExc_RuntimeError,
+ "Cannot attach to a free-threaded Python process"
+ " from a process running a non-free-threaded version");
+ return -1;
+ }
+
+ return 0;
}
/* ============================================================================
@@ -232,29 +295,32 @@ chain_exceptions(PyObject *exception, const char *string)
* ============================================================================ */
static inline int
-read_ptr(proc_handle_t *handle, uintptr_t address, uintptr_t *ptr_addr)
+read_ptr(RemoteUnwinderObject *unwinder, uintptr_t address, uintptr_t *ptr_addr)
{
- int result = _Py_RemoteDebug_PagedReadRemoteMemory(handle, address, sizeof(void*), ptr_addr);
+ int result = _Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, address, sizeof(void*), ptr_addr);
if (result < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read pointer from remote memory");
return -1;
}
return 0;
}
static inline int
-read_Py_ssize_t(proc_handle_t *handle, uintptr_t address, Py_ssize_t *size)
+read_Py_ssize_t(RemoteUnwinderObject *unwinder, uintptr_t address, Py_ssize_t *size)
{
- int result = _Py_RemoteDebug_PagedReadRemoteMemory(handle, address, sizeof(Py_ssize_t), size);
+ int result = _Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, address, sizeof(Py_ssize_t), size);
if (result < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read Py_ssize_t from remote memory");
return -1;
}
return 0;
}
static int
-read_py_ptr(proc_handle_t *handle, uintptr_t address, uintptr_t *ptr_addr)
+read_py_ptr(RemoteUnwinderObject *unwinder, uintptr_t address, uintptr_t *ptr_addr)
{
- if (read_ptr(handle, address, ptr_addr)) {
+ if (read_ptr(unwinder, address, ptr_addr)) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read Python pointer");
return -1;
}
*ptr_addr &= ~Py_TAG_BITS;
@@ -262,10 +328,11 @@ read_py_ptr(proc_handle_t *handle, uintptr_t address, uintptr_t *ptr_addr)
}
static int
-read_char(proc_handle_t *handle, uintptr_t address, char *result)
+read_char(RemoteUnwinderObject *unwinder, uintptr_t address, char *result)
{
- int res = _Py_RemoteDebug_PagedReadRemoteMemory(handle, address, sizeof(char), result);
+ int res = _Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, address, sizeof(char), result);
if (res < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read char from remote memory");
return -1;
}
return 0;
@@ -293,6 +360,7 @@ read_py_str(
unicode_obj
);
if (res < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read PyUnicodeObject");
goto err;
}
@@ -300,24 +368,28 @@ read_py_str(
if (len < 0 || len > max_len) {
PyErr_Format(PyExc_RuntimeError,
"Invalid string length (%zd) at 0x%lx", len, address);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Invalid string length in remote Unicode object");
return NULL;
}
buf = (char *)PyMem_RawMalloc(len+1);
if (buf == NULL) {
PyErr_NoMemory();
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to allocate buffer for string reading");
return NULL;
}
size_t offset = unwinder->debug_offsets.unicode_object.asciiobject_size;
res = _Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, address + offset, len, buf);
if (res < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read string data from remote memory");
goto err;
}
buf[len] = '\0';
result = PyUnicode_FromStringAndSize(buf, len);
if (result == NULL) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to create PyUnicode from remote string data");
goto err;
}
@@ -350,31 +422,36 @@ read_py_bytes(
bytes_obj
);
if (res < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read PyBytesObject");
goto err;
}
Py_ssize_t len = GET_MEMBER(Py_ssize_t, bytes_obj, unwinder->debug_offsets.bytes_object.ob_size);
if (len < 0 || len > max_len) {
PyErr_Format(PyExc_RuntimeError,
- "Invalid string length (%zd) at 0x%lx", len, address);
+ "Invalid bytes length (%zd) at 0x%lx", len, address);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Invalid bytes length in remote bytes object");
return NULL;
}
buf = (char *)PyMem_RawMalloc(len+1);
if (buf == NULL) {
PyErr_NoMemory();
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to allocate buffer for bytes reading");
return NULL;
}
size_t offset = unwinder->debug_offsets.bytes_object.ob_sval;
res = _Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, address + offset, len, buf);
if (res < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read bytes data from remote memory");
goto err;
}
buf[len] = '\0';
result = PyBytes_FromStringAndSize(buf, len);
if (result == NULL) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to create PyBytes from remote bytes data");
goto err;
}
@@ -405,6 +482,7 @@ read_py_long(
unwinder->debug_offsets.long_object.size,
long_obj);
if (bytes_read < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read PyLongObject");
return -1;
}
@@ -423,6 +501,7 @@ read_py_long(
digits = (digit *)PyMem_RawMalloc(size * sizeof(digit));
if (!digits) {
PyErr_NoMemory();
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to allocate digits for small PyLong");
return -1;
}
memcpy(digits, long_obj + unwinder->debug_offsets.long_object.ob_digit, size * sizeof(digit));
@@ -431,6 +510,7 @@ read_py_long(
digits = (digit *)PyMem_RawMalloc(size * sizeof(digit));
if (!digits) {
PyErr_NoMemory();
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to allocate digits for large PyLong");
return -1;
}
@@ -441,6 +521,7 @@ read_py_long(
digits
);
if (bytes_read < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read PyLong digits from remote memory");
goto error;
}
}
@@ -519,11 +600,15 @@ read_async_debug(
) {
uintptr_t async_debug_addr = _Py_RemoteDebug_GetAsyncioDebugAddress(&unwinder->handle);
if (!async_debug_addr) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to get AsyncioDebug address");
return -1;
}
size_t size = sizeof(struct _Py_AsyncioModuleDebugOffsets);
int result = _Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, async_debug_addr, size, &unwinder->async_debug_offsets);
+ if (result < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read AsyncioDebug offsets");
+ }
return result;
}
@@ -544,6 +629,7 @@ parse_task_name(
unwinder->async_debug_offsets.asyncio_task_object.size,
task_obj);
if (err < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read task object");
return NULL;
}
@@ -558,6 +644,7 @@ parse_task_name(
SIZEOF_PYOBJECT,
task_name_obj);
if (err < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read task name object");
return NULL;
}
@@ -569,13 +656,14 @@ parse_task_name(
SIZEOF_TYPE_OBJ,
type_obj);
if (err < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read task name type object");
return NULL;
}
if ((GET_MEMBER(unsigned long, type_obj, unwinder->debug_offsets.type_object.tp_flags) & Py_TPFLAGS_LONG_SUBCLASS)) {
long res = read_py_long(unwinder, task_name_addr);
if (res == -1) {
- chain_exceptions(PyExc_RuntimeError, "Failed to get task name");
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Task name PyLong parsing failed");
return NULL;
}
return PyUnicode_FromFormat("Task-%d", res);
@@ -583,6 +671,7 @@ parse_task_name(
if(!(GET_MEMBER(unsigned long, type_obj, unwinder->debug_offsets.type_object.tp_flags) & Py_TPFLAGS_UNICODE_SUBCLASS)) {
PyErr_SetString(PyExc_RuntimeError, "Invalid task name object");
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Task name object is neither long nor unicode");
return NULL;
}
@@ -604,6 +693,7 @@ static int parse_task_awaited_by(
if (_Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, task_address,
unwinder->async_debug_offsets.asyncio_task_object.size,
task_obj) < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read task object in awaited_by parsing");
return -1;
}
@@ -618,10 +708,12 @@ static int parse_task_awaited_by(
if (awaited_by_is_a_set) {
if (parse_tasks_in_set(unwinder, task_ab_addr, awaited_by, recurse_task)) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse tasks in awaited_by set");
return -1;
}
} else {
if (parse_task(unwinder, task_ab_addr, awaited_by, recurse_task)) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse single awaited_by task");
return -1;
}
}
@@ -644,6 +736,7 @@ handle_yield_from_frame(
SIZEOF_INTERP_FRAME,
iframe);
if (err < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read interpreter frame in yield_from handler");
return -1;
}
@@ -651,6 +744,7 @@ handle_yield_from_frame(
PyErr_SetString(
PyExc_RuntimeError,
"generator doesn't own its frame \\_o_/");
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Frame ownership mismatch in yield_from");
return -1;
}
@@ -660,20 +754,22 @@ handle_yield_from_frame(
if ((void*)stackpointer_addr != NULL) {
uintptr_t gi_await_addr;
err = read_py_ptr(
- &unwinder->handle,
+ unwinder,
stackpointer_addr - sizeof(void*),
&gi_await_addr);
if (err) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read gi_await address");
return -1;
}
if ((void*)gi_await_addr != NULL) {
uintptr_t gi_await_addr_type_addr;
err = read_ptr(
- &unwinder->handle,
+ unwinder,
gi_await_addr + unwinder->debug_offsets.pyobject.ob_type,
&gi_await_addr_type_addr);
if (err) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read gi_await type address");
return -1;
}
@@ -690,6 +786,7 @@ handle_yield_from_frame(
*/
err = parse_coro_chain(unwinder, gi_await_addr, render_to);
if (err) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse coroutine chain in yield_from");
return -1;
}
}
@@ -715,6 +812,7 @@ parse_coro_chain(
SIZEOF_GEN_OBJ,
gen_object);
if (err < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read generator object in coro chain");
return -1;
}
@@ -726,11 +824,13 @@ parse_coro_chain(
uintptr_t prev_frame;
uintptr_t gi_iframe_addr = coro_address + unwinder->debug_offsets.gen_object.gi_iframe;
if (parse_frame_object(unwinder, &name, gi_iframe_addr, &prev_frame) < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse frame object in coro chain");
return -1;
}
if (PyList_Append(render_to, name)) {
Py_DECREF(name);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append frame to coro chain");
return -1;
}
Py_DECREF(name);
@@ -756,15 +856,18 @@ create_task_result(
result = PyList_New(0);
if (result == NULL) {
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create task result list");
goto error;
}
call_stack = PyList_New(0);
if (call_stack == NULL) {
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create call stack list");
goto error;
}
if (PyList_Append(result, call_stack)) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append call stack to task result");
goto error;
}
Py_CLEAR(call_stack);
@@ -775,10 +878,12 @@ create_task_result(
tn = PyLong_FromUnsignedLongLong(task_address);
}
if (tn == NULL) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to create task name/address");
goto error;
}
if (PyList_Append(result, tn)) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append task name to result");
goto error;
}
Py_CLEAR(tn);
@@ -787,6 +892,7 @@ create_task_result(
if (_Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, task_address,
unwinder->async_debug_offsets.asyncio_task_object.size,
task_obj) < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read task object for coro chain");
goto error;
}
@@ -796,21 +902,25 @@ create_task_result(
if ((void*)coro_addr != NULL) {
call_stack = PyList_New(0);
if (call_stack == NULL) {
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create coro call stack list");
goto error;
}
if (parse_coro_chain(unwinder, coro_addr, call_stack) < 0) {
Py_DECREF(call_stack);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse coroutine chain");
goto error;
}
if (PyList_Reverse(call_stack)) {
Py_DECREF(call_stack);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to reverse call stack");
goto error;
}
if (PyList_SetItem(result, 0, call_stack) < 0) {
Py_DECREF(call_stack);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to set call stack in result");
goto error;
}
}
@@ -837,36 +947,42 @@ parse_task(
int err;
err = read_char(
- &unwinder->handle,
+ unwinder,
task_address + unwinder->async_debug_offsets.asyncio_task_object.task_is_task,
&is_task);
if (err) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read is_task flag");
goto error;
}
if (is_task) {
result = create_task_result(unwinder, task_address, recurse_task);
if (!result) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to create task result");
goto error;
}
} else {
result = PyList_New(0);
if (result == NULL) {
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create empty task result");
goto error;
}
}
if (PyList_Append(render_to, result)) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append task result to render list");
goto error;
}
if (recurse_task) {
awaited_by = PyList_New(0);
if (awaited_by == NULL) {
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create awaited_by list");
goto error;
}
if (PyList_Append(result, awaited_by)) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append awaited_by to result");
goto error;
}
Py_DECREF(awaited_by);
@@ -877,6 +993,7 @@ parse_task(
// it's borrowed from 'result' and will be decrefed when result is
// deleted.
awaited_by = NULL;
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse task awaited_by relationships");
goto error;
}
}
@@ -898,19 +1015,22 @@ process_set_entry(
int recurse_task
) {
uintptr_t key_addr;
- if (read_py_ptr(&unwinder->handle, table_ptr, &key_addr)) {
+ if (read_py_ptr(unwinder, table_ptr, &key_addr)) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read set entry key");
return -1;
}
if ((void*)key_addr != NULL) {
Py_ssize_t ref_cnt;
- if (read_Py_ssize_t(&unwinder->handle, table_ptr, &ref_cnt)) {
+ if (read_Py_ssize_t(unwinder, table_ptr, &ref_cnt)) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read set entry reference count");
return -1;
}
if (ref_cnt) {
// if 'ref_cnt=0' it's a set dummy marker
if (parse_task(unwinder, key_addr, awaited_by, recurse_task)) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse task in set entry");
return -1;
}
return 1; // Successfully processed a valid entry
@@ -933,6 +1053,7 @@ parse_tasks_in_set(
SIZEOF_SET_OBJ,
set_object);
if (err < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read set object");
return -1;
}
@@ -946,6 +1067,7 @@ parse_tasks_in_set(
int result = process_set_entry(unwinder, table_ptr, awaited_by, recurse_task);
if (result < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to process set entry");
return -1;
}
if (result > 0) {
@@ -960,10 +1082,11 @@ parse_tasks_in_set(
static int
-setup_async_result_structure(PyObject **result, PyObject **calls)
+setup_async_result_structure(RemoteUnwinderObject *unwinder, PyObject **result, PyObject **calls)
{
*result = PyList_New(1);
if (*result == NULL) {
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create async result structure");
return -1;
}
@@ -971,6 +1094,7 @@ setup_async_result_structure(PyObject **result, PyObject **calls)
if (*calls == NULL) {
Py_DECREF(*result);
*result = NULL;
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create calls list in async result");
return -1;
}
@@ -979,6 +1103,7 @@ setup_async_result_structure(PyObject **result, PyObject **calls)
Py_DECREF(*result);
*result = NULL;
*calls = NULL;
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to set calls list in async result");
return -1;
}
@@ -987,34 +1112,39 @@ setup_async_result_structure(PyObject **result, PyObject **calls)
static int
add_task_info_to_result(
- RemoteUnwinderObject *self,
+ RemoteUnwinderObject *unwinder,
PyObject *result,
uintptr_t running_task_addr
) {
- PyObject *tn = parse_task_name(self, running_task_addr);
+ PyObject *tn = parse_task_name(unwinder, running_task_addr);
if (tn == NULL) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse task name for result");
return -1;
}
if (PyList_Append(result, tn)) {
Py_DECREF(tn);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append task name to result");
return -1;
}
Py_DECREF(tn);
PyObject* awaited_by = PyList_New(0);
if (awaited_by == NULL) {
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create awaited_by list for result");
return -1;
}
if (PyList_Append(result, awaited_by)) {
Py_DECREF(awaited_by);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append awaited_by to result");
return -1;
}
Py_DECREF(awaited_by);
if (parse_task_awaited_by(
- self, running_task_addr, awaited_by, 1) < 0) {
+ unwinder, running_task_addr, awaited_by, 1) < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse awaited_by for result");
return -1;
}
@@ -1034,21 +1164,25 @@ process_single_task_node(
tn = parse_task_name(unwinder, task_addr);
if (tn == NULL) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse task name in single task node");
goto error;
}
current_awaited_by = PyList_New(0);
if (current_awaited_by == NULL) {
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create awaited_by list in single task node");
goto error;
}
task_id = PyLong_FromUnsignedLongLong(task_addr);
if (task_id == NULL) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to create task ID in single task node");
goto error;
}
result_item = PyTuple_New(3);
if (result_item == NULL) {
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create result tuple in single task node");
goto error;
}
@@ -1063,6 +1197,7 @@ process_single_task_node(
if (PyList_Append(result, result_item)) {
Py_DECREF(result_item);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append result item in single task node");
return -1;
}
Py_DECREF(result_item);
@@ -1070,6 +1205,7 @@ process_single_task_node(
// Get back current_awaited_by reference for parse_task_awaited_by
current_awaited_by = PyTuple_GET_ITEM(result_item, 2);
if (parse_task_awaited_by(unwinder, task_addr, current_awaited_by, 0) < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse awaited_by in single task node");
return -1;
}
@@ -1121,20 +1257,22 @@ get_tlbc_cache_entry(RemoteUnwinderObject *self, uintptr_t code_addr, uint32_t c
}
static int
-cache_tlbc_array(RemoteUnwinderObject *self, uintptr_t code_addr, uintptr_t tlbc_array_addr, uint32_t generation)
+cache_tlbc_array(RemoteUnwinderObject *unwinder, uintptr_t code_addr, uintptr_t tlbc_array_addr, uint32_t generation)
{
uintptr_t tlbc_array_ptr;
void *tlbc_array = NULL;
TLBCCacheEntry *entry = NULL;
// Read the TLBC array pointer
- if (read_ptr(&self->handle, tlbc_array_addr, &tlbc_array_ptr) != 0 || tlbc_array_ptr == 0) {
+ if (read_ptr(unwinder, tlbc_array_addr, &tlbc_array_ptr) != 0 || tlbc_array_ptr == 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read TLBC array pointer");
return 0; // No TLBC array
}
// Read the TLBC array size
Py_ssize_t tlbc_size;
- if (_Py_RemoteDebug_PagedReadRemoteMemory(&self->handle, tlbc_array_ptr, sizeof(tlbc_size), &tlbc_size) != 0 || tlbc_size <= 0) {
+ if (_Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, tlbc_array_ptr, sizeof(tlbc_size), &tlbc_size) != 0 || tlbc_size <= 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read TLBC array size");
return 0; // Invalid size
}
@@ -1142,11 +1280,13 @@ cache_tlbc_array(RemoteUnwinderObject *self, uintptr_t code_addr, uintptr_t tlbc
size_t array_data_size = tlbc_size * sizeof(void*);
tlbc_array = PyMem_RawMalloc(sizeof(Py_ssize_t) + array_data_size);
if (!tlbc_array) {
- return -1; // Memory error
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to allocate TLBC array");
+ return 0; // Memory error
}
- if (_Py_RemoteDebug_PagedReadRemoteMemory(&self->handle, tlbc_array_ptr, sizeof(Py_ssize_t) + array_data_size, tlbc_array) != 0) {
+ if (_Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, tlbc_array_ptr, sizeof(Py_ssize_t) + array_data_size, tlbc_array) != 0) {
PyMem_RawFree(tlbc_array);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read TLBC array data");
return 0; // Read error
}
@@ -1154,7 +1294,8 @@ cache_tlbc_array(RemoteUnwinderObject *self, uintptr_t code_addr, uintptr_t tlbc
entry = PyMem_RawMalloc(sizeof(TLBCCacheEntry));
if (!entry) {
PyMem_RawFree(tlbc_array);
- return -1; // Memory error
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to allocate TLBC cache entry");
+ return 0; // Memory error
}
entry->tlbc_array = tlbc_array;
@@ -1163,9 +1304,10 @@ cache_tlbc_array(RemoteUnwinderObject *self, uintptr_t code_addr, uintptr_t tlbc
// Store in cache
void *key = (void *)code_addr;
- if (_Py_hashtable_set(self->tlbc_cache, key, entry) < 0) {
+ if (_Py_hashtable_set(unwinder->tlbc_cache, key, entry) < 0) {
tlbc_cache_entry_destroy(entry);
- return -1; // Cache error
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to store TLBC entry in cache");
+ return 0; // Cache error
}
return 1; // Success
@@ -1304,29 +1446,34 @@ parse_code_object(RemoteUnwinderObject *unwinder,
if (_Py_RemoteDebug_PagedReadRemoteMemory(
&unwinder->handle, real_address, SIZEOF_CODE_OBJ, code_object) < 0)
{
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read code object");
goto error;
}
func = read_py_str(unwinder,
GET_MEMBER(uintptr_t, code_object, unwinder->debug_offsets.code_object.qualname), 1024);
if (!func) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read function name from code object");
goto error;
}
file = read_py_str(unwinder,
GET_MEMBER(uintptr_t, code_object, unwinder->debug_offsets.code_object.filename), 1024);
if (!file) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read filename from code object");
goto error;
}
linetable = read_py_bytes(unwinder,
GET_MEMBER(uintptr_t, code_object, unwinder->debug_offsets.code_object.linetable), 4096);
if (!linetable) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read linetable from code object");
goto error;
}
meta = PyMem_RawMalloc(sizeof(CachedCodeMetadata));
if (!meta) {
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to allocate cached code metadata");
goto error;
}
@@ -1338,6 +1485,7 @@ parse_code_object(RemoteUnwinderObject *unwinder,
if (unwinder && unwinder->code_object_cache && _Py_hashtable_set(unwinder->code_object_cache, key, meta) < 0) {
cached_code_metadata_destroy(meta);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to cache code metadata");
goto error;
}
@@ -1363,9 +1511,11 @@ parse_code_object(RemoteUnwinderObject *unwinder,
if (!tlbc_entry) {
// Cache miss - try to read and cache TLBC array
- if (cache_tlbc_array(unwinder, real_address, real_address + unwinder->debug_offsets.code_object.co_tlbc, unwinder->tlbc_generation) > 0) {
- tlbc_entry = get_tlbc_cache_entry(unwinder, real_address, unwinder->tlbc_generation);
+ if (!cache_tlbc_array(unwinder, real_address, real_address + unwinder->debug_offsets.code_object.co_tlbc, unwinder->tlbc_generation)) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to cache TLBC array");
+ goto error;
}
+ tlbc_entry = get_tlbc_cache_entry(unwinder, real_address, unwinder->tlbc_generation);
}
if (tlbc_entry && tlbc_index < tlbc_entry->tlbc_array_size) {
@@ -1400,19 +1550,21 @@ done_tlbc:
lineno = PyLong_FromLong(info.lineno);
if (!lineno) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to create line number object");
goto error;
}
tuple = PyTuple_New(3);
if (!tuple) {
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create result tuple for code object");
goto error;
}
Py_INCREF(meta->func_name);
Py_INCREF(meta->file_name);
- PyTuple_SET_ITEM(tuple, 0, meta->func_name);
- PyTuple_SET_ITEM(tuple, 1, meta->file_name);
- PyTuple_SET_ITEM(tuple, 2, lineno);
+ PyTuple_SET_ITEM(tuple, 0, meta->file_name);
+ PyTuple_SET_ITEM(tuple, 1, lineno);
+ PyTuple_SET_ITEM(tuple, 2, meta->func_name);
*result = tuple;
return 0;
@@ -1441,7 +1593,7 @@ cleanup_stack_chunks(StackChunkList *chunks)
static int
process_single_stack_chunk(
- proc_handle_t *handle,
+ RemoteUnwinderObject *unwinder,
uintptr_t chunk_addr,
StackChunkInfo *chunk_info
) {
@@ -1451,11 +1603,13 @@ process_single_stack_chunk(
char *this_chunk = PyMem_RawMalloc(current_size);
if (!this_chunk) {
PyErr_NoMemory();
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to allocate stack chunk buffer");
return -1;
}
- if (_Py_RemoteDebug_PagedReadRemoteMemory(handle, chunk_addr, current_size, this_chunk) < 0) {
+ if (_Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, chunk_addr, current_size, this_chunk) < 0) {
PyMem_RawFree(this_chunk);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read stack chunk");
return -1;
}
@@ -1465,11 +1619,13 @@ process_single_stack_chunk(
this_chunk = PyMem_RawRealloc(this_chunk, actual_size);
if (!this_chunk) {
PyErr_NoMemory();
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to reallocate stack chunk buffer");
return -1;
}
- if (_Py_RemoteDebug_PagedReadRemoteMemory(handle, chunk_addr, actual_size, this_chunk) < 0) {
+ if (_Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, chunk_addr, actual_size, this_chunk) < 0) {
PyMem_RawFree(this_chunk);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to reread stack chunk with correct size");
return -1;
}
current_size = actual_size;
@@ -1491,13 +1647,15 @@ copy_stack_chunks(RemoteUnwinderObject *unwinder,
size_t count = 0;
size_t max_chunks = 16;
- if (read_ptr(&unwinder->handle, tstate_addr + unwinder->debug_offsets.thread_state.datastack_chunk, &chunk_addr)) {
+ if (read_ptr(unwinder, tstate_addr + unwinder->debug_offsets.thread_state.datastack_chunk, &chunk_addr)) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read initial stack chunk address");
return -1;
}
chunks = PyMem_RawMalloc(max_chunks * sizeof(StackChunkInfo));
if (!chunks) {
PyErr_NoMemory();
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to allocate stack chunks array");
return -1;
}
@@ -1508,13 +1666,15 @@ copy_stack_chunks(RemoteUnwinderObject *unwinder,
StackChunkInfo *new_chunks = PyMem_RawRealloc(chunks, max_chunks * sizeof(StackChunkInfo));
if (!new_chunks) {
PyErr_NoMemory();
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to grow stack chunks array");
goto error;
}
chunks = new_chunks;
}
// Process this chunk
- if (process_single_stack_chunk(&unwinder->handle, chunk_addr, &chunks[count]) < 0) {
+ if (process_single_stack_chunk(unwinder, chunk_addr, &chunks[count]) < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to process stack chunk");
goto error;
}
@@ -1559,6 +1719,7 @@ parse_frame_from_chunks(
) {
void *frame_ptr = find_frame_in_chunks(chunks, address);
if (!frame_ptr) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Frame not found in stack chunks");
return -1;
}
@@ -1607,11 +1768,13 @@ populate_initial_state_data(
sizeof(void*),
&address_of_interpreter_state);
if (bytes_read < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read interpreter state address");
return -1;
}
if (address_of_interpreter_state == 0) {
PyErr_SetString(PyExc_RuntimeError, "No interpreter state found");
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Interpreter state is NULL");
return -1;
}
@@ -1630,6 +1793,7 @@ populate_initial_state_data(
address_of_thread,
sizeof(void*),
tstate) < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read main thread state address");
return -1;
}
@@ -1652,11 +1816,13 @@ find_running_frame(
sizeof(void*),
&address_of_interpreter_state);
if (bytes_read < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read interpreter state for running frame");
return -1;
}
if (address_of_interpreter_state == 0) {
PyErr_SetString(PyExc_RuntimeError, "No interpreter state found");
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Interpreter state is NULL in running frame search");
return -1;
}
@@ -1668,16 +1834,18 @@ find_running_frame(
sizeof(void*),
&address_of_thread);
if (bytes_read < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read thread address for running frame");
return -1;
}
// No Python frames are available for us (can happen at tear-down).
if ((void*)address_of_thread != NULL) {
int err = read_ptr(
- &unwinder->handle,
+ unwinder,
address_of_thread + unwinder->debug_offsets.thread_state.current_frame,
frame);
if (err) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read current frame pointer");
return -1;
}
return 0;
@@ -1704,11 +1872,13 @@ find_running_task(
sizeof(void*),
&address_of_interpreter_state);
if (bytes_read < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read interpreter state for running task");
return -1;
}
if (address_of_interpreter_state == 0) {
PyErr_SetString(PyExc_RuntimeError, "No interpreter state found");
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Interpreter state is NULL in running task search");
return -1;
}
@@ -1720,6 +1890,7 @@ find_running_task(
sizeof(void*),
&address_of_thread);
if (bytes_read < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read thread head for running task");
return -1;
}
@@ -1730,11 +1901,12 @@ find_running_task(
}
bytes_read = read_py_ptr(
- &unwinder->handle,
+ unwinder,
address_of_thread
+ unwinder->async_debug_offsets.asyncio_thread_state.asyncio_running_loop,
&address_of_running_loop);
if (bytes_read == -1) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read running loop address");
return -1;
}
@@ -1744,11 +1916,12 @@ find_running_task(
}
int err = read_ptr(
- &unwinder->handle,
+ unwinder,
address_of_thread
+ unwinder->async_debug_offsets.asyncio_thread_state.asyncio_running_task,
running_task_addr);
if (err) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read running task address");
return -1;
}
@@ -1757,47 +1930,51 @@ find_running_task(
static int
find_running_task_and_coro(
- RemoteUnwinderObject *self,
+ RemoteUnwinderObject *unwinder,
uintptr_t *running_task_addr,
uintptr_t *running_coro_addr,
uintptr_t *running_task_code_obj
) {
*running_task_addr = (uintptr_t)NULL;
if (find_running_task(
- self, running_task_addr) < 0) {
- chain_exceptions(PyExc_RuntimeError, "Failed to find running task");
+ unwinder, running_task_addr) < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Running task search failed");
return -1;
}
if ((void*)*running_task_addr == NULL) {
PyErr_SetString(PyExc_RuntimeError, "No running task found");
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Running task address is NULL");
return -1;
}
if (read_py_ptr(
- &self->handle,
- *running_task_addr + self->async_debug_offsets.asyncio_task_object.task_coro,
+ unwinder,
+ *running_task_addr + unwinder->async_debug_offsets.asyncio_task_object.task_coro,
running_coro_addr) < 0) {
- chain_exceptions(PyExc_RuntimeError, "Failed to read running task coro");
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Running task coro read failed");
return -1;
}
if ((void*)*running_coro_addr == NULL) {
PyErr_SetString(PyExc_RuntimeError, "Running task coro is NULL");
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Running task coro address is NULL");
return -1;
}
// note: genobject's gi_iframe is an embedded struct so the address to
// the offset leads directly to its first field: f_executable
if (read_py_ptr(
- &self->handle,
- *running_coro_addr + self->debug_offsets.gen_object.gi_iframe,
+ unwinder,
+ *running_coro_addr + unwinder->debug_offsets.gen_object.gi_iframe,
running_task_code_obj) < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read running task code object");
return -1;
}
if ((void*)*running_task_code_obj == NULL) {
PyErr_SetString(PyExc_RuntimeError, "Running task code object is NULL");
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Running task code object address is NULL");
return -1;
}
@@ -1825,6 +2002,7 @@ parse_frame_object(
frame
);
if (bytes_read < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read interpreter frame");
return -1;
}
@@ -1870,11 +2048,20 @@ parse_async_frame_object(
frame
);
if (bytes_read < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read async frame");
return -1;
}
*previous_frame = GET_MEMBER(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.previous);
+ *code_object = GET_MEMBER(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.executable);
+ // Strip tag bits for consistent comparison
+ *code_object &= ~Py_TAG_BITS;
+ assert(code_object != NULL);
+ if ((void*)*code_object == NULL) {
+ return 0;
+ }
+
if (GET_MEMBER(char, frame, unwinder->debug_offsets.interpreter_frame.owner) == FRAME_OWNED_BY_CSTACK ||
GET_MEMBER(char, frame, unwinder->debug_offsets.interpreter_frame.owner) == FRAME_OWNED_BY_INTERPRETER) {
return 0; // C frame
@@ -1884,18 +2071,10 @@ parse_async_frame_object(
&& GET_MEMBER(char, frame, unwinder->debug_offsets.interpreter_frame.owner) != FRAME_OWNED_BY_THREAD) {
PyErr_Format(PyExc_RuntimeError, "Unhandled frame owner %d.\n",
GET_MEMBER(char, frame, unwinder->debug_offsets.interpreter_frame.owner));
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Unhandled frame owner type in async frame");
return -1;
}
- *code_object = GET_MEMBER(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.executable);
- // Strip tag bits for consistent comparison
- *code_object &= ~Py_TAG_BITS;
-
- assert(code_object != NULL);
- if ((void*)*code_object == NULL) {
- return 0;
- }
-
uintptr_t instruction_pointer = GET_MEMBER(uintptr_t, frame, unwinder->debug_offsets.interpreter_frame.instr_ptr);
// Get tlbc_index for free threading builds
@@ -1908,6 +2087,7 @@ parse_async_frame_object(
if (parse_code_object(
unwinder, result, *code_object, instruction_pointer, previous_frame, tlbc_index)) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse code object in async frame");
return -1;
}
@@ -1916,13 +2096,13 @@ parse_async_frame_object(
static int
parse_async_frame_chain(
- RemoteUnwinderObject *self,
+ RemoteUnwinderObject *unwinder,
PyObject *calls,
uintptr_t running_task_code_obj
) {
uintptr_t address_of_current_frame;
- if (find_running_frame(self, self->runtime_start_address, &address_of_current_frame) < 0) {
- chain_exceptions(PyExc_RuntimeError, "Failed to find running frame");
+ if (find_running_frame(unwinder, unwinder->runtime_start_address, &address_of_current_frame) < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Running frame search failed in async chain");
return -1;
}
@@ -1930,7 +2110,7 @@ parse_async_frame_chain(
while ((void*)address_of_current_frame != NULL) {
PyObject* frame_info = NULL;
int res = parse_async_frame_object(
- self,
+ unwinder,
&frame_info,
address_of_current_frame,
&address_of_current_frame,
@@ -1938,7 +2118,7 @@ parse_async_frame_chain(
);
if (res < 0) {
- chain_exceptions(PyExc_RuntimeError, "Failed to parse async frame object");
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Async frame object parsing failed in chain");
return -1;
}
@@ -1948,6 +2128,7 @@ parse_async_frame_chain(
if (PyList_Append(calls, frame_info) == -1) {
Py_DECREF(frame_info);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append frame info to async chain");
return -1;
}
@@ -1975,6 +2156,7 @@ append_awaited_by_for_thread(
if (_Py_RemoteDebug_PagedReadRemoteMemory(&unwinder->handle, head_addr,
sizeof(task_node), task_node) < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read task node head");
return -1;
}
@@ -1984,12 +2166,14 @@ append_awaited_by_for_thread(
while (GET_MEMBER(uintptr_t, task_node, unwinder->debug_offsets.llist_node.next) != head_addr) {
if (++iteration_count > MAX_ITERATIONS) {
PyErr_SetString(PyExc_RuntimeError, "Task list appears corrupted");
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Task list iteration limit exceeded");
return -1;
}
if (GET_MEMBER(uintptr_t, task_node, unwinder->debug_offsets.llist_node.next) == 0) {
PyErr_SetString(PyExc_RuntimeError,
"Invalid linked list structure reading remote memory");
+ set_exception_cause(unwinder, PyExc_RuntimeError, "NULL pointer in task linked list");
return -1;
}
@@ -1997,6 +2181,7 @@ append_awaited_by_for_thread(
- unwinder->async_debug_offsets.asyncio_task_object.task_node;
if (process_single_task_node(unwinder, task_addr, result) < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to process task node in awaited_by");
return -1;
}
@@ -2006,6 +2191,7 @@ append_awaited_by_for_thread(
(uintptr_t)GET_MEMBER(uintptr_t, task_node, unwinder->debug_offsets.llist_node.next),
sizeof(task_node),
task_node) < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read next task node in awaited_by");
return -1;
}
}
@@ -2022,12 +2208,14 @@ append_awaited_by(
{
PyObject *tid_py = PyLong_FromUnsignedLong(tid);
if (tid_py == NULL) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to create thread ID object");
return -1;
}
PyObject *result_item = PyTuple_New(2);
if (result_item == NULL) {
Py_DECREF(tid_py);
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create awaited_by result tuple");
return -1;
}
@@ -2035,6 +2223,7 @@ append_awaited_by(
if (awaited_by_for_thread == NULL) {
Py_DECREF(tid_py);
Py_DECREF(result_item);
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create awaited_by thread list");
return -1;
}
@@ -2042,12 +2231,14 @@ append_awaited_by(
PyTuple_SET_ITEM(result_item, 1, awaited_by_for_thread); // steals ref
if (PyList_Append(result, result_item)) {
Py_DECREF(result_item);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append awaited_by result item");
return -1;
}
Py_DECREF(result_item);
if (append_awaited_by_for_thread(unwinder, head_addr, awaited_by_for_thread))
{
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append awaited_by for thread");
return -1;
}
@@ -2076,6 +2267,7 @@ process_frame_chain(
if (++frame_count > MAX_FRAMES) {
PyErr_SetString(PyExc_RuntimeError, "Too many stack frames (possible infinite loop)");
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Frame chain iteration limit exceeded");
return -1;
}
@@ -2083,6 +2275,7 @@ process_frame_chain(
if (parse_frame_from_chunks(unwinder, &frame, frame_addr, &next_frame_addr, chunks) < 0) {
PyErr_Clear();
if (parse_frame_object(unwinder, &frame, frame_addr, &next_frame_addr) < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to parse frame object in chain");
return -1;
}
}
@@ -2096,11 +2289,13 @@ process_frame_chain(
"Broken frame chain: expected frame at 0x%lx, got 0x%lx",
prev_frame_addr, frame_addr);
Py_DECREF(frame);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Frame chain consistency check failed");
return -1;
}
if (PyList_Append(frame_info, frame) == -1) {
Py_DECREF(frame);
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to append frame to frame info list");
return -1;
}
Py_DECREF(frame);
@@ -2126,6 +2321,7 @@ unwind_stack_for_thread(
int bytes_read = _Py_RemoteDebug_PagedReadRemoteMemory(
&unwinder->handle, *current_tstate, unwinder->debug_offsets.thread_state.size, ts);
if (bytes_read < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to read thread state");
goto error;
}
@@ -2133,14 +2329,17 @@ unwind_stack_for_thread(
frame_info = PyList_New(0);
if (!frame_info) {
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create frame info list");
goto error;
}
if (copy_stack_chunks(unwinder, *current_tstate, &chunks) < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to copy stack chunks");
goto error;
}
if (process_frame_chain(unwinder, frame_addr, &chunks, frame_info) < 0) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to process frame chain");
goto error;
}
@@ -2149,11 +2348,13 @@ unwind_stack_for_thread(
thread_id = PyLong_FromLongLong(
GET_MEMBER(long, ts, unwinder->debug_offsets.thread_state.native_thread_id));
if (thread_id == NULL) {
+ set_exception_cause(unwinder, PyExc_RuntimeError, "Failed to create thread ID");
goto error;
}
result = PyTuple_New(2);
if (result == NULL) {
+ set_exception_cause(unwinder, PyExc_MemoryError, "Failed to create thread unwind result tuple");
goto error;
}
@@ -2186,6 +2387,7 @@ _remote_debugging.RemoteUnwinder.__init__
pid: int
*
all_threads: bool = False
+ debug: bool = False
Initialize a new RemoteUnwinder object for debugging a remote Python process.
@@ -2193,6 +2395,8 @@ Args:
pid: Process ID of the target Python process to debug
all_threads: If True, initialize state for all threads in the process.
If False, only initialize for the main thread.
+ debug: If True, chain exceptions to explain the sequence of events that
+ lead to the exception.
The RemoteUnwinder provides functionality to inspect and debug a running Python
process, including examining thread states, stack frames and other runtime data.
@@ -2205,15 +2409,19 @@ Raises:
static int
_remote_debugging_RemoteUnwinder___init___impl(RemoteUnwinderObject *self,
- int pid, int all_threads)
-/*[clinic end generated code: output=b8027cb247092081 input=6a2056b04e6f050e]*/
+ int pid, int all_threads,
+ int debug)
+/*[clinic end generated code: output=3982f2a7eba49334 input=48a762566b828e91]*/
{
+ self->debug = debug;
if (_Py_RemoteDebug_InitProcHandle(&self->handle, pid) < 0) {
+ set_exception_cause(self, PyExc_RuntimeError, "Failed to initialize process handle");
return -1;
}
self->runtime_start_address = _Py_RemoteDebug_GetPyRuntimeAddress(&self->handle);
if (self->runtime_start_address == 0) {
+ set_exception_cause(self, PyExc_RuntimeError, "Failed to get Python runtime address");
return -1;
}
@@ -2221,6 +2429,13 @@ _remote_debugging_RemoteUnwinder___init___impl(RemoteUnwinderObject *self,
&self->runtime_start_address,
&self->debug_offsets) < 0)
{
+ set_exception_cause(self, PyExc_RuntimeError, "Failed to read debug offsets");
+ return -1;
+ }
+
+ // Validate that the debug offsets are valid
+ if(validate_debug_offsets(&self->debug_offsets) == -1) {
+ set_exception_cause(self, PyExc_RuntimeError, "Invalid debug offsets found");
return -1;
}
@@ -2235,6 +2450,7 @@ _remote_debugging_RemoteUnwinder___init___impl(RemoteUnwinderObject *self,
if (populate_initial_state_data(all_threads, self, self->runtime_start_address,
&self->interpreter_addr ,&self->tstate_addr) < 0)
{
+ set_exception_cause(self, PyExc_RuntimeError, "Failed to populate initial state data");
return -1;
}
@@ -2247,6 +2463,7 @@ _remote_debugging_RemoteUnwinder___init___impl(RemoteUnwinderObject *self,
);
if (self->code_object_cache == NULL) {
PyErr_NoMemory();
+ set_exception_cause(self, PyExc_MemoryError, "Failed to create code object cache");
return -1;
}
@@ -2263,6 +2480,7 @@ _remote_debugging_RemoteUnwinder___init___impl(RemoteUnwinderObject *self,
if (self->tlbc_cache == NULL) {
_Py_hashtable_destroy(self->code_object_cache);
PyErr_NoMemory();
+ set_exception_cause(self, PyExc_MemoryError, "Failed to create TLBC cache");
return -1;
}
#endif
@@ -2314,6 +2532,7 @@ _remote_debugging_RemoteUnwinder_get_stack_trace_impl(RemoteUnwinderObject *self
self->interpreter_addr,
INTERP_STATE_BUFFER_SIZE,
interp_state_buffer) < 0) {
+ set_exception_cause(self, PyExc_RuntimeError, "Failed to read interpreter state buffer");
goto exit;
}
@@ -2347,6 +2566,7 @@ _remote_debugging_RemoteUnwinder_get_stack_trace_impl(RemoteUnwinderObject *self
result = PyList_New(0);
if (!result) {
+ set_exception_cause(self, PyExc_MemoryError, "Failed to create stack trace result list");
goto exit;
}
@@ -2354,12 +2574,14 @@ _remote_debugging_RemoteUnwinder_get_stack_trace_impl(RemoteUnwinderObject *self
PyObject* frame_info = unwind_stack_for_thread(self, &current_tstate);
if (!frame_info) {
Py_CLEAR(result);
+ set_exception_cause(self, PyExc_RuntimeError, "Failed to unwind stack for thread");
goto exit;
}
if (PyList_Append(result, frame_info) == -1) {
Py_DECREF(frame_info);
Py_CLEAR(result);
+ set_exception_cause(self, PyExc_RuntimeError, "Failed to append thread frame info");
goto exit;
}
Py_DECREF(frame_info);
@@ -2425,11 +2647,13 @@ _remote_debugging_RemoteUnwinder_get_all_awaited_by_impl(RemoteUnwinderObject *s
{
if (!self->async_debug_offsets_available) {
PyErr_SetString(PyExc_RuntimeError, "AsyncioDebug section not available");
+ set_exception_cause(self, PyExc_RuntimeError, "AsyncioDebug section unavailable in get_all_awaited_by");
return NULL;
}
PyObject *result = PyList_New(0);
if (result == NULL) {
+ set_exception_cause(self, PyExc_MemoryError, "Failed to create awaited_by result list");
goto result_err;
}
@@ -2442,6 +2666,7 @@ _remote_debugging_RemoteUnwinder_get_all_awaited_by_impl(RemoteUnwinderObject *s
sizeof(void*),
&thread_state_addr))
{
+ set_exception_cause(self, PyExc_RuntimeError, "Failed to read main thread state in get_all_awaited_by");
goto result_err;
}
@@ -2454,6 +2679,7 @@ _remote_debugging_RemoteUnwinder_get_all_awaited_by_impl(RemoteUnwinderObject *s
sizeof(tid),
&tid))
{
+ set_exception_cause(self, PyExc_RuntimeError, "Failed to read thread ID in get_all_awaited_by");
goto result_err;
}
@@ -2462,6 +2688,7 @@ _remote_debugging_RemoteUnwinder_get_all_awaited_by_impl(RemoteUnwinderObject *s
if (append_awaited_by(self, tid, head_addr, result))
{
+ set_exception_cause(self, PyExc_RuntimeError, "Failed to append awaited_by for thread in get_all_awaited_by");
goto result_err;
}
@@ -2471,6 +2698,7 @@ _remote_debugging_RemoteUnwinder_get_all_awaited_by_impl(RemoteUnwinderObject *s
sizeof(void*),
&thread_state_addr))
{
+ set_exception_cause(self, PyExc_RuntimeError, "Failed to read next thread state in get_all_awaited_by");
goto result_err;
}
}
@@ -2485,6 +2713,7 @@ _remote_debugging_RemoteUnwinder_get_all_awaited_by_impl(RemoteUnwinderObject *s
// interesting for debugging.
if (append_awaited_by(self, 0, head_addr, result))
{
+ set_exception_cause(self, PyExc_RuntimeError, "Failed to append interpreter awaited_by in get_all_awaited_by");
goto result_err;
}
@@ -2530,27 +2759,32 @@ _remote_debugging_RemoteUnwinder_get_async_stack_trace_impl(RemoteUnwinderObject
{
if (!self->async_debug_offsets_available) {
PyErr_SetString(PyExc_RuntimeError, "AsyncioDebug section not available");
+ set_exception_cause(self, PyExc_RuntimeError, "AsyncioDebug section unavailable in get_async_stack_trace");
return NULL;
}
PyObject *result = NULL;
PyObject *calls = NULL;
- if (setup_async_result_structure(&result, &calls) < 0) {
+ if (setup_async_result_structure(self, &result, &calls) < 0) {
+ set_exception_cause(self, PyExc_RuntimeError, "Failed to setup async result structure");
goto cleanup;
}
uintptr_t running_task_addr, running_coro_addr, running_task_code_obj;
if (find_running_task_and_coro(self, &running_task_addr,
&running_coro_addr, &running_task_code_obj) < 0) {
+ set_exception_cause(self, PyExc_RuntimeError, "Failed to find running task and coro");
goto cleanup;
}
if (parse_async_frame_chain(self, calls, running_task_code_obj) < 0) {
+ set_exception_cause(self, PyExc_RuntimeError, "Failed to parse async frame chain");
goto cleanup;
}
if (add_task_info_to_result(self, result, running_task_addr) < 0) {
+ set_exception_cause(self, PyExc_RuntimeError, "Failed to add task info to result");
goto cleanup;
}
@@ -2688,4 +2922,3 @@ PyInit__remote_debugging(void)
{
return PyModuleDef_Init(&remote_debugging_module);
}
-
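
Throughout _remote_debugging_module.c the removed chain_exceptions() helper gives way to the set_exception_cause() macro added above, which only annotates a failure when the unwinder was created with debug=True and delegates to a _set_debug_exception_cause() helper not shown in these hunks. The removed code illustrates the underlying chaining idiom; roughly (treating the exact helper as an assumption):

    /* Attach the currently raised exception as the context of a new,
     * higher-level error -- the same shape as the removed chain_exceptions(). */
    static void
    chain_debug_context(PyObject *exc_type, const char *msg)
    {
        PyObject *prev = PyErr_GetRaisedException();  /* take & clear pending exception */
        PyErr_SetString(exc_type, msg);               /* raise the summary error */
        if (prev != NULL) {
            _PyErr_ChainExceptions1(prev);            /* chain as __context__ */
        }
    }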
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
index 976da1340ec..014e624f6c2 100644
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -563,7 +563,7 @@ fill_and_set_sslerror(_sslmodulestate *state,
goto fail;
}
}
- if (PyUnicodeWriter_WriteUTF8(writer, "] ", 2) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, "] ", 2) < 0) {
goto fail;
}
}
diff --git a/Modules/_testcapi/unicode.c b/Modules/_testcapi/unicode.c
index b8ecf53f4f8..e70f5c68bc3 100644
--- a/Modules/_testcapi/unicode.c
+++ b/Modules/_testcapi/unicode.c
@@ -333,6 +333,27 @@ writer_write_utf8(PyObject *self_raw, PyObject *args)
static PyObject*
+writer_write_ascii(PyObject *self_raw, PyObject *args)
+{
+ WriterObject *self = (WriterObject *)self_raw;
+ if (writer_check(self) < 0) {
+ return NULL;
+ }
+
+ char *str;
+ Py_ssize_t size;
+ if (!PyArg_ParseTuple(args, "yn", &str, &size)) {
+ return NULL;
+ }
+
+ if (PyUnicodeWriter_WriteASCII(self->writer, str, size) < 0) {
+ return NULL;
+ }
+ Py_RETURN_NONE;
+}
+
+
+static PyObject*
writer_write_widechar(PyObject *self_raw, PyObject *args)
{
WriterObject *self = (WriterObject *)self_raw;
@@ -513,6 +534,7 @@ writer_finish(PyObject *self_raw, PyObject *Py_UNUSED(args))
static PyMethodDef writer_methods[] = {
{"write_char", _PyCFunction_CAST(writer_write_char), METH_VARARGS},
{"write_utf8", _PyCFunction_CAST(writer_write_utf8), METH_VARARGS},
+ {"write_ascii", _PyCFunction_CAST(writer_write_ascii), METH_VARARGS},
{"write_widechar", _PyCFunction_CAST(writer_write_widechar), METH_VARARGS},
{"write_ucs4", _PyCFunction_CAST(writer_write_ucs4), METH_VARARGS},
{"write_str", _PyCFunction_CAST(writer_write_str), METH_VARARGS},
diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c
index 3aa6e4c9e43..281c5b41137 100644
--- a/Modules/_testcapimodule.c
+++ b/Modules/_testcapimodule.c
@@ -3175,6 +3175,48 @@ create_manual_heap_type(void)
return (PyObject *)type;
}
+typedef struct {
+ PyObject_VAR_HEAD
+} ManagedDictObject;
+
+int ManagedDict_traverse(PyObject *self, visitproc visit, void *arg) {
+ PyObject_VisitManagedDict(self, visit, arg);
+ Py_VISIT(Py_TYPE(self));
+ return 0;
+}
+
+int ManagedDict_clear(PyObject *self) {
+ PyObject_ClearManagedDict(self);
+ return 0;
+}
+
+static PyGetSetDef ManagedDict_getset[] = {
+ {"__dict__", PyObject_GenericGetDict, PyObject_GenericSetDict, NULL, NULL},
+ {NULL, NULL, NULL, NULL, NULL},
+};
+
+static PyType_Slot ManagedDict_slots[] = {
+ {Py_tp_new, (void *)PyType_GenericNew},
+ {Py_tp_getset, (void *)ManagedDict_getset},
+ {Py_tp_traverse, (void *)ManagedDict_traverse},
+ {Py_tp_clear, (void *)ManagedDict_clear},
+ {0}
+};
+
+static PyType_Spec ManagedDict_spec = {
+ "_testcapi.ManagedDictType",
+ sizeof(ManagedDictObject),
+ 0, // itemsize
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_MANAGED_DICT | Py_TPFLAGS_HEAPTYPE | Py_TPFLAGS_HAVE_GC,
+ ManagedDict_slots
+};
+
+static PyObject *
+create_managed_dict_type(void)
+{
+ return PyType_FromSpec(&ManagedDict_spec);
+}
+
static struct PyModuleDef _testcapimodule = {
PyModuleDef_HEAD_INIT,
.m_name = "_testcapi",
@@ -3315,6 +3357,13 @@ PyInit__testcapi(void)
return NULL;
}
+ PyObject *managed_dict_type = create_managed_dict_type();
+ if (managed_dict_type == NULL) {
+ return NULL;
+ }
+ if (PyModule_Add(m, "ManagedDictType", managed_dict_type) < 0) {
+ return NULL;
+ }
/* Include tests from the _testcapi/ directory */
if (_PyTestCapi_Init_Vectorcall(m) < 0) {
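Editor's note: ManagedDictType exercises Py_TPFLAGS_MANAGED_DICT, where the interpreter stores the instance __dict__ itself; tp_traverse and tp_clear therefore call PyObject_VisitManagedDict()/PyObject_ClearManagedDict() instead of walking a tp_dictoffset slot. A usage sketch of the resulting type from C (names hypothetical, assumes Python.h):

    static int
    managed_dict_demo(PyObject *managed_dict_type)
    {
        /* Instantiate the heap type created by create_managed_dict_type(). */
        PyObject *obj = PyObject_CallNoArgs(managed_dict_type);
        if (obj == NULL) {
            return -1;
        }
        PyObject *value = PyLong_FromLong(42);
        /* The attribute lands in the managed __dict__ exposed via the getset. */
        int rc = (value == NULL) ? -1 : PyObject_SetAttrString(obj, "answer", value);
        Py_XDECREF(value);
        Py_DECREF(obj);
        return rc;
    }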
diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c
index 136e6a7a015..845c218e679 100644
--- a/Modules/_testinternalcapi.c
+++ b/Modules/_testinternalcapi.c
@@ -1045,6 +1045,9 @@ get_code_var_counts(PyObject *self, PyObject *_args, PyObject *_kwargs)
#define SET_COUNT(DICT, STRUCT, NAME) \
do { \
PyObject *count = PyLong_FromLong(STRUCT.NAME); \
+ if (count == NULL) { \
+ goto error; \
+ } \
int res = PyDict_SetItemString(DICT, #NAME, count); \
Py_DECREF(count); \
if (res < 0) { \
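Editor's note: the added NULL check matters because PyLong_FromLong() can fail under memory pressure; without it the macro would hand NULL to PyDict_SetItemString(). The same logic as a plain helper, for reference (sketch, assumes Python.h):

    static int
    dict_set_long(PyObject *dict, const char *key, long value)
    {
        PyObject *obj = PyLong_FromLong(value);
        if (obj == NULL) {
            return -1;              /* propagate the error, e.g. MemoryError */
        }
        int res = PyDict_SetItemString(dict, key, obj);
        Py_DECREF(obj);             /* the dict holds its own reference */
        return res;
    }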
diff --git a/Modules/_testlimitedcapi/sys.c b/Modules/_testlimitedcapi/sys.c
index 7d8b7a8569e..cec7f8ab612 100644
--- a/Modules/_testlimitedcapi/sys.c
+++ b/Modules/_testlimitedcapi/sys.c
@@ -1,8 +1,77 @@
+#include "pyconfig.h" // Py_GIL_DISABLED
+// Need limited C API version 3.15 for PySys_GetAttr() etc
+#if !defined(Py_GIL_DISABLED) && !defined(Py_LIMITED_API)
+# define Py_LIMITED_API 0x030f0000
+#endif
#include "parts.h"
#include "util.h"
static PyObject *
+sys_getattr(PyObject *Py_UNUSED(module), PyObject *name)
+{
+ NULLABLE(name);
+ return PySys_GetAttr(name);
+}
+
+static PyObject *
+sys_getattrstring(PyObject *Py_UNUSED(module), PyObject *arg)
+{
+ const char *name;
+ Py_ssize_t size;
+ if (!PyArg_Parse(arg, "z#", &name, &size)) {
+ return NULL;
+ }
+ return PySys_GetAttrString(name);
+}
+
+static PyObject *
+sys_getoptionalattr(PyObject *Py_UNUSED(module), PyObject *name)
+{
+ PyObject *value = UNINITIALIZED_PTR;
+ NULLABLE(name);
+
+ switch (PySys_GetOptionalAttr(name, &value)) {
+ case -1:
+ assert(value == NULL);
+ assert(PyErr_Occurred());
+ return NULL;
+ case 0:
+ assert(value == NULL);
+ return Py_NewRef(PyExc_AttributeError);
+ case 1:
+ return value;
+ default:
+ Py_FatalError("PySys_GetOptionalAttr() returned invalid code");
+ }
+}
+
+static PyObject *
+sys_getoptionalattrstring(PyObject *Py_UNUSED(module), PyObject *arg)
+{
+ PyObject *value = UNINITIALIZED_PTR;
+ const char *name;
+ Py_ssize_t size;
+ if (!PyArg_Parse(arg, "z#", &name, &size)) {
+ return NULL;
+ }
+
+ switch (PySys_GetOptionalAttrString(name, &value)) {
+ case -1:
+ assert(value == NULL);
+ assert(PyErr_Occurred());
+ return NULL;
+ case 0:
+ assert(value == NULL);
+ return Py_NewRef(PyExc_AttributeError);
+ case 1:
+ return value;
+ default:
+ Py_FatalError("PySys_GetOptionalAttrString() returned invalid code");
+ }
+}
+
+static PyObject *
sys_getobject(PyObject *Py_UNUSED(module), PyObject *arg)
{
const char *name;
@@ -39,6 +108,10 @@ sys_getxoptions(PyObject *Py_UNUSED(module), PyObject *Py_UNUSED(ignored))
static PyMethodDef test_methods[] = {
+ {"sys_getattr", sys_getattr, METH_O},
+ {"sys_getattrstring", sys_getattrstring, METH_O},
+ {"sys_getoptionalattr", sys_getoptionalattr, METH_O},
+ {"sys_getoptionalattrstring", sys_getoptionalattrstring, METH_O},
{"sys_getobject", sys_getobject, METH_O},
{"sys_setobject", sys_setobject, METH_VARARGS},
{"sys_getxoptions", sys_getxoptions, METH_NOARGS},
diff --git a/Modules/_threadmodule.c b/Modules/_threadmodule.c
index cc83be4b5ff..150a266b521 100644
--- a/Modules/_threadmodule.c
+++ b/Modules/_threadmodule.c
@@ -10,7 +10,6 @@
#include "pycore_object_deferred.h" // _PyObject_SetDeferredRefcount()
#include "pycore_pylifecycle.h"
#include "pycore_pystate.h" // _PyThreadState_SetCurrent()
-#include "pycore_sysmodule.h" // _PySys_GetOptionalAttr()
#include "pycore_time.h" // _PyTime_FromSeconds()
#include "pycore_weakref.h" // _PyWeakref_GET_REF()
@@ -2290,7 +2289,7 @@ thread_excepthook(PyObject *module, PyObject *args)
PyObject *thread = PyStructSequence_GET_ITEM(args, 3);
PyObject *file;
- if (_PySys_GetOptionalAttr( &_Py_ID(stderr), &file) < 0) {
+ if (PySys_GetOptionalAttr( &_Py_ID(stderr), &file) < 0) {
return NULL;
}
if (file == NULL || file == Py_None) {
diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c
index 77695401919..875840bd6a6 100644
--- a/Modules/_tkinter.c
+++ b/Modules/_tkinter.c
@@ -31,7 +31,6 @@ Copyright (C) 1994 Steen Lumholt.
#endif
#include "pycore_long.h" // _PyLong_IsNegative()
-#include "pycore_sysmodule.h" // _PySys_GetOptionalAttrString()
#include "pycore_unicodeobject.h" // _PyUnicode_AsUTF8String
#ifdef MS_WINDOWS
@@ -146,7 +145,7 @@ _get_tcl_lib_path(void)
int stat_return_value;
PyObject *prefix;
- (void) _PySys_GetOptionalAttrString("base_prefix", &prefix);
+ (void) PySys_GetOptionalAttrString("base_prefix", &prefix);
if (prefix == NULL) {
return NULL;
}
@@ -3547,7 +3546,7 @@ PyInit__tkinter(void)
/* This helps the dynamic loader; in Unicode aware Tcl versions
it also helps Tcl find its encodings. */
- (void) _PySys_GetOptionalAttrString("executable", &uexe);
+ (void) PySys_GetOptionalAttrString("executable", &uexe);
if (uexe && PyUnicode_Check(uexe)) { // sys.executable can be None
cexe = PyUnicode_EncodeFSDefault(uexe);
Py_DECREF(uexe);
diff --git a/Modules/_uuidmodule.c b/Modules/_uuidmodule.c
index c5e78b1510b..c31a7e8fea5 100644
--- a/Modules/_uuidmodule.c
+++ b/Modules/_uuidmodule.c
@@ -78,23 +78,47 @@ py_UuidCreate(PyObject *Py_UNUSED(context),
return NULL;
}
+static int
+py_windows_has_stable_node(void)
+{
+ UUID uuid;
+ RPC_STATUS res;
+ Py_BEGIN_ALLOW_THREADS
+ res = UuidCreateSequential(&uuid);
+ Py_END_ALLOW_THREADS
+ return res == RPC_S_OK;
+}
#endif /* MS_WINDOWS */
static int
-uuid_exec(PyObject *module) {
+uuid_exec(PyObject *module)
+{
+#define ADD_INT(NAME, VALUE) \
+ do { \
+ if (PyModule_AddIntConstant(module, (NAME), (VALUE)) < 0) { \
+ return -1; \
+ } \
+ } while (0)
+
assert(sizeof(uuid_t) == 16);
#if defined(MS_WINDOWS)
- int has_uuid_generate_time_safe = 0;
+ ADD_INT("has_uuid_generate_time_safe", 0);
#elif defined(HAVE_UUID_GENERATE_TIME_SAFE)
- int has_uuid_generate_time_safe = 1;
+ ADD_INT("has_uuid_generate_time_safe", 1);
#else
- int has_uuid_generate_time_safe = 0;
+ ADD_INT("has_uuid_generate_time_safe", 0);
#endif
- if (PyModule_AddIntConstant(module, "has_uuid_generate_time_safe",
- has_uuid_generate_time_safe) < 0) {
- return -1;
- }
+
+#if defined(MS_WINDOWS)
+ ADD_INT("has_stable_extractable_node", py_windows_has_stable_node());
+#elif defined(HAVE_UUID_GENERATE_TIME_SAFE_STABLE_MAC)
+ ADD_INT("has_stable_extractable_node", 1);
+#else
+ ADD_INT("has_stable_extractable_node", 0);
+#endif
+
+#undef ADD_INT
return 0;
}
diff --git a/Modules/_winapi.c b/Modules/_winapi.c
index 044505fab62..b4cfbebcb1b 100644
--- a/Modules/_winapi.c
+++ b/Modules/_winapi.c
@@ -2748,6 +2748,19 @@ _winapi_GetACP_impl(PyObject *module)
}
/*[clinic input]
+_winapi.GetOEMCP
+
+Get the current Windows OEM code page identifier.
+[clinic start generated code]*/
+
+static PyObject *
+_winapi_GetOEMCP_impl(PyObject *module)
+/*[clinic end generated code: output=4def5b07a8be1b3b input=e8caf4353a28e28e]*/
+{
+ return PyLong_FromUnsignedLong(GetOEMCP());
+}
+
+/*[clinic input]
_winapi.GetFileType -> DWORD
handle: HANDLE
@@ -3007,6 +3020,7 @@ static PyMethodDef winapi_functions[] = {
_WINAPI_WAITFORSINGLEOBJECT_METHODDEF
_WINAPI_WRITEFILE_METHODDEF
_WINAPI_GETACP_METHODDEF
+ _WINAPI_GETOEMCP_METHODDEF
_WINAPI_GETFILETYPE_METHODDEF
_WINAPI__MIMETYPES_READ_WINDOWS_REGISTRY_METHODDEF
_WINAPI_NEEDCURRENTDIRECTORYFOREXEPATH_METHODDEF
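Editor's note: GetACP() and GetOEMCP() are plain Win32 calls returning UINT code page identifiers (for example 1252 vs. 437 on US-English systems); the new method simply boxes the OEM value the way GetACP already does for the ANSI one. Illustrative Windows-only sketch (assumes Python.h and <windows.h>):

    #ifdef MS_WINDOWS
    static PyObject *
    code_pages_sketch(void)
    {
        /* (ANSI code page, OEM code page) as a 2-tuple of unsigned ints. */
        return Py_BuildValue("II", GetACP(), GetOEMCP());
    }
    #endif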
diff --git a/Modules/_zstd/_zstdmodule.c b/Modules/_zstd/_zstdmodule.c
index 56ad999e5cd..b0e50f873f4 100644
--- a/Modules/_zstd/_zstdmodule.c
+++ b/Modules/_zstd/_zstdmodule.c
@@ -7,7 +7,6 @@
#include "Python.h"
#include "_zstdmodule.h"
-#include "zstddict.h"
#include <zstd.h> // ZSTD_*()
#include <zdict.h> // ZDICT_*()
@@ -20,14 +19,52 @@ module _zstd
#include "clinic/_zstdmodule.c.h"
+ZstdDict *
+_Py_parse_zstd_dict(const _zstd_state *state, PyObject *dict, int *ptype)
+{
+ if (state == NULL) {
+ return NULL;
+ }
+
+ /* Check ZstdDict */
+ if (PyObject_TypeCheck(dict, state->ZstdDict_type)) {
+ return (ZstdDict*)dict;
+ }
+
+ /* Check (ZstdDict, type) */
+ if (PyTuple_CheckExact(dict) && PyTuple_GET_SIZE(dict) == 2
+ && PyObject_TypeCheck(PyTuple_GET_ITEM(dict, 0), state->ZstdDict_type)
+ && PyLong_Check(PyTuple_GET_ITEM(dict, 1)))
+ {
+ int type = PyLong_AsInt(PyTuple_GET_ITEM(dict, 1));
+ if (type == -1 && PyErr_Occurred()) {
+ return NULL;
+ }
+ if (type == DICT_TYPE_DIGESTED
+ || type == DICT_TYPE_UNDIGESTED
+ || type == DICT_TYPE_PREFIX)
+ {
+ *ptype = type;
+ return (ZstdDict*)PyTuple_GET_ITEM(dict, 0);
+ }
+ }
+
+ /* Wrong type */
+ PyErr_SetString(PyExc_TypeError,
+ "zstd_dict argument should be a ZstdDict object.");
+ return NULL;
+}
+
/* Format error message and set ZstdError. */
void
-set_zstd_error(const _zstd_state* const state,
- error_type type, size_t zstd_ret)
+set_zstd_error(const _zstd_state *state, error_type type, size_t zstd_ret)
{
- char *msg;
+ const char *msg;
assert(ZSTD_isError(zstd_ret));
+ if (state == NULL) {
+ return;
+ }
switch (type) {
case ERR_DECOMPRESS:
msg = "Unable to decompress Zstandard data: %s";
@@ -103,16 +140,13 @@ static const ParameterInfo dp_list[] = {
};
void
-set_parameter_error(const _zstd_state* const state, int is_compress,
- int key_v, int value_v)
+set_parameter_error(int is_compress, int key_v, int value_v)
{
ParameterInfo const *list;
int list_size;
- char const *name;
char *type;
ZSTD_bounds bounds;
- int i;
- char pos_msg[128];
+ char pos_msg[64];
if (is_compress) {
list = cp_list;
@@ -126,8 +160,8 @@ set_parameter_error(const _zstd_state* const state, int is_compress,
}
/* Find parameter's name */
- name = NULL;
- for (i = 0; i < list_size; i++) {
+ char const *name = NULL;
+ for (int i = 0; i < list_size; i++) {
if (key_v == (list+i)->parameter) {
name = (list+i)->parameter_name;
break;
@@ -149,20 +183,16 @@ set_parameter_error(const _zstd_state* const state, int is_compress,
bounds = ZSTD_dParam_getBounds(key_v);
}
if (ZSTD_isError(bounds.error)) {
- PyErr_Format(state->ZstdError,
- "Invalid zstd %s parameter \"%s\".",
+ PyErr_Format(PyExc_ValueError, "invalid %s parameter '%s'",
type, name);
return;
}
/* Error message */
- PyErr_Format(state->ZstdError,
- "Error when setting zstd %s parameter \"%s\", it "
- "should %d <= value <= %d, provided value is %d. "
- "(%d-bit build)",
- type, name,
- bounds.lowerBound, bounds.upperBound, value_v,
- 8*(int)sizeof(Py_ssize_t));
+ PyErr_Format(PyExc_ValueError,
+ "%s parameter '%s' received an illegal value %d; "
+ "the valid range is [%d, %d]",
+ type, name, value_v, bounds.lowerBound, bounds.upperBound);
}
static inline _zstd_state*
@@ -181,7 +211,7 @@ calculate_samples_stats(PyBytesObject *samples_bytes, PyObject *samples_sizes,
Py_ssize_t sizes_sum;
Py_ssize_t i;
- chunks_number = Py_SIZE(samples_sizes);
+ chunks_number = PyTuple_GET_SIZE(samples_sizes);
if ((size_t) chunks_number > UINT32_MAX) {
PyErr_Format(PyExc_ValueError,
"The number of samples should be <= %u.", UINT32_MAX);
@@ -195,20 +225,24 @@ calculate_samples_stats(PyBytesObject *samples_bytes, PyObject *samples_sizes,
return -1;
}
- sizes_sum = 0;
+ sizes_sum = PyBytes_GET_SIZE(samples_bytes);
for (i = 0; i < chunks_number; i++) {
- PyObject *size = PyTuple_GetItem(samples_sizes, i);
- (*chunk_sizes)[i] = PyLong_AsSize_t(size);
- if ((*chunk_sizes)[i] == (size_t)-1 && PyErr_Occurred()) {
- PyErr_Format(PyExc_ValueError,
- "Items in samples_sizes should be an int "
- "object, with a value between 0 and %u.", SIZE_MAX);
+ size_t size = PyLong_AsSize_t(PyTuple_GET_ITEM(samples_sizes, i));
+ (*chunk_sizes)[i] = size;
+ if (size == (size_t)-1 && PyErr_Occurred()) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ goto sum_error;
+ }
return -1;
}
- sizes_sum += (*chunk_sizes)[i];
+ if ((size_t)sizes_sum < size) {
+ goto sum_error;
+ }
+ sizes_sum -= size;
}
- if (sizes_sum != Py_SIZE(samples_bytes)) {
+ if (sizes_sum != 0) {
+sum_error:
PyErr_SetString(PyExc_ValueError,
"The samples size tuple doesn't match the "
"concatenation's size.");
@@ -264,7 +298,7 @@ _zstd_train_dict_impl(PyObject *module, PyBytesObject *samples_bytes,
/* Train the dictionary */
char *dst_dict_buffer = PyBytes_AS_STRING(dst_dict_bytes);
- char *samples_buffer = PyBytes_AS_STRING(samples_bytes);
+ const char *samples_buffer = PyBytes_AS_STRING(samples_bytes);
Py_BEGIN_ALLOW_THREADS
zstd_ret = ZDICT_trainFromBuffer(dst_dict_buffer, dict_size,
samples_buffer,
@@ -514,20 +548,10 @@ _zstd_set_parameter_types_impl(PyObject *module, PyObject *c_parameter_type,
{
_zstd_state* mod_state = get_zstd_state(module);
- if (!PyType_Check(c_parameter_type) || !PyType_Check(d_parameter_type)) {
- PyErr_SetString(PyExc_ValueError,
- "The two arguments should be CompressionParameter and "
- "DecompressionParameter types.");
- return NULL;
- }
-
- Py_XDECREF(mod_state->CParameter_type);
Py_INCREF(c_parameter_type);
- mod_state->CParameter_type = (PyTypeObject*)c_parameter_type;
-
- Py_XDECREF(mod_state->DParameter_type);
+ Py_XSETREF(mod_state->CParameter_type, (PyTypeObject*)c_parameter_type);
Py_INCREF(d_parameter_type);
- mod_state->DParameter_type = (PyTypeObject*)d_parameter_type;
+ Py_XSETREF(mod_state->DParameter_type, (PyTypeObject*)d_parameter_type);
Py_RETURN_NONE;
}
@@ -590,7 +614,6 @@ do { \
return -1;
}
if (PyModule_AddType(m, (PyTypeObject *)mod_state->ZstdError) < 0) {
- Py_DECREF(mod_state->ZstdError);
return -1;
}
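Editor's note: the rewritten loop in calculate_samples_stats() validates the per-chunk sizes by subtracting them from the known total instead of summing them, so a buggy or hostile sizes tuple cannot overflow Py_ssize_t before the mismatch is detected. The check in isolation (sketch, assumes Python.h for Py_ssize_t):

    static int
    sizes_match(Py_ssize_t total, const size_t *sizes, Py_ssize_t n)
    {
        Py_ssize_t remaining = total;           /* total >= 0: a bytes length */
        for (Py_ssize_t i = 0; i < n; i++) {
            if ((size_t)remaining < sizes[i]) {
                return 0;                       /* chunks claim more bytes than exist */
            }
            remaining -= (Py_ssize_t)sizes[i];
        }
        return remaining == 0;                  /* every byte accounted for */
    }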
diff --git a/Modules/_zstd/_zstdmodule.h b/Modules/_zstd/_zstdmodule.h
index b36486442c6..c73f15b3c52 100644
--- a/Modules/_zstd/_zstdmodule.h
+++ b/Modules/_zstd/_zstdmodule.h
@@ -5,6 +5,8 @@
#ifndef ZSTD_MODULE_H
#define ZSTD_MODULE_H
+#include "zstddict.h"
+
/* Type specs */
extern PyType_Spec zstd_dict_type_spec;
extern PyType_Spec zstd_compressor_type_spec;
@@ -43,13 +45,16 @@ typedef enum {
DICT_TYPE_PREFIX = 2
} dictionary_type;
+extern ZstdDict *
+_Py_parse_zstd_dict(const _zstd_state *state,
+ PyObject *dict, int *type);
+
/* Format error message and set ZstdError. */
extern void
-set_zstd_error(const _zstd_state* const state,
- const error_type type, size_t zstd_ret);
+set_zstd_error(const _zstd_state *state,
+ error_type type, size_t zstd_ret);
extern void
-set_parameter_error(const _zstd_state* const state, int is_compress,
- int key_v, int value_v);
+set_parameter_error(int is_compress, int key_v, int value_v);
#endif // !ZSTD_MODULE_H
diff --git a/Modules/_zstd/clinic/zstddict.c.h b/Modules/_zstd/clinic/zstddict.c.h
index 810befdaf71..79db85405d6 100644
--- a/Modules/_zstd/clinic/zstddict.c.h
+++ b/Modules/_zstd/clinic/zstddict.c.h
@@ -25,7 +25,7 @@ PyDoc_STRVAR(_zstd_ZstdDict_new__doc__,
"by multiple ZstdCompressor or ZstdDecompressor objects.");
static PyObject *
-_zstd_ZstdDict_new_impl(PyTypeObject *type, PyObject *dict_content,
+_zstd_ZstdDict_new_impl(PyTypeObject *type, Py_buffer *dict_content,
int is_raw);
static PyObject *
@@ -63,7 +63,7 @@ _zstd_ZstdDict_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
PyObject * const *fastargs;
Py_ssize_t nargs = PyTuple_GET_SIZE(args);
Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 1;
- PyObject *dict_content;
+ Py_buffer dict_content = {NULL, NULL};
int is_raw = 0;
fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser,
@@ -71,7 +71,9 @@ _zstd_ZstdDict_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
if (!fastargs) {
goto exit;
}
- dict_content = fastargs[0];
+ if (PyObject_GetBuffer(fastargs[0], &dict_content, PyBUF_SIMPLE) != 0) {
+ goto exit;
+ }
if (!noptargs) {
goto skip_optional_kwonly;
}
@@ -80,12 +82,43 @@ _zstd_ZstdDict_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
goto exit;
}
skip_optional_kwonly:
- return_value = _zstd_ZstdDict_new_impl(type, dict_content, is_raw);
+ return_value = _zstd_ZstdDict_new_impl(type, &dict_content, is_raw);
exit:
+ /* Cleanup for dict_content */
+ if (dict_content.obj) {
+ PyBuffer_Release(&dict_content);
+ }
+
return return_value;
}
+PyDoc_STRVAR(_zstd_ZstdDict_dict_content__doc__,
+"The content of a Zstandard dictionary, as a bytes object.");
+#if defined(_zstd_ZstdDict_dict_content_DOCSTR)
+# undef _zstd_ZstdDict_dict_content_DOCSTR
+#endif
+#define _zstd_ZstdDict_dict_content_DOCSTR _zstd_ZstdDict_dict_content__doc__
+
+#if !defined(_zstd_ZstdDict_dict_content_DOCSTR)
+# define _zstd_ZstdDict_dict_content_DOCSTR NULL
+#endif
+#if defined(_ZSTD_ZSTDDICT_DICT_CONTENT_GETSETDEF)
+# undef _ZSTD_ZSTDDICT_DICT_CONTENT_GETSETDEF
+# define _ZSTD_ZSTDDICT_DICT_CONTENT_GETSETDEF {"dict_content", (getter)_zstd_ZstdDict_dict_content_get, (setter)_zstd_ZstdDict_dict_content_set, _zstd_ZstdDict_dict_content_DOCSTR},
+#else
+# define _ZSTD_ZSTDDICT_DICT_CONTENT_GETSETDEF {"dict_content", (getter)_zstd_ZstdDict_dict_content_get, NULL, _zstd_ZstdDict_dict_content_DOCSTR},
+#endif
+
+static PyObject *
+_zstd_ZstdDict_dict_content_get_impl(ZstdDict *self);
+
+static PyObject *
+_zstd_ZstdDict_dict_content_get(PyObject *self, void *Py_UNUSED(context))
+{
+ return _zstd_ZstdDict_dict_content_get_impl((ZstdDict *)self);
+}
+
PyDoc_STRVAR(_zstd_ZstdDict_as_digested_dict__doc__,
"Load as a digested dictionary to compressor.\n"
"\n"
@@ -189,4 +222,4 @@ _zstd_ZstdDict_as_prefix_get(PyObject *self, void *Py_UNUSED(context))
{
return _zstd_ZstdDict_as_prefix_get_impl((ZstdDict *)self);
}
-/*[clinic end generated code: output=47b12b5848b53ed8 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=4696cbc722e5fdfc input=a9049054013a1b77]*/
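Editor's note: the regenerated wrapper now converts dict_content through a Py_buffer, which obliges it to release the view on every exit path. The underlying pattern, stripped of the clinic scaffolding (sketch, assumes Python.h):

    static PyObject *
    consume_buffer_sketch(PyObject *arg)
    {
        Py_buffer view = {NULL, NULL};
        if (PyObject_GetBuffer(arg, &view, PyBUF_SIMPLE) < 0) {
            return NULL;                        /* not a bytes-like object */
        }
        /* ... use view.buf / view.len only while the view is held ... */
        PyObject *result = PyBytes_FromStringAndSize(view.buf, view.len);
        PyBuffer_Release(&view);                /* always release, even on error */
        return result;
    }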
diff --git a/Modules/_zstd/compressor.c b/Modules/_zstd/compressor.c
index 31cb8c535c0..e1217635f60 100644
--- a/Modules/_zstd/compressor.c
+++ b/Modules/_zstd/compressor.c
@@ -16,7 +16,6 @@ class _zstd.ZstdCompressor "ZstdCompressor *" "&zstd_compressor_type_spec"
#include "_zstdmodule.h"
#include "buffer.h"
-#include "zstddict.h"
#include "internal/pycore_lock.h" // PyMutex_IsLocked
#include <stddef.h> // offsetof()
@@ -49,98 +48,103 @@ typedef struct {
#include "clinic/compressor.c.h"
static int
-_zstd_set_c_parameters(ZstdCompressor *self, PyObject *level_or_options,
- const char *arg_name, const char* arg_type)
+_zstd_set_c_level(ZstdCompressor *self, int level)
+{
+ /* Set integer compression level */
+ int min_level = ZSTD_minCLevel();
+ int max_level = ZSTD_maxCLevel();
+ if (level < min_level || level > max_level) {
+ PyErr_Format(PyExc_ValueError,
+ "illegal compression level %d; the valid range is [%d, %d]",
+ level, min_level, max_level);
+ return -1;
+ }
+
+ /* Save for generating ZSTD_CDICT */
+ self->compression_level = level;
+
+ /* Set compressionLevel to compression context */
+ size_t zstd_ret = ZSTD_CCtx_setParameter(
+ self->cctx, ZSTD_c_compressionLevel, level);
+
+ /* Check error */
+ if (ZSTD_isError(zstd_ret)) {
+ _zstd_state* mod_state = PyType_GetModuleState(Py_TYPE(self));
+ set_zstd_error(mod_state, ERR_SET_C_LEVEL, zstd_ret);
+ return -1;
+ }
+ return 0;
+}
+
+static int
+_zstd_set_c_parameters(ZstdCompressor *self, PyObject *options)
{
- size_t zstd_ret;
_zstd_state* mod_state = PyType_GetModuleState(Py_TYPE(self));
if (mod_state == NULL) {
return -1;
}
- /* Integer compression level */
- if (PyLong_Check(level_or_options)) {
- int level = PyLong_AsInt(level_or_options);
- if (level == -1 && PyErr_Occurred()) {
- PyErr_Format(PyExc_ValueError,
- "Compression level should be an int value between "
- "%d and %d.", ZSTD_minCLevel(), ZSTD_maxCLevel());
+ if (!PyDict_Check(options)) {
+ PyErr_Format(PyExc_TypeError,
+ "ZstdCompressor() argument 'options' must be dict, not %T",
+ options);
+ return -1;
+ }
+
+ Py_ssize_t pos = 0;
+ PyObject *key, *value;
+ while (PyDict_Next(options, &pos, &key, &value)) {
+ /* Check key type */
+ if (Py_TYPE(key) == mod_state->DParameter_type) {
+ PyErr_SetString(PyExc_TypeError,
+ "compression options dictionary key must not be a "
+ "DecompressionParameter attribute");
return -1;
}
- /* Save for generating ZSTD_CDICT */
- self->compression_level = level;
-
- /* Set compressionLevel to compression context */
- zstd_ret = ZSTD_CCtx_setParameter(self->cctx,
- ZSTD_c_compressionLevel,
- level);
-
- /* Check error */
- if (ZSTD_isError(zstd_ret)) {
- set_zstd_error(mod_state, ERR_SET_C_LEVEL, zstd_ret);
+ Py_INCREF(key);
+ Py_INCREF(value);
+ int key_v = PyLong_AsInt(key);
+ Py_DECREF(key);
+ if (key_v == -1 && PyErr_Occurred()) {
+ Py_DECREF(value);
return -1;
}
- return 0;
- }
- /* Options dict */
- if (PyDict_Check(level_or_options)) {
- PyObject *key, *value;
- Py_ssize_t pos = 0;
+ int value_v = PyLong_AsInt(value);
+ Py_DECREF(value);
+ if (value_v == -1 && PyErr_Occurred()) {
+ return -1;
+ }
- while (PyDict_Next(level_or_options, &pos, &key, &value)) {
- /* Check key type */
- if (Py_TYPE(key) == mod_state->DParameter_type) {
- PyErr_SetString(PyExc_TypeError,
- "Key of compression options dict should "
- "NOT be a DecompressionParameter attribute.");
+ if (key_v == ZSTD_c_compressionLevel) {
+ if (_zstd_set_c_level(self, value_v) < 0) {
return -1;
}
-
- int key_v = PyLong_AsInt(key);
- if (key_v == -1 && PyErr_Occurred()) {
- PyErr_SetString(PyExc_ValueError,
- "Key of options dict should be either a "
- "CompressionParameter attribute or an int.");
- return -1;
+ continue;
+ }
+ if (key_v == ZSTD_c_nbWorkers) {
+ /* From the zstd library docs:
+ 1. When nbWorkers >= 1, triggers asynchronous mode when
+ used with ZSTD_compressStream2().
+ 2. Default value is `0`, aka "single-threaded mode" : no
+ worker is spawned, compression is performed inside
+ caller's thread, all invocations are blocking. */
+ if (value_v != 0) {
+ self->use_multithread = 1;
}
+ }
- int value_v = PyLong_AsInt(value);
- if (value_v == -1 && PyErr_Occurred()) {
- PyErr_SetString(PyExc_ValueError,
- "Value of options dict should be an int.");
- return -1;
- }
+ /* Set parameter to compression context */
+ size_t zstd_ret = ZSTD_CCtx_setParameter(self->cctx, key_v, value_v);
- if (key_v == ZSTD_c_compressionLevel) {
- /* Save for generating ZSTD_CDICT */
- self->compression_level = value_v;
- }
- else if (key_v == ZSTD_c_nbWorkers) {
- /* From the zstd library docs:
- 1. When nbWorkers >= 1, triggers asynchronous mode when
- used with ZSTD_compressStream2().
- 2, Default value is `0`, aka "single-threaded mode" : no
- worker is spawned, compression is performed inside
- caller's thread, all invocations are blocking. */
- if (value_v != 0) {
- self->use_multithread = 1;
- }
- }
-
- /* Set parameter to compression context */
- zstd_ret = ZSTD_CCtx_setParameter(self->cctx, key_v, value_v);
- if (ZSTD_isError(zstd_ret)) {
- set_parameter_error(mod_state, 1, key_v, value_v);
- return -1;
- }
+ /* Check error */
+ if (ZSTD_isError(zstd_ret)) {
+ set_parameter_error(1, key_v, value_v);
+ return -1;
}
- return 0;
}
- PyErr_Format(PyExc_TypeError,
- "Invalid type for %s. Expected %s", arg_name, arg_type);
- return -1;
+ return 0;
}
static void
@@ -173,11 +177,8 @@ _get_CDict(ZstdDict *self, int compressionLevel)
}
if (capsule == NULL) {
/* Create ZSTD_CDict instance */
- char *dict_buffer = PyBytes_AS_STRING(self->dict_content);
- Py_ssize_t dict_len = Py_SIZE(self->dict_content);
Py_BEGIN_ALLOW_THREADS
- cdict = ZSTD_createCDict(dict_buffer,
- dict_len,
+ cdict = ZSTD_createCDict(self->dict_buffer, self->dict_len,
compressionLevel);
Py_END_ALLOW_THREADS
@@ -236,17 +237,13 @@ _zstd_load_impl(ZstdCompressor *self, ZstdDict *zd,
else if (type == DICT_TYPE_UNDIGESTED) {
/* Load a dictionary.
It doesn't override compression context's parameters. */
- zstd_ret = ZSTD_CCtx_loadDictionary(
- self->cctx,
- PyBytes_AS_STRING(zd->dict_content),
- Py_SIZE(zd->dict_content));
+ zstd_ret = ZSTD_CCtx_loadDictionary(self->cctx, zd->dict_buffer,
+ zd->dict_len);
}
else if (type == DICT_TYPE_PREFIX) {
/* Load a prefix */
- zstd_ret = ZSTD_CCtx_refPrefix(
- self->cctx,
- PyBytes_AS_STRING(zd->dict_content),
- Py_SIZE(zd->dict_content));
+ zstd_ret = ZSTD_CCtx_refPrefix(self->cctx, zd->dict_buffer,
+ zd->dict_len);
}
else {
Py_UNREACHABLE();
@@ -264,56 +261,17 @@ static int
_zstd_load_c_dict(ZstdCompressor *self, PyObject *dict)
{
_zstd_state* mod_state = PyType_GetModuleState(Py_TYPE(self));
- if (mod_state == NULL) {
+ /* When compressing, use undigested dictionary by default. */
+ int type = DICT_TYPE_UNDIGESTED;
+ ZstdDict *zd = _Py_parse_zstd_dict(mod_state, dict, &type);
+ if (zd == NULL) {
return -1;
}
- ZstdDict *zd;
- int type, ret;
-
- /* Check ZstdDict */
- ret = PyObject_IsInstance(dict, (PyObject*)mod_state->ZstdDict_type);
- if (ret < 0) {
- return -1;
- }
- else if (ret > 0) {
- /* When compressing, use undigested dictionary by default. */
- zd = (ZstdDict*)dict;
- type = DICT_TYPE_UNDIGESTED;
- PyMutex_Lock(&zd->lock);
- ret = _zstd_load_impl(self, zd, mod_state, type);
- PyMutex_Unlock(&zd->lock);
- return ret;
- }
-
- /* Check (ZstdDict, type) */
- if (PyTuple_CheckExact(dict) && PyTuple_GET_SIZE(dict) == 2) {
- /* Check ZstdDict */
- ret = PyObject_IsInstance(PyTuple_GET_ITEM(dict, 0),
- (PyObject*)mod_state->ZstdDict_type);
- if (ret < 0) {
- return -1;
- }
- else if (ret > 0) {
- /* type == -1 may indicate an error. */
- type = PyLong_AsInt(PyTuple_GET_ITEM(dict, 1));
- if (type == DICT_TYPE_DIGESTED
- || type == DICT_TYPE_UNDIGESTED
- || type == DICT_TYPE_PREFIX)
- {
- assert(type >= 0);
- zd = (ZstdDict*)PyTuple_GET_ITEM(dict, 0);
- PyMutex_Lock(&zd->lock);
- ret = _zstd_load_impl(self, zd, mod_state, type);
- PyMutex_Unlock(&zd->lock);
- return ret;
- }
- }
- }
-
- /* Wrong type */
- PyErr_SetString(PyExc_TypeError,
- "zstd_dict argument should be ZstdDict object.");
- return -1;
+ int ret;
+ PyMutex_Lock(&zd->lock);
+ ret = _zstd_load_impl(self, zd, mod_state, type);
+ PyMutex_Unlock(&zd->lock);
+ return ret;
}
/*[clinic input]
@@ -361,20 +319,35 @@ _zstd_ZstdCompressor_new_impl(PyTypeObject *type, PyObject *level,
self->last_mode = ZSTD_e_end;
if (level != Py_None && options != Py_None) {
- PyErr_SetString(PyExc_RuntimeError,
+ PyErr_SetString(PyExc_TypeError,
"Only one of level or options should be used.");
goto error;
}
- /* Set compressLevel/options to compression context */
+ /* Set compression level */
if (level != Py_None) {
- if (_zstd_set_c_parameters(self, level, "level", "int") < 0) {
+ if (!PyLong_Check(level)) {
+ PyErr_SetString(PyExc_TypeError,
+ "invalid type for level, expected int");
+ goto error;
+ }
+ int level_v = PyLong_AsInt(level);
+ if (level_v == -1 && PyErr_Occurred()) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ PyErr_Format(PyExc_ValueError,
+ "illegal compression level; the valid range is [%d, %d]",
+ ZSTD_minCLevel(), ZSTD_maxCLevel());
+ }
+ goto error;
+ }
+ if (_zstd_set_c_level(self, level_v) < 0) {
goto error;
}
}
+ /* Set options dictionary */
if (options != Py_None) {
- if (_zstd_set_c_parameters(self, options, "options", "dict") < 0) {
+ if (_zstd_set_c_parameters(self, options) < 0) {
goto error;
}
}
@@ -465,9 +438,7 @@ compress_lock_held(ZstdCompressor *self, Py_buffer *data,
/* Check error */
if (ZSTD_isError(zstd_ret)) {
_zstd_state* mod_state = PyType_GetModuleState(Py_TYPE(self));
- if (mod_state != NULL) {
- set_zstd_error(mod_state, ERR_COMPRESS, zstd_ret);
- }
+ set_zstd_error(mod_state, ERR_COMPRESS, zstd_ret);
goto error;
}
@@ -496,7 +467,7 @@ error:
return NULL;
}
-#ifdef Py_DEBUG
+#ifndef NDEBUG
static inline int
mt_continue_should_break(ZSTD_inBuffer *in, ZSTD_outBuffer *out)
{
@@ -537,9 +508,7 @@ compress_mt_continue_lock_held(ZstdCompressor *self, Py_buffer *data)
/* Check error */
if (ZSTD_isError(zstd_ret)) {
_zstd_state* mod_state = PyType_GetModuleState(Py_TYPE(self));
- if (mod_state != NULL) {
- set_zstd_error(mod_state, ERR_COMPRESS, zstd_ret);
- }
+ set_zstd_error(mod_state, ERR_COMPRESS, zstd_ret);
goto error;
}
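Editor's note: the use_multithread flag mirrors zstd's own contract quoted in the comment above: once ZSTD_c_nbWorkers is >= 1, ZSTD_compressStream2() runs asynchronously and the wrapper must keep flushing until the frame is finished. A bare zstd-level sketch of enabling that mode (assumes <zstd.h>):

    static int
    enable_workers_sketch(void)
    {
        ZSTD_CCtx *cctx = ZSTD_createCCtx();
        if (cctx == NULL) {
            return -1;
        }
        size_t err = ZSTD_CCtx_setParameter(cctx, ZSTD_c_nbWorkers, 2);
        /* Single-threaded libzstd builds reject the parameter; callers then
           fall back to nbWorkers == 0 (blocking, in-thread compression). */
        int ok = !ZSTD_isError(err);
        ZSTD_freeCCtx(cctx);
        return ok ? 0 : -1;
    }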
diff --git a/Modules/_zstd/decompressor.c b/Modules/_zstd/decompressor.c
index d084f0847c7..c53d6e4cb05 100644
--- a/Modules/_zstd/decompressor.c
+++ b/Modules/_zstd/decompressor.c
@@ -16,7 +16,6 @@ class _zstd.ZstdDecompressor "ZstdDecompressor *" "&zstd_decompressor_type_spec"
#include "_zstdmodule.h"
#include "buffer.h"
-#include "zstddict.h"
#include "internal/pycore_lock.h" // PyMutex_IsLocked
#include <stdbool.h> // bool
@@ -61,17 +60,10 @@ _get_DDict(ZstdDict *self)
assert(PyMutex_IsLocked(&self->lock));
ZSTD_DDict *ret;
- /* Already created */
- if (self->d_dict != NULL) {
- return self->d_dict;
- }
-
if (self->d_dict == NULL) {
/* Create ZSTD_DDict instance from dictionary content */
- char *dict_buffer = PyBytes_AS_STRING(self->dict_content);
- Py_ssize_t dict_len = Py_SIZE(self->dict_content);
Py_BEGIN_ALLOW_THREADS
- ret = ZSTD_createDDict(dict_buffer, dict_len);
+ ret = ZSTD_createDDict(self->dict_buffer, self->dict_len);
Py_END_ALLOW_THREADS
self->d_dict = ret;
@@ -88,56 +80,52 @@ _get_DDict(ZstdDict *self)
return self->d_dict;
}
-/* Set decompression parameters to decompression context */
static int
_zstd_set_d_parameters(ZstdDecompressor *self, PyObject *options)
{
- size_t zstd_ret;
- PyObject *key, *value;
- Py_ssize_t pos;
_zstd_state* mod_state = PyType_GetModuleState(Py_TYPE(self));
if (mod_state == NULL) {
return -1;
}
if (!PyDict_Check(options)) {
- PyErr_SetString(PyExc_TypeError,
- "options argument should be dict object.");
+ PyErr_Format(PyExc_TypeError,
+ "ZstdDecompressor() argument 'options' must be dict, not %T",
+ options);
return -1;
}
- pos = 0;
+ Py_ssize_t pos = 0;
+ PyObject *key, *value;
while (PyDict_Next(options, &pos, &key, &value)) {
/* Check key type */
if (Py_TYPE(key) == mod_state->CParameter_type) {
PyErr_SetString(PyExc_TypeError,
- "Key of decompression options dict should "
- "NOT be a CompressionParameter attribute.");
+ "compression options dictionary key must not be a "
+ "CompressionParameter attribute");
return -1;
}
- /* Both key & value should be 32-bit signed int */
+ Py_INCREF(key);
+ Py_INCREF(value);
int key_v = PyLong_AsInt(key);
+ Py_DECREF(key);
if (key_v == -1 && PyErr_Occurred()) {
- PyErr_SetString(PyExc_ValueError,
- "Key of options dict should be either a "
- "DecompressionParameter attribute or an int.");
return -1;
}
int value_v = PyLong_AsInt(value);
+ Py_DECREF(value);
if (value_v == -1 && PyErr_Occurred()) {
- PyErr_SetString(PyExc_ValueError,
- "Value of options dict should be an int.");
return -1;
}
/* Set parameter to compression context */
- zstd_ret = ZSTD_DCtx_setParameter(self->dctx, key_v, value_v);
+ size_t zstd_ret = ZSTD_DCtx_setParameter(self->dctx, key_v, value_v);
/* Check error */
if (ZSTD_isError(zstd_ret)) {
- set_parameter_error(mod_state, 0, key_v, value_v);
+ set_parameter_error(0, key_v, value_v);
return -1;
}
}
@@ -160,17 +148,13 @@ _zstd_load_impl(ZstdDecompressor *self, ZstdDict *zd,
}
else if (type == DICT_TYPE_UNDIGESTED) {
/* Load a dictionary */
- zstd_ret = ZSTD_DCtx_loadDictionary(
- self->dctx,
- PyBytes_AS_STRING(zd->dict_content),
- Py_SIZE(zd->dict_content));
+ zstd_ret = ZSTD_DCtx_loadDictionary(self->dctx, zd->dict_buffer,
+ zd->dict_len);
}
else if (type == DICT_TYPE_PREFIX) {
/* Load a prefix */
- zstd_ret = ZSTD_DCtx_refPrefix(
- self->dctx,
- PyBytes_AS_STRING(zd->dict_content),
- Py_SIZE(zd->dict_content));
+ zstd_ret = ZSTD_DCtx_refPrefix(self->dctx, zd->dict_buffer,
+ zd->dict_len);
}
else {
/* Impossible code path */
@@ -192,56 +176,17 @@ static int
_zstd_load_d_dict(ZstdDecompressor *self, PyObject *dict)
{
_zstd_state* mod_state = PyType_GetModuleState(Py_TYPE(self));
- if (mod_state == NULL) {
+ /* When decompressing, use digested dictionary by default. */
+ int type = DICT_TYPE_DIGESTED;
+ ZstdDict *zd = _Py_parse_zstd_dict(mod_state, dict, &type);
+ if (zd == NULL) {
return -1;
}
- ZstdDict *zd;
- int type, ret;
-
- /* Check ZstdDict */
- ret = PyObject_IsInstance(dict, (PyObject*)mod_state->ZstdDict_type);
- if (ret < 0) {
- return -1;
- }
- else if (ret > 0) {
- /* When decompressing, use digested dictionary by default. */
- zd = (ZstdDict*)dict;
- type = DICT_TYPE_DIGESTED;
- PyMutex_Lock(&zd->lock);
- ret = _zstd_load_impl(self, zd, mod_state, type);
- PyMutex_Unlock(&zd->lock);
- return ret;
- }
-
- /* Check (ZstdDict, type) */
- if (PyTuple_CheckExact(dict) && PyTuple_GET_SIZE(dict) == 2) {
- /* Check ZstdDict */
- ret = PyObject_IsInstance(PyTuple_GET_ITEM(dict, 0),
- (PyObject*)mod_state->ZstdDict_type);
- if (ret < 0) {
- return -1;
- }
- else if (ret > 0) {
- /* type == -1 may indicate an error. */
- type = PyLong_AsInt(PyTuple_GET_ITEM(dict, 1));
- if (type == DICT_TYPE_DIGESTED
- || type == DICT_TYPE_UNDIGESTED
- || type == DICT_TYPE_PREFIX)
- {
- assert(type >= 0);
- zd = (ZstdDict*)PyTuple_GET_ITEM(dict, 0);
- PyMutex_Lock(&zd->lock);
- ret = _zstd_load_impl(self, zd, mod_state, type);
- PyMutex_Unlock(&zd->lock);
- return ret;
- }
- }
- }
-
- /* Wrong type */
- PyErr_SetString(PyExc_TypeError,
- "zstd_dict argument should be ZstdDict object.");
- return -1;
+ int ret;
+ PyMutex_Lock(&zd->lock);
+ ret = _zstd_load_impl(self, zd, mod_state, type);
+ PyMutex_Unlock(&zd->lock);
+ return ret;
}
/*
@@ -292,9 +237,7 @@ decompress_lock_held(ZstdDecompressor *self, ZSTD_inBuffer *in,
/* Check error */
if (ZSTD_isError(zstd_ret)) {
_zstd_state* mod_state = PyType_GetModuleState(Py_TYPE(self));
- if (mod_state != NULL) {
- set_zstd_error(mod_state, ERR_DECOMPRESS, zstd_ret);
- }
+ set_zstd_error(mod_state, ERR_DECOMPRESS, zstd_ret);
goto error;
}
@@ -583,7 +526,7 @@ _zstd_ZstdDecompressor_new_impl(PyTypeObject *type, PyObject *zstd_dict,
self->dict = zstd_dict;
}
- /* Set option to decompression context */
+ /* Set options dictionary */
if (options != Py_None) {
if (_zstd_set_d_parameters(self, options) < 0) {
goto error;
diff --git a/Modules/_zstd/zstddict.c b/Modules/_zstd/zstddict.c
index e3e9e5d0645..14f74aaed46 100644
--- a/Modules/_zstd/zstddict.c
+++ b/Modules/_zstd/zstddict.c
@@ -15,7 +15,6 @@ class _zstd.ZstdDict "ZstdDict *" "&zstd_dict_type_spec"
#include "Python.h"
#include "_zstdmodule.h"
-#include "zstddict.h"
#include "clinic/zstddict.c.h"
#include "internal/pycore_lock.h" // PyMutex_IsLocked
@@ -26,7 +25,7 @@ class _zstd.ZstdDict "ZstdDict *" "&zstd_dict_type_spec"
/*[clinic input]
@classmethod
_zstd.ZstdDict.__new__ as _zstd_ZstdDict_new
- dict_content: object
+ dict_content: Py_buffer
The content of a Zstandard dictionary as a bytes-like object.
/
*
@@ -42,17 +41,25 @@ by multiple ZstdCompressor or ZstdDecompressor objects.
[clinic start generated code]*/
static PyObject *
-_zstd_ZstdDict_new_impl(PyTypeObject *type, PyObject *dict_content,
+_zstd_ZstdDict_new_impl(PyTypeObject *type, Py_buffer *dict_content,
int is_raw)
-/*[clinic end generated code: output=3ebff839cb3be6d7 input=6b5de413869ae878]*/
+/*[clinic end generated code: output=685b7406a48b0949 input=9e8c493e31c98383]*/
{
+ /* All dictionaries must be at least 8 bytes */
+ if (dict_content->len < 8) {
+ PyErr_SetString(PyExc_ValueError,
+ "Zstandard dictionary content too short "
+ "(must have at least eight bytes)");
+ return NULL;
+ }
+
ZstdDict* self = PyObject_GC_New(ZstdDict, type);
if (self == NULL) {
- goto error;
+ return NULL;
}
- self->dict_content = NULL;
self->d_dict = NULL;
+ self->dict_buffer = NULL;
self->dict_id = 0;
self->lock = (PyMutex){0};
@@ -62,39 +69,26 @@ _zstd_ZstdDict_new_impl(PyTypeObject *type, PyObject *dict_content,
goto error;
}
- /* Check dict_content's type */
- self->dict_content = PyBytes_FromObject(dict_content);
- if (self->dict_content == NULL) {
- PyErr_SetString(PyExc_TypeError,
- "dict_content argument should be bytes-like object.");
- goto error;
- }
-
- /* Both ordinary dictionary and "raw content" dictionary should
- at least 8 bytes */
- if (Py_SIZE(self->dict_content) < 8) {
- PyErr_SetString(PyExc_ValueError,
- "Zstandard dictionary content should at least "
- "8 bytes.");
+ self->dict_buffer = PyMem_Malloc(dict_content->len);
+ if (!self->dict_buffer) {
+ PyErr_NoMemory();
goto error;
}
+ memcpy(self->dict_buffer, dict_content->buf, dict_content->len);
+ self->dict_len = dict_content->len;
/* Get dict_id, 0 means "raw content" dictionary. */
- self->dict_id = ZSTD_getDictID_fromDict(
- PyBytes_AS_STRING(self->dict_content),
- Py_SIZE(self->dict_content));
+ self->dict_id = ZSTD_getDictID_fromDict(self->dict_buffer, self->dict_len);
/* Check validity for ordinary dictionary */
if (!is_raw && self->dict_id == 0) {
- char *msg = "Invalid Zstandard dictionary and is_raw not set.\n";
- PyErr_SetString(PyExc_ValueError, msg);
+ PyErr_SetString(PyExc_ValueError, "invalid Zstandard dictionary");
goto error;
}
- // Can only track self once self->dict_content is included
PyObject_GC_Track(self);
- return (PyObject*)self;
+ return (PyObject *)self;
error:
Py_XDECREF(self);
@@ -115,12 +109,12 @@ ZstdDict_dealloc(PyObject *ob)
assert(!PyMutex_IsLocked(&self->lock));
- /* Release dict_content after Free ZSTD_CDict/ZSTD_DDict instances */
- Py_CLEAR(self->dict_content);
+ /* Release dict_buffer after freeing ZSTD_CDict/ZSTD_DDict instances */
+ PyMem_Free(self->dict_buffer);
Py_CLEAR(self->c_dicts);
PyTypeObject *tp = Py_TYPE(self);
- PyObject_GC_Del(ob);
+ tp->tp_free(self);
Py_DECREF(tp);
}
@@ -131,27 +125,35 @@ PyDoc_STRVAR(ZstdDict_dictid_doc,
"The special value '0' means a 'raw content' dictionary,"
"without any restrictions on format or content.");
-PyDoc_STRVAR(ZstdDict_dictcontent_doc,
-"The content of a Zstandard dictionary, as a bytes object.");
-
static PyObject *
-ZstdDict_str(PyObject *ob)
+ZstdDict_repr(PyObject *ob)
{
ZstdDict *dict = ZstdDict_CAST(ob);
return PyUnicode_FromFormat("<ZstdDict dict_id=%u dict_size=%zd>",
- dict->dict_id, Py_SIZE(dict->dict_content));
+ (unsigned int)dict->dict_id, dict->dict_len);
}
static PyMemberDef ZstdDict_members[] = {
- {"dict_id", Py_T_UINT, offsetof(ZstdDict, dict_id), Py_READONLY,
- ZstdDict_dictid_doc},
- {"dict_content", Py_T_OBJECT_EX, offsetof(ZstdDict, dict_content),
- Py_READONLY, ZstdDict_dictcontent_doc},
+ {"dict_id", Py_T_UINT, offsetof(ZstdDict, dict_id), Py_READONLY, ZstdDict_dictid_doc},
{NULL}
};
/*[clinic input]
@getter
+_zstd.ZstdDict.dict_content
+
+The content of a Zstandard dictionary, as a bytes object.
+[clinic start generated code]*/
+
+static PyObject *
+_zstd_ZstdDict_dict_content_get_impl(ZstdDict *self)
+/*[clinic end generated code: output=0d05caa5b550eabb input=4ed526d1c151c596]*/
+{
+ return PyBytes_FromStringAndSize(self->dict_buffer, self->dict_len);
+}
+
+/*[clinic input]
+@getter
_zstd.ZstdDict.as_digested_dict
Load as a digested dictionary to compressor.
@@ -219,6 +221,7 @@ _zstd_ZstdDict_as_prefix_get_impl(ZstdDict *self)
}
static PyGetSetDef ZstdDict_getset[] = {
+ _ZSTD_ZSTDDICT_DICT_CONTENT_GETSETDEF
_ZSTD_ZSTDDICT_AS_DIGESTED_DICT_GETSETDEF
_ZSTD_ZSTDDICT_AS_UNDIGESTED_DICT_GETSETDEF
_ZSTD_ZSTDDICT_AS_PREFIX_GETSETDEF
@@ -229,8 +232,7 @@ static Py_ssize_t
ZstdDict_length(PyObject *ob)
{
ZstdDict *self = ZstdDict_CAST(ob);
- assert(PyBytes_Check(self->dict_content));
- return Py_SIZE(self->dict_content);
+ return self->dict_len;
}
static int
@@ -238,7 +240,6 @@ ZstdDict_traverse(PyObject *ob, visitproc visit, void *arg)
{
ZstdDict *self = ZstdDict_CAST(ob);
Py_VISIT(self->c_dicts);
- Py_VISIT(self->dict_content);
return 0;
}
@@ -246,7 +247,7 @@ static int
ZstdDict_clear(PyObject *ob)
{
ZstdDict *self = ZstdDict_CAST(ob);
- Py_CLEAR(self->dict_content);
+ Py_CLEAR(self->c_dicts);
return 0;
}
@@ -255,7 +256,7 @@ static PyType_Slot zstddict_slots[] = {
{Py_tp_getset, ZstdDict_getset},
{Py_tp_new, _zstd_ZstdDict_new},
{Py_tp_dealloc, ZstdDict_dealloc},
- {Py_tp_str, ZstdDict_str},
+ {Py_tp_repr, ZstdDict_repr},
{Py_tp_doc, (void *)_zstd_ZstdDict_new__doc__},
{Py_sq_length, ZstdDict_length},
{Py_tp_traverse, ZstdDict_traverse},
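Editor's note: ZstdDict now copies the caller's buffer into PyMem_Malloc()'d memory instead of keeping a bytes object alive, which is why tp_traverse/tp_clear no longer touch dict_content and dealloc frees the buffer only after the cached ZSTD_CDict/ZSTD_DDict objects. The copy step in general form (sketch, assumes Python.h and <string.h>):

    static char *
    copy_view_sketch(const Py_buffer *view, Py_ssize_t *len_out)
    {
        /* Request at least one byte so PyMem_Malloc(0) quirks don't matter. */
        char *copy = PyMem_Malloc(view->len > 0 ? (size_t)view->len : 1);
        if (copy == NULL) {
            PyErr_NoMemory();
            return NULL;
        }
        memcpy(copy, view->buf, (size_t)view->len);
        *len_out = view->len;
        return copy;                            /* owner must PyMem_Free() it */
    }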
diff --git a/Modules/_zstd/zstddict.h b/Modules/_zstd/zstddict.h
index dcba0f21852..4a403416dbd 100644
--- a/Modules/_zstd/zstddict.h
+++ b/Modules/_zstd/zstddict.h
@@ -15,8 +15,10 @@ typedef struct {
ZSTD_DDict *d_dict;
PyObject *c_dicts;
- /* Content of the dictionary, bytes object. */
- PyObject *dict_content;
+ /* Dictionary content. */
+ char *dict_buffer;
+ Py_ssize_t dict_len;
+
/* Dictionary id */
uint32_t dict_id;
diff --git a/Modules/blake2module.c b/Modules/blake2module.c
index f9acc57f1b2..07aa89f573f 100644
--- a/Modules/blake2module.c
+++ b/Modules/blake2module.c
@@ -655,8 +655,7 @@ error:
/*[clinic input]
@classmethod
_blake2.blake2b.__new__ as py_blake2b_new
- data: object(c_default="NULL") = b''
- /
+ data as data_obj: object(c_default="NULL") = b''
*
digest_size: int(c_default="HACL_HASH_BLAKE2B_OUT_BYTES") = _blake2.blake2b.MAX_DIGEST_SIZE
key: Py_buffer(c_default="NULL", py_default="b''") = None
@@ -670,26 +669,31 @@ _blake2.blake2b.__new__ as py_blake2b_new
inner_size: int = 0
last_node: bool = False
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Return a new BLAKE2b hash object.
[clinic start generated code]*/
static PyObject *
-py_blake2b_new_impl(PyTypeObject *type, PyObject *data, int digest_size,
+py_blake2b_new_impl(PyTypeObject *type, PyObject *data_obj, int digest_size,
Py_buffer *key, Py_buffer *salt, Py_buffer *person,
int fanout, int depth, unsigned long leaf_size,
unsigned long long node_offset, int node_depth,
- int inner_size, int last_node, int usedforsecurity)
-/*[clinic end generated code: output=32bfd8f043c6896f input=8fee2b7b11428b2d]*/
+ int inner_size, int last_node, int usedforsecurity,
+ PyObject *string)
+/*[clinic end generated code: output=de64bd850606b6a0 input=78cf60a2922d2f90]*/
{
+ PyObject *data;
+ if (_Py_hashlib_data_argument(&data, data_obj, string) < 0) {
+ return NULL;
+ }
return py_blake2b_or_s_new(type, data, digest_size, key, salt, person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node, usedforsecurity);
}
/*[clinic input]
@classmethod
_blake2.blake2s.__new__ as py_blake2s_new
- data: object(c_default="NULL") = b''
- /
+ data as data_obj: object(c_default="NULL") = b''
*
digest_size: int(c_default="HACL_HASH_BLAKE2S_OUT_BYTES") = _blake2.blake2s.MAX_DIGEST_SIZE
key: Py_buffer(c_default="NULL", py_default="b''") = None
@@ -703,18 +707,24 @@ _blake2.blake2s.__new__ as py_blake2s_new
inner_size: int = 0
last_node: bool = False
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Return a new BLAKE2s hash object.
[clinic start generated code]*/
static PyObject *
-py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size,
+py_blake2s_new_impl(PyTypeObject *type, PyObject *data_obj, int digest_size,
Py_buffer *key, Py_buffer *salt, Py_buffer *person,
int fanout, int depth, unsigned long leaf_size,
unsigned long long node_offset, int node_depth,
- int inner_size, int last_node, int usedforsecurity)
-/*[clinic end generated code: output=556181f73905c686 input=8165a11980eac7f3]*/
+ int inner_size, int last_node, int usedforsecurity,
+ PyObject *string)
+/*[clinic end generated code: output=582a0c4295cc3a3c input=6843d6332eefd295]*/
{
+ PyObject *data;
+ if (_Py_hashlib_data_argument(&data, data_obj, string) < 0) {
+ return NULL;
+ }
return py_blake2b_or_s_new(type, data, digest_size, key, salt, person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node, usedforsecurity);
}
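Editor's note: dropping the '/' makes data passable by keyword, while string is kept for backward compatibility; the two are reconciled by _Py_hashlib_data_argument(), a shared hashlib helper defined elsewhere in the tree. A hypothetical sketch of that reconciliation, not the helper's actual body (assumes Python.h):

    static int
    pick_data_argument_sketch(PyObject **res, PyObject *data, PyObject *string)
    {
        if (data != NULL && string != NULL) {
            PyErr_SetString(PyExc_TypeError,
                            "'data' and 'string' are mutually exclusive");
            return -1;
        }
        /* Either may be NULL, meaning "no initial data". */
        *res = (data != NULL) ? data : string;
        return 0;
    }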
diff --git a/Modules/clinic/_cursesmodule.c.h b/Modules/clinic/_cursesmodule.c.h
index 552360eb80a..49c864318c8 100644
--- a/Modules/clinic/_cursesmodule.c.h
+++ b/Modules/clinic/_cursesmodule.c.h
@@ -733,23 +733,13 @@ PyDoc_STRVAR(_curses_window_getbkgd__doc__,
#define _CURSES_WINDOW_GETBKGD_METHODDEF \
{"getbkgd", (PyCFunction)_curses_window_getbkgd, METH_NOARGS, _curses_window_getbkgd__doc__},
-static long
+static PyObject *
_curses_window_getbkgd_impl(PyCursesWindowObject *self);
static PyObject *
_curses_window_getbkgd(PyObject *self, PyObject *Py_UNUSED(ignored))
{
- PyObject *return_value = NULL;
- long _return_value;
-
- _return_value = _curses_window_getbkgd_impl((PyCursesWindowObject *)self);
- if ((_return_value == -1) && PyErr_Occurred()) {
- goto exit;
- }
- return_value = PyLong_FromLong(_return_value);
-
-exit:
- return return_value;
+ return _curses_window_getbkgd_impl((PyCursesWindowObject *)self);
}
PyDoc_STRVAR(_curses_window_getch__doc__,
@@ -768,7 +758,7 @@ PyDoc_STRVAR(_curses_window_getch__doc__,
#define _CURSES_WINDOW_GETCH_METHODDEF \
{"getch", (PyCFunction)_curses_window_getch, METH_VARARGS, _curses_window_getch__doc__},
-static int
+static PyObject *
_curses_window_getch_impl(PyCursesWindowObject *self, int group_right_1,
int y, int x);
@@ -779,7 +769,6 @@ _curses_window_getch(PyObject *self, PyObject *args)
int group_right_1 = 0;
int y = 0;
int x = 0;
- int _return_value;
switch (PyTuple_GET_SIZE(args)) {
case 0:
@@ -794,11 +783,7 @@ _curses_window_getch(PyObject *self, PyObject *args)
PyErr_SetString(PyExc_TypeError, "_curses.window.getch requires 0 to 2 arguments");
goto exit;
}
- _return_value = _curses_window_getch_impl((PyCursesWindowObject *)self, group_right_1, y, x);
- if ((_return_value == -1) && PyErr_Occurred()) {
- goto exit;
- }
- return_value = PyLong_FromLong((long)_return_value);
+ return_value = _curses_window_getch_impl((PyCursesWindowObject *)self, group_right_1, y, x);
exit:
return return_value;
@@ -1055,7 +1040,7 @@ PyDoc_STRVAR(_curses_window_inch__doc__,
#define _CURSES_WINDOW_INCH_METHODDEF \
{"inch", (PyCFunction)_curses_window_inch, METH_VARARGS, _curses_window_inch__doc__},
-static unsigned long
+static PyObject *
_curses_window_inch_impl(PyCursesWindowObject *self, int group_right_1,
int y, int x);
@@ -1066,7 +1051,6 @@ _curses_window_inch(PyObject *self, PyObject *args)
int group_right_1 = 0;
int y = 0;
int x = 0;
- unsigned long _return_value;
switch (PyTuple_GET_SIZE(args)) {
case 0:
@@ -1081,11 +1065,7 @@ _curses_window_inch(PyObject *self, PyObject *args)
PyErr_SetString(PyExc_TypeError, "_curses.window.inch requires 0 to 2 arguments");
goto exit;
}
- _return_value = _curses_window_inch_impl((PyCursesWindowObject *)self, group_right_1, y, x);
- if ((_return_value == (unsigned long)-1) && PyErr_Occurred()) {
- goto exit;
- }
- return_value = PyLong_FromUnsignedLong(_return_value);
+ return_value = _curses_window_inch_impl((PyCursesWindowObject *)self, group_right_1, y, x);
exit:
return return_value;
@@ -4440,4 +4420,4 @@ _curses_has_extended_color_support(PyObject *module, PyObject *Py_UNUSED(ignored
#ifndef _CURSES_ASSUME_DEFAULT_COLORS_METHODDEF
#define _CURSES_ASSUME_DEFAULT_COLORS_METHODDEF
#endif /* !defined(_CURSES_ASSUME_DEFAULT_COLORS_METHODDEF) */
-/*[clinic end generated code: output=42b2923d88c8d0f6 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=a083473003179b30 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h
index 59ab46ca3f0..61ea10e2a48 100644
--- a/Modules/clinic/_hashopenssl.c.h
+++ b/Modules/clinic/_hashopenssl.c.h
@@ -10,98 +10,98 @@ preserve
#include "pycore_long.h" // _PyLong_UnsignedLong_Converter()
#include "pycore_modsupport.h" // _PyArg_UnpackKeywords()
-PyDoc_STRVAR(EVP_copy__doc__,
+PyDoc_STRVAR(_hashlib_HASH_copy__doc__,
"copy($self, /)\n"
"--\n"
"\n"
"Return a copy of the hash object.");
-#define EVP_COPY_METHODDEF \
- {"copy", (PyCFunction)EVP_copy, METH_NOARGS, EVP_copy__doc__},
+#define _HASHLIB_HASH_COPY_METHODDEF \
+ {"copy", (PyCFunction)_hashlib_HASH_copy, METH_NOARGS, _hashlib_HASH_copy__doc__},
static PyObject *
-EVP_copy_impl(EVPobject *self);
+_hashlib_HASH_copy_impl(HASHobject *self);
static PyObject *
-EVP_copy(PyObject *self, PyObject *Py_UNUSED(ignored))
+_hashlib_HASH_copy(PyObject *self, PyObject *Py_UNUSED(ignored))
{
- return EVP_copy_impl((EVPobject *)self);
+ return _hashlib_HASH_copy_impl((HASHobject *)self);
}
-PyDoc_STRVAR(EVP_digest__doc__,
+PyDoc_STRVAR(_hashlib_HASH_digest__doc__,
"digest($self, /)\n"
"--\n"
"\n"
"Return the digest value as a bytes object.");
-#define EVP_DIGEST_METHODDEF \
- {"digest", (PyCFunction)EVP_digest, METH_NOARGS, EVP_digest__doc__},
+#define _HASHLIB_HASH_DIGEST_METHODDEF \
+ {"digest", (PyCFunction)_hashlib_HASH_digest, METH_NOARGS, _hashlib_HASH_digest__doc__},
static PyObject *
-EVP_digest_impl(EVPobject *self);
+_hashlib_HASH_digest_impl(HASHobject *self);
static PyObject *
-EVP_digest(PyObject *self, PyObject *Py_UNUSED(ignored))
+_hashlib_HASH_digest(PyObject *self, PyObject *Py_UNUSED(ignored))
{
- return EVP_digest_impl((EVPobject *)self);
+ return _hashlib_HASH_digest_impl((HASHobject *)self);
}
-PyDoc_STRVAR(EVP_hexdigest__doc__,
+PyDoc_STRVAR(_hashlib_HASH_hexdigest__doc__,
"hexdigest($self, /)\n"
"--\n"
"\n"
"Return the digest value as a string of hexadecimal digits.");
-#define EVP_HEXDIGEST_METHODDEF \
- {"hexdigest", (PyCFunction)EVP_hexdigest, METH_NOARGS, EVP_hexdigest__doc__},
+#define _HASHLIB_HASH_HEXDIGEST_METHODDEF \
+ {"hexdigest", (PyCFunction)_hashlib_HASH_hexdigest, METH_NOARGS, _hashlib_HASH_hexdigest__doc__},
static PyObject *
-EVP_hexdigest_impl(EVPobject *self);
+_hashlib_HASH_hexdigest_impl(HASHobject *self);
static PyObject *
-EVP_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored))
+_hashlib_HASH_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored))
{
- return EVP_hexdigest_impl((EVPobject *)self);
+ return _hashlib_HASH_hexdigest_impl((HASHobject *)self);
}
-PyDoc_STRVAR(EVP_update__doc__,
+PyDoc_STRVAR(_hashlib_HASH_update__doc__,
"update($self, obj, /)\n"
"--\n"
"\n"
"Update this hash object\'s state with the provided string.");
-#define EVP_UPDATE_METHODDEF \
- {"update", (PyCFunction)EVP_update, METH_O, EVP_update__doc__},
+#define _HASHLIB_HASH_UPDATE_METHODDEF \
+ {"update", (PyCFunction)_hashlib_HASH_update, METH_O, _hashlib_HASH_update__doc__},
static PyObject *
-EVP_update_impl(EVPobject *self, PyObject *obj);
+_hashlib_HASH_update_impl(HASHobject *self, PyObject *obj);
static PyObject *
-EVP_update(PyObject *self, PyObject *obj)
+_hashlib_HASH_update(PyObject *self, PyObject *obj)
{
PyObject *return_value = NULL;
- return_value = EVP_update_impl((EVPobject *)self, obj);
+ return_value = _hashlib_HASH_update_impl((HASHobject *)self, obj);
return return_value;
}
#if defined(PY_OPENSSL_HAS_SHAKE)
-PyDoc_STRVAR(EVPXOF_digest__doc__,
+PyDoc_STRVAR(_hashlib_HASHXOF_digest__doc__,
"digest($self, /, length)\n"
"--\n"
"\n"
"Return the digest value as a bytes object.");
-#define EVPXOF_DIGEST_METHODDEF \
- {"digest", _PyCFunction_CAST(EVPXOF_digest), METH_FASTCALL|METH_KEYWORDS, EVPXOF_digest__doc__},
+#define _HASHLIB_HASHXOF_DIGEST_METHODDEF \
+ {"digest", _PyCFunction_CAST(_hashlib_HASHXOF_digest), METH_FASTCALL|METH_KEYWORDS, _hashlib_HASHXOF_digest__doc__},
static PyObject *
-EVPXOF_digest_impl(EVPobject *self, Py_ssize_t length);
+_hashlib_HASHXOF_digest_impl(HASHobject *self, Py_ssize_t length);
static PyObject *
-EVPXOF_digest(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+_hashlib_HASHXOF_digest(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
@@ -151,7 +151,7 @@ EVPXOF_digest(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject
}
length = ival;
}
- return_value = EVPXOF_digest_impl((EVPobject *)self, length);
+ return_value = _hashlib_HASHXOF_digest_impl((HASHobject *)self, length);
exit:
return return_value;
@@ -161,20 +161,20 @@ exit:
#if defined(PY_OPENSSL_HAS_SHAKE)
-PyDoc_STRVAR(EVPXOF_hexdigest__doc__,
+PyDoc_STRVAR(_hashlib_HASHXOF_hexdigest__doc__,
"hexdigest($self, /, length)\n"
"--\n"
"\n"
"Return the digest value as a string of hexadecimal digits.");
-#define EVPXOF_HEXDIGEST_METHODDEF \
- {"hexdigest", _PyCFunction_CAST(EVPXOF_hexdigest), METH_FASTCALL|METH_KEYWORDS, EVPXOF_hexdigest__doc__},
+#define _HASHLIB_HASHXOF_HEXDIGEST_METHODDEF \
+ {"hexdigest", _PyCFunction_CAST(_hashlib_HASHXOF_hexdigest), METH_FASTCALL|METH_KEYWORDS, _hashlib_HASHXOF_hexdigest__doc__},
static PyObject *
-EVPXOF_hexdigest_impl(EVPobject *self, Py_ssize_t length);
+_hashlib_HASHXOF_hexdigest_impl(HASHobject *self, Py_ssize_t length);
static PyObject *
-EVPXOF_hexdigest(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+_hashlib_HASHXOF_hexdigest(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
@@ -224,7 +224,7 @@ EVPXOF_hexdigest(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObje
}
length = ival;
}
- return_value = EVPXOF_hexdigest_impl((EVPobject *)self, length);
+ return_value = _hashlib_HASHXOF_hexdigest_impl((HASHobject *)self, length);
exit:
return return_value;
@@ -232,8 +232,8 @@ exit:
#endif /* defined(PY_OPENSSL_HAS_SHAKE) */
-PyDoc_STRVAR(EVP_new__doc__,
-"new($module, /, name, string=b\'\', *, usedforsecurity=True)\n"
+PyDoc_STRVAR(_hashlib_HASH_new__doc__,
+"new($module, /, name, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new hash object using the named algorithm.\n"
@@ -243,20 +243,20 @@ PyDoc_STRVAR(EVP_new__doc__,
"\n"
"The MD5 and SHA1 algorithms are always supported.");
-#define EVP_NEW_METHODDEF \
- {"new", _PyCFunction_CAST(EVP_new), METH_FASTCALL|METH_KEYWORDS, EVP_new__doc__},
+#define _HASHLIB_HASH_NEW_METHODDEF \
+ {"new", _PyCFunction_CAST(_hashlib_HASH_new), METH_FASTCALL|METH_KEYWORDS, _hashlib_HASH_new__doc__},
static PyObject *
-EVP_new_impl(PyObject *module, PyObject *name_obj, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_HASH_new_impl(PyObject *module, const char *name, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
-EVP_new(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
+_hashlib_HASH_new(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 3
+ #define NUM_KEYWORDS 4
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -265,7 +265,7 @@ EVP_new(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwn
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(name), &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(name), &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -274,30 +274,43 @@ EVP_new(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwn
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"name", "string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"name", "data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "new",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[3];
+ PyObject *argsbuf[4];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1;
- PyObject *name_obj;
- PyObject *data_obj = NULL;
+ const char *name;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 1, /*maxpos*/ 2, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
if (!args) {
goto exit;
}
- name_obj = args[0];
+ if (!PyUnicode_Check(args[0])) {
+ _PyArg_BadArgument("new", "argument 'name'", "str", args[0]);
+ goto exit;
+ }
+ Py_ssize_t name_length;
+ name = PyUnicode_AsUTF8AndSize(args[0], &name_length);
+ if (name == NULL) {
+ goto exit;
+ }
+ if (strlen(name) != (size_t)name_length) {
+ PyErr_SetString(PyExc_ValueError, "embedded null character");
+ goto exit;
+ }
if (!noptargs) {
goto skip_optional_pos;
}
if (args[1]) {
- data_obj = args[1];
+ data = args[1];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -306,19 +319,25 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[2]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[2]) {
+ usedforsecurity = PyObject_IsTrue(args[2]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[3];
skip_optional_kwonly:
- return_value = EVP_new_impl(module, name_obj, data_obj, usedforsecurity);
+ return_value = _hashlib_HASH_new_impl(module, name, data, usedforsecurity, string);
exit:
return return_value;
}
PyDoc_STRVAR(_hashlib_openssl_md5__doc__,
-"openssl_md5($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_md5($module, /, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Returns a md5 hash object; optionally initialized with a string");
@@ -327,8 +346,8 @@ PyDoc_STRVAR(_hashlib_openssl_md5__doc__,
{"openssl_md5", _PyCFunction_CAST(_hashlib_openssl_md5), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_md5__doc__},
static PyObject *
-_hashlib_openssl_md5_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_md5_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -336,7 +355,7 @@ _hashlib_openssl_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -345,7 +364,7 @@ _hashlib_openssl_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -354,17 +373,18 @@ _hashlib_openssl_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_md5",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -375,7 +395,7 @@ _hashlib_openssl_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -384,19 +404,25 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_md5_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_md5_impl(module, data, usedforsecurity, string);
exit:
return return_value;
}
PyDoc_STRVAR(_hashlib_openssl_sha1__doc__,
-"openssl_sha1($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha1($module, /, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Returns a sha1 hash object; optionally initialized with a string");
@@ -405,8 +431,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha1__doc__,
{"openssl_sha1", _PyCFunction_CAST(_hashlib_openssl_sha1), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha1__doc__},
static PyObject *
-_hashlib_openssl_sha1_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha1_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -414,7 +440,7 @@ _hashlib_openssl_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -423,7 +449,7 @@ _hashlib_openssl_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -432,17 +458,18 @@ _hashlib_openssl_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha1",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -453,7 +480,7 @@ _hashlib_openssl_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -462,19 +489,26 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha1_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha1_impl(module, data, usedforsecurity, string);
exit:
return return_value;
}
PyDoc_STRVAR(_hashlib_openssl_sha224__doc__,
-"openssl_sha224($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha224($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a sha224 hash object; optionally initialized with a string");
@@ -483,8 +517,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha224__doc__,
{"openssl_sha224", _PyCFunction_CAST(_hashlib_openssl_sha224), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha224__doc__},
static PyObject *
-_hashlib_openssl_sha224_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha224_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -492,7 +526,7 @@ _hashlib_openssl_sha224(PyObject *module, PyObject *const *args, Py_ssize_t narg
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -501,7 +535,7 @@ _hashlib_openssl_sha224(PyObject *module, PyObject *const *args, Py_ssize_t narg
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -510,17 +544,18 @@ _hashlib_openssl_sha224(PyObject *module, PyObject *const *args, Py_ssize_t narg
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha224",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -531,7 +566,7 @@ _hashlib_openssl_sha224(PyObject *module, PyObject *const *args, Py_ssize_t narg
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -540,19 +575,26 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha224_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha224_impl(module, data, usedforsecurity, string);
exit:
return return_value;
}
PyDoc_STRVAR(_hashlib_openssl_sha256__doc__,
-"openssl_sha256($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha256($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a sha256 hash object; optionally initialized with a string");
@@ -561,8 +603,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha256__doc__,
{"openssl_sha256", _PyCFunction_CAST(_hashlib_openssl_sha256), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha256__doc__},
static PyObject *
-_hashlib_openssl_sha256_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha256_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -570,7 +612,7 @@ _hashlib_openssl_sha256(PyObject *module, PyObject *const *args, Py_ssize_t narg
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -579,7 +621,7 @@ _hashlib_openssl_sha256(PyObject *module, PyObject *const *args, Py_ssize_t narg
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -588,17 +630,18 @@ _hashlib_openssl_sha256(PyObject *module, PyObject *const *args, Py_ssize_t narg
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha256",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -609,7 +652,7 @@ _hashlib_openssl_sha256(PyObject *module, PyObject *const *args, Py_ssize_t narg
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -618,19 +661,26 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha256_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha256_impl(module, data, usedforsecurity, string);
exit:
return return_value;
}
PyDoc_STRVAR(_hashlib_openssl_sha384__doc__,
-"openssl_sha384($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha384($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a sha384 hash object; optionally initialized with a string");
@@ -639,8 +689,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha384__doc__,
{"openssl_sha384", _PyCFunction_CAST(_hashlib_openssl_sha384), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha384__doc__},
static PyObject *
-_hashlib_openssl_sha384_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha384_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -648,7 +698,7 @@ _hashlib_openssl_sha384(PyObject *module, PyObject *const *args, Py_ssize_t narg
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -657,7 +707,7 @@ _hashlib_openssl_sha384(PyObject *module, PyObject *const *args, Py_ssize_t narg
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -666,17 +716,18 @@ _hashlib_openssl_sha384(PyObject *module, PyObject *const *args, Py_ssize_t narg
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha384",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -687,7 +738,7 @@ _hashlib_openssl_sha384(PyObject *module, PyObject *const *args, Py_ssize_t narg
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -696,19 +747,26 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha384_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha384_impl(module, data, usedforsecurity, string);
exit:
return return_value;
}
PyDoc_STRVAR(_hashlib_openssl_sha512__doc__,
-"openssl_sha512($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha512($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a sha512 hash object; optionally initialized with a string");
@@ -717,8 +775,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha512__doc__,
{"openssl_sha512", _PyCFunction_CAST(_hashlib_openssl_sha512), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha512__doc__},
static PyObject *
-_hashlib_openssl_sha512_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha512_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -726,7 +784,7 @@ _hashlib_openssl_sha512(PyObject *module, PyObject *const *args, Py_ssize_t narg
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -735,7 +793,7 @@ _hashlib_openssl_sha512(PyObject *module, PyObject *const *args, Py_ssize_t narg
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -744,17 +802,18 @@ _hashlib_openssl_sha512(PyObject *module, PyObject *const *args, Py_ssize_t narg
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha512",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -765,7 +824,7 @@ _hashlib_openssl_sha512(PyObject *module, PyObject *const *args, Py_ssize_t narg
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -774,12 +833,18 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha512_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha512_impl(module, data, usedforsecurity, string);
exit:
return return_value;
@@ -788,7 +853,8 @@ exit:
#if defined(PY_OPENSSL_HAS_SHA3)
PyDoc_STRVAR(_hashlib_openssl_sha3_224__doc__,
-"openssl_sha3_224($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha3_224($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a sha3-224 hash object; optionally initialized with a string");
@@ -797,8 +863,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha3_224__doc__,
{"openssl_sha3_224", _PyCFunction_CAST(_hashlib_openssl_sha3_224), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha3_224__doc__},
static PyObject *
-_hashlib_openssl_sha3_224_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha3_224_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha3_224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -806,7 +872,7 @@ _hashlib_openssl_sha3_224(PyObject *module, PyObject *const *args, Py_ssize_t na
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -815,7 +881,7 @@ _hashlib_openssl_sha3_224(PyObject *module, PyObject *const *args, Py_ssize_t na
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -824,17 +890,18 @@ _hashlib_openssl_sha3_224(PyObject *module, PyObject *const *args, Py_ssize_t na
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha3_224",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -845,7 +912,7 @@ _hashlib_openssl_sha3_224(PyObject *module, PyObject *const *args, Py_ssize_t na
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -854,12 +921,18 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha3_224_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha3_224_impl(module, data, usedforsecurity, string);
exit:
return return_value;
@@ -870,7 +943,8 @@ exit:
#if defined(PY_OPENSSL_HAS_SHA3)
PyDoc_STRVAR(_hashlib_openssl_sha3_256__doc__,
-"openssl_sha3_256($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha3_256($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a sha3-256 hash object; optionally initialized with a string");
@@ -879,8 +953,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha3_256__doc__,
{"openssl_sha3_256", _PyCFunction_CAST(_hashlib_openssl_sha3_256), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha3_256__doc__},
static PyObject *
-_hashlib_openssl_sha3_256_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha3_256_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha3_256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -888,7 +962,7 @@ _hashlib_openssl_sha3_256(PyObject *module, PyObject *const *args, Py_ssize_t na
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -897,7 +971,7 @@ _hashlib_openssl_sha3_256(PyObject *module, PyObject *const *args, Py_ssize_t na
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -906,17 +980,18 @@ _hashlib_openssl_sha3_256(PyObject *module, PyObject *const *args, Py_ssize_t na
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha3_256",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -927,7 +1002,7 @@ _hashlib_openssl_sha3_256(PyObject *module, PyObject *const *args, Py_ssize_t na
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -936,12 +1011,18 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha3_256_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha3_256_impl(module, data, usedforsecurity, string);
exit:
return return_value;
@@ -952,7 +1033,8 @@ exit:
#if defined(PY_OPENSSL_HAS_SHA3)
PyDoc_STRVAR(_hashlib_openssl_sha3_384__doc__,
-"openssl_sha3_384($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha3_384($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a sha3-384 hash object; optionally initialized with a string");
@@ -961,8 +1043,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha3_384__doc__,
{"openssl_sha3_384", _PyCFunction_CAST(_hashlib_openssl_sha3_384), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha3_384__doc__},
static PyObject *
-_hashlib_openssl_sha3_384_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha3_384_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha3_384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -970,7 +1052,7 @@ _hashlib_openssl_sha3_384(PyObject *module, PyObject *const *args, Py_ssize_t na
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -979,7 +1061,7 @@ _hashlib_openssl_sha3_384(PyObject *module, PyObject *const *args, Py_ssize_t na
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -988,17 +1070,18 @@ _hashlib_openssl_sha3_384(PyObject *module, PyObject *const *args, Py_ssize_t na
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha3_384",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -1009,7 +1092,7 @@ _hashlib_openssl_sha3_384(PyObject *module, PyObject *const *args, Py_ssize_t na
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -1018,12 +1101,18 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha3_384_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha3_384_impl(module, data, usedforsecurity, string);
exit:
return return_value;
@@ -1034,7 +1123,8 @@ exit:
#if defined(PY_OPENSSL_HAS_SHA3)
PyDoc_STRVAR(_hashlib_openssl_sha3_512__doc__,
-"openssl_sha3_512($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha3_512($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a sha3-512 hash object; optionally initialized with a string");
@@ -1043,8 +1133,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha3_512__doc__,
{"openssl_sha3_512", _PyCFunction_CAST(_hashlib_openssl_sha3_512), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha3_512__doc__},
static PyObject *
-_hashlib_openssl_sha3_512_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha3_512_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha3_512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -1052,7 +1142,7 @@ _hashlib_openssl_sha3_512(PyObject *module, PyObject *const *args, Py_ssize_t na
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -1061,7 +1151,7 @@ _hashlib_openssl_sha3_512(PyObject *module, PyObject *const *args, Py_ssize_t na
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -1070,17 +1160,18 @@ _hashlib_openssl_sha3_512(PyObject *module, PyObject *const *args, Py_ssize_t na
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha3_512",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -1091,7 +1182,7 @@ _hashlib_openssl_sha3_512(PyObject *module, PyObject *const *args, Py_ssize_t na
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -1100,12 +1191,18 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha3_512_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha3_512_impl(module, data, usedforsecurity, string);
exit:
return return_value;
@@ -1116,7 +1213,8 @@ exit:
#if defined(PY_OPENSSL_HAS_SHAKE)
PyDoc_STRVAR(_hashlib_openssl_shake_128__doc__,
-"openssl_shake_128($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_shake_128($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a shake-128 variable hash object; optionally initialized with a string");
@@ -1125,8 +1223,8 @@ PyDoc_STRVAR(_hashlib_openssl_shake_128__doc__,
{"openssl_shake_128", _PyCFunction_CAST(_hashlib_openssl_shake_128), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_shake_128__doc__},
static PyObject *
-_hashlib_openssl_shake_128_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_shake_128_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_shake_128(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -1134,7 +1232,7 @@ _hashlib_openssl_shake_128(PyObject *module, PyObject *const *args, Py_ssize_t n
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -1143,7 +1241,7 @@ _hashlib_openssl_shake_128(PyObject *module, PyObject *const *args, Py_ssize_t n
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -1152,17 +1250,18 @@ _hashlib_openssl_shake_128(PyObject *module, PyObject *const *args, Py_ssize_t n
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_shake_128",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -1173,7 +1272,7 @@ _hashlib_openssl_shake_128(PyObject *module, PyObject *const *args, Py_ssize_t n
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -1182,12 +1281,18 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_shake_128_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_shake_128_impl(module, data, usedforsecurity, string);
exit:
return return_value;
@@ -1198,7 +1303,8 @@ exit:
#if defined(PY_OPENSSL_HAS_SHAKE)
PyDoc_STRVAR(_hashlib_openssl_shake_256__doc__,
-"openssl_shake_256($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_shake_256($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a shake-256 variable hash object; optionally initialized with a string");
@@ -1207,8 +1313,8 @@ PyDoc_STRVAR(_hashlib_openssl_shake_256__doc__,
{"openssl_shake_256", _PyCFunction_CAST(_hashlib_openssl_shake_256), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_shake_256__doc__},
static PyObject *
-_hashlib_openssl_shake_256_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_shake_256_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_shake_256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -1216,7 +1322,7 @@ _hashlib_openssl_shake_256(PyObject *module, PyObject *const *args, Py_ssize_t n
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -1225,7 +1331,7 @@ _hashlib_openssl_shake_256(PyObject *module, PyObject *const *args, Py_ssize_t n
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -1234,17 +1340,18 @@ _hashlib_openssl_shake_256(PyObject *module, PyObject *const *args, Py_ssize_t n
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_shake_256",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -1255,7 +1362,7 @@ _hashlib_openssl_shake_256(PyObject *module, PyObject *const *args, Py_ssize_t n
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -1264,12 +1371,18 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_shake_256_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_shake_256_impl(module, data, usedforsecurity, string);
exit:
return return_value;
@@ -1836,13 +1949,13 @@ exit:
return return_value;
}
-#ifndef EVPXOF_DIGEST_METHODDEF
- #define EVPXOF_DIGEST_METHODDEF
-#endif /* !defined(EVPXOF_DIGEST_METHODDEF) */
+#ifndef _HASHLIB_HASHXOF_DIGEST_METHODDEF
+ #define _HASHLIB_HASHXOF_DIGEST_METHODDEF
+#endif /* !defined(_HASHLIB_HASHXOF_DIGEST_METHODDEF) */
-#ifndef EVPXOF_HEXDIGEST_METHODDEF
- #define EVPXOF_HEXDIGEST_METHODDEF
-#endif /* !defined(EVPXOF_HEXDIGEST_METHODDEF) */
+#ifndef _HASHLIB_HASHXOF_HEXDIGEST_METHODDEF
+ #define _HASHLIB_HASHXOF_HEXDIGEST_METHODDEF
+#endif /* !defined(_HASHLIB_HASHXOF_HEXDIGEST_METHODDEF) */
#ifndef _HASHLIB_OPENSSL_SHA3_224_METHODDEF
#define _HASHLIB_OPENSSL_SHA3_224_METHODDEF
@@ -1871,4 +1984,4 @@ exit:
#ifndef _HASHLIB_SCRYPT_METHODDEF
#define _HASHLIB_SCRYPT_METHODDEF
#endif /* !defined(_HASHLIB_SCRYPT_METHODDEF) */
-/*[clinic end generated code: output=2c78822e38be64a8 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=29f4aaf01714778e input=a9049054013a1b77]*/
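The regenerated _hashlibopenssl.c.h above renames the old EVP*/EVPXOF* clinic symbols to _hashlib_HASH*/_hashlib_HASHXOF* and reworks argument parsing so the buffer argument is named `data` (positional or keyword) while a keyword-only `string` parameter is kept as a backward-compatibility alias. A minimal Python-level sketch against the internal _hashlib module whose generated signatures are shown above (how `data` and `string` interact is handled in the impl functions, not in this header, so that part is an assumption):

    import _hashlib   # internal module; the public hashlib wrappers forward to it

    h1 = _hashlib.new("sha256", b"payload")        # "data" may be given positionally
    h2 = _hashlib.new("sha256", data=b"payload")   # ... or as a keyword
    assert h1.hexdigest() == h2.hexdigest()

    # "string" is the keyword-only backward-compatibility alias for the buffer;
    # rejecting "data" and "string" together is assumed to happen in the impl.
    h3 = _hashlib.openssl_sha256(string=b"payload")
    assert h3.hexdigest() == h2.hexdigest()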
diff --git a/Modules/clinic/_randommodule.c.h b/Modules/clinic/_randommodule.c.h
index 1e989e970c9..2563a16aea0 100644
--- a/Modules/clinic/_randommodule.c.h
+++ b/Modules/clinic/_randommodule.c.h
@@ -3,6 +3,7 @@ preserve
[clinic start generated code]*/
#include "pycore_critical_section.h"// Py_BEGIN_CRITICAL_SECTION()
+#include "pycore_long.h" // _PyLong_UInt64_Converter()
#include "pycore_modsupport.h" // _PyArg_CheckPositional()
PyDoc_STRVAR(_random_Random_random__doc__,
@@ -124,16 +125,15 @@ PyDoc_STRVAR(_random_Random_getrandbits__doc__,
{"getrandbits", (PyCFunction)_random_Random_getrandbits, METH_O, _random_Random_getrandbits__doc__},
static PyObject *
-_random_Random_getrandbits_impl(RandomObject *self, int k);
+_random_Random_getrandbits_impl(RandomObject *self, uint64_t k);
static PyObject *
_random_Random_getrandbits(PyObject *self, PyObject *arg)
{
PyObject *return_value = NULL;
- int k;
+ uint64_t k;
- k = PyLong_AsInt(arg);
- if (k == -1 && PyErr_Occurred()) {
+ if (!_PyLong_UInt64_Converter(arg, &k)) {
goto exit;
}
Py_BEGIN_CRITICAL_SECTION(self);
@@ -143,4 +143,4 @@ _random_Random_getrandbits(PyObject *self, PyObject *arg)
exit:
return return_value;
}
-/*[clinic end generated code: output=4458b5a69201ebea input=a9049054013a1b77]*/
+/*[clinic end generated code: output=7ce97b2194eecaf7 input=a9049054013a1b77]*/
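The _randommodule.c.h hunk switches getrandbits() from PyLong_AsInt() to _PyLong_UInt64_Converter(), so the bit count `k` is parsed as an unsigned 64-bit value rather than a C int. A small sketch of the Python-visible behaviour (assumption: counts above INT_MAX now get past the argument parser, with available memory being the practical limit):

    import random

    r = random.Random(2025)
    x = r.getrandbits(64)          # k is now parsed as an unsigned 64-bit value
    assert 0 <= x < 2**64

    # Bit counts above INT_MAX are no longer rejected by the parser itself;
    # whether such calls succeed in practice depends on available memory.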
diff --git a/Modules/clinic/_remote_debugging_module.c.h b/Modules/clinic/_remote_debugging_module.c.h
index e83e2fd7fd2..5c313a2d664 100644
--- a/Modules/clinic/_remote_debugging_module.c.h
+++ b/Modules/clinic/_remote_debugging_module.c.h
@@ -10,7 +10,7 @@ preserve
#include "pycore_modsupport.h" // _PyArg_UnpackKeywords()
PyDoc_STRVAR(_remote_debugging_RemoteUnwinder___init____doc__,
-"RemoteUnwinder(pid, *, all_threads=False)\n"
+"RemoteUnwinder(pid, *, all_threads=False, debug=False)\n"
"--\n"
"\n"
"Initialize a new RemoteUnwinder object for debugging a remote Python process.\n"
@@ -19,6 +19,8 @@ PyDoc_STRVAR(_remote_debugging_RemoteUnwinder___init____doc__,
" pid: Process ID of the target Python process to debug\n"
" all_threads: If True, initialize state for all threads in the process.\n"
" If False, only initialize for the main thread.\n"
+" debug: If True, chain exceptions to explain the sequence of events that\n"
+" lead to the exception.\n"
"\n"
"The RemoteUnwinder provides functionality to inspect and debug a running Python\n"
"process, including examining thread states, stack frames and other runtime data.\n"
@@ -30,7 +32,8 @@ PyDoc_STRVAR(_remote_debugging_RemoteUnwinder___init____doc__,
static int
_remote_debugging_RemoteUnwinder___init___impl(RemoteUnwinderObject *self,
- int pid, int all_threads);
+ int pid, int all_threads,
+ int debug);
static int
_remote_debugging_RemoteUnwinder___init__(PyObject *self, PyObject *args, PyObject *kwargs)
@@ -38,7 +41,7 @@ _remote_debugging_RemoteUnwinder___init__(PyObject *self, PyObject *args, PyObje
int return_value = -1;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -47,7 +50,7 @@ _remote_debugging_RemoteUnwinder___init__(PyObject *self, PyObject *args, PyObje
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(pid), &_Py_ID(all_threads), },
+ .ob_item = { &_Py_ID(pid), &_Py_ID(all_threads), &_Py_ID(debug), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -56,19 +59,20 @@ _remote_debugging_RemoteUnwinder___init__(PyObject *self, PyObject *args, PyObje
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"pid", "all_threads", NULL};
+ static const char * const _keywords[] = {"pid", "all_threads", "debug", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "RemoteUnwinder",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
PyObject * const *fastargs;
Py_ssize_t nargs = PyTuple_GET_SIZE(args);
Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 1;
int pid;
int all_threads = 0;
+ int debug = 0;
fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser,
/*minpos*/ 1, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -82,12 +86,21 @@ _remote_debugging_RemoteUnwinder___init__(PyObject *self, PyObject *args, PyObje
if (!noptargs) {
goto skip_optional_kwonly;
}
- all_threads = PyObject_IsTrue(fastargs[1]);
- if (all_threads < 0) {
+ if (fastargs[1]) {
+ all_threads = PyObject_IsTrue(fastargs[1]);
+ if (all_threads < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
+ }
+ debug = PyObject_IsTrue(fastargs[2]);
+ if (debug < 0) {
goto exit;
}
skip_optional_kwonly:
- return_value = _remote_debugging_RemoteUnwinder___init___impl((RemoteUnwinderObject *)self, pid, all_threads);
+ return_value = _remote_debugging_RemoteUnwinder___init___impl((RemoteUnwinderObject *)self, pid, all_threads, debug);
exit:
return return_value;
@@ -240,4 +253,4 @@ _remote_debugging_RemoteUnwinder_get_async_stack_trace(PyObject *self, PyObject
return return_value;
}
-/*[clinic end generated code: output=654772085f1f4bf6 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=774ec34aa653402d input=a9049054013a1b77]*/
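The _remote_debugging_module.c.h changes add a keyword-only `debug` flag to RemoteUnwinder.__init__(); per the new docstring it chains exceptions so a failure explains the sequence of events that led to it. A hedged sketch of the call shape only — _remote_debugging is an internal module, and attaching to another process may require elevated privileges:

    from _remote_debugging import RemoteUnwinder   # internal/private module

    def attach(pid: int) -> RemoteUnwinder:
        # debug=True chains exceptions so a failure explains how it came about
        # (per the docstring above); all_threads=True samples every thread.
        return RemoteUnwinder(pid, all_threads=True, debug=True)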
diff --git a/Modules/clinic/_winapi.c.h b/Modules/clinic/_winapi.c.h
index b0fc1f1a89b..bd685e75d93 100644
--- a/Modules/clinic/_winapi.c.h
+++ b/Modules/clinic/_winapi.c.h
@@ -1933,6 +1933,24 @@ _winapi_GetACP(PyObject *module, PyObject *Py_UNUSED(ignored))
return _winapi_GetACP_impl(module);
}
+PyDoc_STRVAR(_winapi_GetOEMCP__doc__,
+"GetOEMCP($module, /)\n"
+"--\n"
+"\n"
+"Get the current Windows ANSI code page identifier.");
+
+#define _WINAPI_GETOEMCP_METHODDEF \
+ {"GetOEMCP", (PyCFunction)_winapi_GetOEMCP, METH_NOARGS, _winapi_GetOEMCP__doc__},
+
+static PyObject *
+_winapi_GetOEMCP_impl(PyObject *module);
+
+static PyObject *
+_winapi_GetOEMCP(PyObject *module, PyObject *Py_UNUSED(ignored))
+{
+ return _winapi_GetOEMCP_impl(module);
+}
+
PyDoc_STRVAR(_winapi_GetFileType__doc__,
"GetFileType($module, /, handle)\n"
"--\n"
@@ -2169,4 +2187,4 @@ exit:
#ifndef _WINAPI_GETSHORTPATHNAME_METHODDEF
#define _WINAPI_GETSHORTPATHNAME_METHODDEF
#endif /* !defined(_WINAPI_GETSHORTPATHNAME_METHODDEF) */
-/*[clinic end generated code: output=ede63eaaf63aa7e6 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=4581fd481c3c6293 input=a9049054013a1b77]*/
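_winapi gains a GetOEMCP() function alongside the existing GetACP(); both take no arguments and return a code page identifier. A minimal sketch (Windows only; _winapi is an internal module):

    import sys

    if sys.platform == "win32":
        import _winapi   # internal module, Windows only
        print("ANSI code page:", _winapi.GetACP())
        print("OEM code page: ", _winapi.GetOEMCP())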
diff --git a/Modules/clinic/blake2module.c.h b/Modules/clinic/blake2module.c.h
index bb2e308574a..9e9cd56e569 100644
--- a/Modules/clinic/blake2module.c.h
+++ b/Modules/clinic/blake2module.c.h
@@ -10,20 +10,21 @@ preserve
#include "pycore_modsupport.h" // _PyArg_UnpackKeywords()
PyDoc_STRVAR(py_blake2b_new__doc__,
-"blake2b(data=b\'\', /, *, digest_size=_blake2.blake2b.MAX_DIGEST_SIZE,\n"
+"blake2b(data=b\'\', *, digest_size=_blake2.blake2b.MAX_DIGEST_SIZE,\n"
" key=b\'\', salt=b\'\', person=b\'\', fanout=1, depth=1, leaf_size=0,\n"
" node_offset=0, node_depth=0, inner_size=0, last_node=False,\n"
-" usedforsecurity=True)\n"
+" usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new BLAKE2b hash object.");
static PyObject *
-py_blake2b_new_impl(PyTypeObject *type, PyObject *data, int digest_size,
+py_blake2b_new_impl(PyTypeObject *type, PyObject *data_obj, int digest_size,
Py_buffer *key, Py_buffer *salt, Py_buffer *person,
int fanout, int depth, unsigned long leaf_size,
unsigned long long node_offset, int node_depth,
- int inner_size, int last_node, int usedforsecurity);
+ int inner_size, int last_node, int usedforsecurity,
+ PyObject *string);
static PyObject *
py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
@@ -31,7 +32,7 @@ py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 12
+ #define NUM_KEYWORDS 14
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -40,7 +41,7 @@ py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(digest_size), &_Py_ID(key), &_Py_ID(salt), &_Py_ID(person), &_Py_ID(fanout), &_Py_ID(depth), &_Py_ID(leaf_size), &_Py_ID(node_offset), &_Py_ID(node_depth), &_Py_ID(inner_size), &_Py_ID(last_node), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(digest_size), &_Py_ID(key), &_Py_ID(salt), &_Py_ID(person), &_Py_ID(fanout), &_Py_ID(depth), &_Py_ID(leaf_size), &_Py_ID(node_offset), &_Py_ID(node_depth), &_Py_ID(inner_size), &_Py_ID(last_node), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -49,18 +50,18 @@ py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "blake2b",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[13];
+ PyObject *argsbuf[14];
PyObject * const *fastargs;
Py_ssize_t nargs = PyTuple_GET_SIZE(args);
Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 0;
- PyObject *data = NULL;
+ PyObject *data_obj = NULL;
int digest_size = HACL_HASH_BLAKE2B_OUT_BYTES;
Py_buffer key = {NULL, NULL};
Py_buffer salt = {NULL, NULL};
@@ -73,18 +74,23 @@ py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
int inner_size = 0;
int last_node = 0;
int usedforsecurity = 1;
+ PyObject *string = NULL;
fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
if (!fastargs) {
goto exit;
}
- if (nargs < 1) {
- goto skip_optional_posonly;
+ if (!noptargs) {
+ goto skip_optional_pos;
}
- noptargs--;
- data = fastargs[0];
-skip_optional_posonly:
+ if (fastargs[0]) {
+ data_obj = fastargs[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
@@ -182,12 +188,18 @@ skip_optional_posonly:
goto skip_optional_kwonly;
}
}
- usedforsecurity = PyObject_IsTrue(fastargs[12]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (fastargs[12]) {
+ usedforsecurity = PyObject_IsTrue(fastargs[12]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = fastargs[13];
skip_optional_kwonly:
- return_value = py_blake2b_new_impl(type, data, digest_size, &key, &salt, &person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node, usedforsecurity);
+ return_value = py_blake2b_new_impl(type, data_obj, digest_size, &key, &salt, &person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node, usedforsecurity, string);
exit:
/* Cleanup for key */
@@ -207,20 +219,21 @@ exit:
}
PyDoc_STRVAR(py_blake2s_new__doc__,
-"blake2s(data=b\'\', /, *, digest_size=_blake2.blake2s.MAX_DIGEST_SIZE,\n"
+"blake2s(data=b\'\', *, digest_size=_blake2.blake2s.MAX_DIGEST_SIZE,\n"
" key=b\'\', salt=b\'\', person=b\'\', fanout=1, depth=1, leaf_size=0,\n"
" node_offset=0, node_depth=0, inner_size=0, last_node=False,\n"
-" usedforsecurity=True)\n"
+" usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new BLAKE2s hash object.");
static PyObject *
-py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size,
+py_blake2s_new_impl(PyTypeObject *type, PyObject *data_obj, int digest_size,
Py_buffer *key, Py_buffer *salt, Py_buffer *person,
int fanout, int depth, unsigned long leaf_size,
unsigned long long node_offset, int node_depth,
- int inner_size, int last_node, int usedforsecurity);
+ int inner_size, int last_node, int usedforsecurity,
+ PyObject *string);
static PyObject *
py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
@@ -228,7 +241,7 @@ py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 12
+ #define NUM_KEYWORDS 14
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -237,7 +250,7 @@ py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(digest_size), &_Py_ID(key), &_Py_ID(salt), &_Py_ID(person), &_Py_ID(fanout), &_Py_ID(depth), &_Py_ID(leaf_size), &_Py_ID(node_offset), &_Py_ID(node_depth), &_Py_ID(inner_size), &_Py_ID(last_node), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(digest_size), &_Py_ID(key), &_Py_ID(salt), &_Py_ID(person), &_Py_ID(fanout), &_Py_ID(depth), &_Py_ID(leaf_size), &_Py_ID(node_offset), &_Py_ID(node_depth), &_Py_ID(inner_size), &_Py_ID(last_node), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -246,18 +259,18 @@ py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "blake2s",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[13];
+ PyObject *argsbuf[14];
PyObject * const *fastargs;
Py_ssize_t nargs = PyTuple_GET_SIZE(args);
Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 0;
- PyObject *data = NULL;
+ PyObject *data_obj = NULL;
int digest_size = HACL_HASH_BLAKE2S_OUT_BYTES;
Py_buffer key = {NULL, NULL};
Py_buffer salt = {NULL, NULL};
@@ -270,18 +283,23 @@ py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
int inner_size = 0;
int last_node = 0;
int usedforsecurity = 1;
+ PyObject *string = NULL;
fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
if (!fastargs) {
goto exit;
}
- if (nargs < 1) {
- goto skip_optional_posonly;
+ if (!noptargs) {
+ goto skip_optional_pos;
}
- noptargs--;
- data = fastargs[0];
-skip_optional_posonly:
+ if (fastargs[0]) {
+ data_obj = fastargs[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
@@ -379,12 +397,18 @@ skip_optional_posonly:
goto skip_optional_kwonly;
}
}
- usedforsecurity = PyObject_IsTrue(fastargs[12]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (fastargs[12]) {
+ usedforsecurity = PyObject_IsTrue(fastargs[12]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = fastargs[13];
skip_optional_kwonly:
- return_value = py_blake2s_new_impl(type, data, digest_size, &key, &salt, &person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node, usedforsecurity);
+ return_value = py_blake2s_new_impl(type, data_obj, digest_size, &key, &salt, &person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node, usedforsecurity, string);
exit:
/* Cleanup for key */
@@ -478,4 +502,4 @@ _blake2_blake2b_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored))
{
return _blake2_blake2b_hexdigest_impl((Blake2Object *)self);
}
-/*[clinic end generated code: output=d30e8293bd8e2950 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=eed18dcfaf6f7731 input=a9049054013a1b77]*/
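In blake2module.c.h the BLAKE2 constructors change their buffer argument from positional-only to a named `data` parameter and add the keyword-only `string` alias, mirroring the _hashlib changes earlier in this diff. A short sketch (assuming hashlib.blake2b continues to re-export the _blake2 constructor unchanged):

    import hashlib

    d1 = hashlib.blake2b(b"payload", digest_size=32)
    d2 = hashlib.blake2b(data=b"payload", digest_size=32)   # "data" now valid as a keyword
    assert d1.hexdigest() == d2.hexdigest()

    # "string" remains available as a keyword-only compatibility alias for the
    # same buffer (handled in the impl, not in this generated header).
    d3 = hashlib.blake2b(string=b"payload", digest_size=32)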
diff --git a/Modules/clinic/mathmodule.c.h b/Modules/clinic/mathmodule.c.h
index 9df73b187bb..fbb012fb6dd 100644
--- a/Modules/clinic/mathmodule.c.h
+++ b/Modules/clinic/mathmodule.c.h
@@ -628,6 +628,74 @@ exit:
return return_value;
}
+PyDoc_STRVAR(math_isnormal__doc__,
+"isnormal($module, x, /)\n"
+"--\n"
+"\n"
+"Return True if x is normal, and False otherwise.");
+
+#define MATH_ISNORMAL_METHODDEF \
+ {"isnormal", (PyCFunction)math_isnormal, METH_O, math_isnormal__doc__},
+
+static PyObject *
+math_isnormal_impl(PyObject *module, double x);
+
+static PyObject *
+math_isnormal(PyObject *module, PyObject *arg)
+{
+ PyObject *return_value = NULL;
+ double x;
+
+ if (PyFloat_CheckExact(arg)) {
+ x = PyFloat_AS_DOUBLE(arg);
+ }
+ else
+ {
+ x = PyFloat_AsDouble(arg);
+ if (x == -1.0 && PyErr_Occurred()) {
+ goto exit;
+ }
+ }
+ return_value = math_isnormal_impl(module, x);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(math_issubnormal__doc__,
+"issubnormal($module, x, /)\n"
+"--\n"
+"\n"
+"Return True if x is subnormal, and False otherwise.");
+
+#define MATH_ISSUBNORMAL_METHODDEF \
+ {"issubnormal", (PyCFunction)math_issubnormal, METH_O, math_issubnormal__doc__},
+
+static PyObject *
+math_issubnormal_impl(PyObject *module, double x);
+
+static PyObject *
+math_issubnormal(PyObject *module, PyObject *arg)
+{
+ PyObject *return_value = NULL;
+ double x;
+
+ if (PyFloat_CheckExact(arg)) {
+ x = PyFloat_AS_DOUBLE(arg);
+ }
+ else
+ {
+ x = PyFloat_AsDouble(arg);
+ if (x == -1.0 && PyErr_Occurred()) {
+ goto exit;
+ }
+ }
+ return_value = math_issubnormal_impl(module, x);
+
+exit:
+ return return_value;
+}
+
PyDoc_STRVAR(math_isnan__doc__,
"isnan($module, x, /)\n"
"--\n"
@@ -1110,4 +1178,4 @@ math_ulp(PyObject *module, PyObject *arg)
exit:
return return_value;
}
-/*[clinic end generated code: output=77e7b8c161c39843 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=44bba3a0a052a364 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/md5module.c.h b/Modules/clinic/md5module.c.h
index 9ca4f6528ce..f76902586dd 100644
--- a/Modules/clinic/md5module.c.h
+++ b/Modules/clinic/md5module.c.h
@@ -89,7 +89,7 @@ MD5Type_update(PyObject *self, PyObject *obj)
}
PyDoc_STRVAR(_md5_md5__doc__,
-"md5($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"md5($module, /, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new MD5 hash object; optionally initialized with a string.");
@@ -98,7 +98,8 @@ PyDoc_STRVAR(_md5_md5__doc__,
{"md5", _PyCFunction_CAST(_md5_md5), METH_FASTCALL|METH_KEYWORDS, _md5_md5__doc__},
static PyObject *
-_md5_md5_impl(PyObject *module, PyObject *string, int usedforsecurity);
+_md5_md5_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj);
static PyObject *
_md5_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -106,7 +107,7 @@ _md5_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -115,7 +116,7 @@ _md5_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -124,17 +125,18 @@ _md5_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "md5",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *string = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string_obj = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -145,7 +147,7 @@ _md5_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw
goto skip_optional_pos;
}
if (args[0]) {
- string = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -154,14 +156,20 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string_obj = args[2];
skip_optional_kwonly:
- return_value = _md5_md5_impl(module, string, usedforsecurity);
+ return_value = _md5_md5_impl(module, data, usedforsecurity, string_obj);
exit:
return return_value;
}
-/*[clinic end generated code: output=73f4d2034d9fcc63 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=920fe54b9ed06f92 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/sha1module.c.h b/Modules/clinic/sha1module.c.h
index 3e5fd1a41ce..4a58d0cd9b8 100644
--- a/Modules/clinic/sha1module.c.h
+++ b/Modules/clinic/sha1module.c.h
@@ -89,7 +89,7 @@ SHA1Type_update(PyObject *self, PyObject *obj)
}
PyDoc_STRVAR(_sha1_sha1__doc__,
-"sha1($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"sha1($module, /, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new SHA1 hash object; optionally initialized with a string.");
@@ -98,7 +98,8 @@ PyDoc_STRVAR(_sha1_sha1__doc__,
{"sha1", _PyCFunction_CAST(_sha1_sha1), METH_FASTCALL|METH_KEYWORDS, _sha1_sha1__doc__},
static PyObject *
-_sha1_sha1_impl(PyObject *module, PyObject *string, int usedforsecurity);
+_sha1_sha1_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj);
static PyObject *
_sha1_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -106,7 +107,7 @@ _sha1_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -115,7 +116,7 @@ _sha1_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -124,17 +125,18 @@ _sha1_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "sha1",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *string = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string_obj = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -145,7 +147,7 @@ _sha1_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *
goto skip_optional_pos;
}
if (args[0]) {
- string = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -154,14 +156,20 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string_obj = args[2];
skip_optional_kwonly:
- return_value = _sha1_sha1_impl(module, string, usedforsecurity);
+ return_value = _sha1_sha1_impl(module, data, usedforsecurity, string_obj);
exit:
return return_value;
}
-/*[clinic end generated code: output=06161e87e2d645d4 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=fd5a917404b68c4f input=a9049054013a1b77]*/
diff --git a/Modules/clinic/sha2module.c.h b/Modules/clinic/sha2module.c.h
index 26612125e75..07be91e4f6c 100644
--- a/Modules/clinic/sha2module.c.h
+++ b/Modules/clinic/sha2module.c.h
@@ -169,7 +169,7 @@ SHA512Type_update(PyObject *self, PyObject *obj)
}
PyDoc_STRVAR(_sha2_sha256__doc__,
-"sha256($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"sha256($module, /, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new SHA-256 hash object; optionally initialized with a string.");
@@ -178,7 +178,8 @@ PyDoc_STRVAR(_sha2_sha256__doc__,
{"sha256", _PyCFunction_CAST(_sha2_sha256), METH_FASTCALL|METH_KEYWORDS, _sha2_sha256__doc__},
static PyObject *
-_sha2_sha256_impl(PyObject *module, PyObject *string, int usedforsecurity);
+_sha2_sha256_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj);
static PyObject *
_sha2_sha256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -186,7 +187,7 @@ _sha2_sha256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -195,7 +196,7 @@ _sha2_sha256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -204,17 +205,18 @@ _sha2_sha256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "sha256",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *string = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string_obj = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -225,7 +227,7 @@ _sha2_sha256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
goto skip_optional_pos;
}
if (args[0]) {
- string = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -234,19 +236,25 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string_obj = args[2];
skip_optional_kwonly:
- return_value = _sha2_sha256_impl(module, string, usedforsecurity);
+ return_value = _sha2_sha256_impl(module, data, usedforsecurity, string_obj);
exit:
return return_value;
}
PyDoc_STRVAR(_sha2_sha224__doc__,
-"sha224($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"sha224($module, /, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new SHA-224 hash object; optionally initialized with a string.");
@@ -255,7 +263,8 @@ PyDoc_STRVAR(_sha2_sha224__doc__,
{"sha224", _PyCFunction_CAST(_sha2_sha224), METH_FASTCALL|METH_KEYWORDS, _sha2_sha224__doc__},
static PyObject *
-_sha2_sha224_impl(PyObject *module, PyObject *string, int usedforsecurity);
+_sha2_sha224_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj);
static PyObject *
_sha2_sha224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -263,7 +272,7 @@ _sha2_sha224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -272,7 +281,7 @@ _sha2_sha224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -281,17 +290,18 @@ _sha2_sha224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "sha224",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *string = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string_obj = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -302,7 +312,7 @@ _sha2_sha224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
goto skip_optional_pos;
}
if (args[0]) {
- string = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -311,19 +321,25 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string_obj = args[2];
skip_optional_kwonly:
- return_value = _sha2_sha224_impl(module, string, usedforsecurity);
+ return_value = _sha2_sha224_impl(module, data, usedforsecurity, string_obj);
exit:
return return_value;
}
PyDoc_STRVAR(_sha2_sha512__doc__,
-"sha512($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"sha512($module, /, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new SHA-512 hash object; optionally initialized with a string.");
@@ -332,7 +348,8 @@ PyDoc_STRVAR(_sha2_sha512__doc__,
{"sha512", _PyCFunction_CAST(_sha2_sha512), METH_FASTCALL|METH_KEYWORDS, _sha2_sha512__doc__},
static PyObject *
-_sha2_sha512_impl(PyObject *module, PyObject *string, int usedforsecurity);
+_sha2_sha512_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj);
static PyObject *
_sha2_sha512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -340,7 +357,7 @@ _sha2_sha512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -349,7 +366,7 @@ _sha2_sha512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -358,17 +375,18 @@ _sha2_sha512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "sha512",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *string = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string_obj = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -379,7 +397,7 @@ _sha2_sha512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
goto skip_optional_pos;
}
if (args[0]) {
- string = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -388,19 +406,25 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string_obj = args[2];
skip_optional_kwonly:
- return_value = _sha2_sha512_impl(module, string, usedforsecurity);
+ return_value = _sha2_sha512_impl(module, data, usedforsecurity, string_obj);
exit:
return return_value;
}
PyDoc_STRVAR(_sha2_sha384__doc__,
-"sha384($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"sha384($module, /, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new SHA-384 hash object; optionally initialized with a string.");
@@ -409,7 +433,8 @@ PyDoc_STRVAR(_sha2_sha384__doc__,
{"sha384", _PyCFunction_CAST(_sha2_sha384), METH_FASTCALL|METH_KEYWORDS, _sha2_sha384__doc__},
static PyObject *
-_sha2_sha384_impl(PyObject *module, PyObject *string, int usedforsecurity);
+_sha2_sha384_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj);
static PyObject *
_sha2_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -417,7 +442,7 @@ _sha2_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -426,7 +451,7 @@ _sha2_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -435,17 +460,18 @@ _sha2_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "sha384",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *string = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string_obj = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -456,7 +482,7 @@ _sha2_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
goto skip_optional_pos;
}
if (args[0]) {
- string = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -465,14 +491,20 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string_obj = args[2];
skip_optional_kwonly:
- return_value = _sha2_sha384_impl(module, string, usedforsecurity);
+ return_value = _sha2_sha384_impl(module, data, usedforsecurity, string_obj);
exit:
return return_value;
}
-/*[clinic end generated code: output=af11090855b7c85a input=a9049054013a1b77]*/
+/*[clinic end generated code: output=90625b237c774a9f input=a9049054013a1b77]*/
diff --git a/Modules/clinic/sha3module.c.h b/Modules/clinic/sha3module.c.h
index 25f72b74f80..121be2c0758 100644
--- a/Modules/clinic/sha3module.c.h
+++ b/Modules/clinic/sha3module.c.h
@@ -10,13 +10,14 @@ preserve
#include "pycore_modsupport.h" // _PyArg_UnpackKeywords()
PyDoc_STRVAR(py_sha3_new__doc__,
-"sha3_224(data=b\'\', /, *, usedforsecurity=True)\n"
+"sha3_224(data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new SHA3 hash object.");
static PyObject *
-py_sha3_new_impl(PyTypeObject *type, PyObject *data, int usedforsecurity);
+py_sha3_new_impl(PyTypeObject *type, PyObject *data_obj, int usedforsecurity,
+ PyObject *string);
static PyObject *
py_sha3_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
@@ -24,7 +25,7 @@ py_sha3_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 1
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -33,7 +34,7 @@ py_sha3_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -42,40 +43,51 @@ py_sha3_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "sha3_224",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
PyObject * const *fastargs;
Py_ssize_t nargs = PyTuple_GET_SIZE(args);
Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 0;
- PyObject *data = NULL;
+ PyObject *data_obj = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
if (!fastargs) {
goto exit;
}
- if (nargs < 1) {
- goto skip_optional_posonly;
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ if (fastargs[0]) {
+ data_obj = fastargs[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
}
- noptargs--;
- data = fastargs[0];
-skip_optional_posonly:
+skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(fastargs[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (fastargs[1]) {
+ usedforsecurity = PyObject_IsTrue(fastargs[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = fastargs[2];
skip_optional_kwonly:
- return_value = py_sha3_new_impl(type, data, usedforsecurity);
+ return_value = py_sha3_new_impl(type, data_obj, usedforsecurity, string);
exit:
return return_value;
@@ -158,24 +170,57 @@ _sha3_sha3_224_update(PyObject *self, PyObject *data)
}
PyDoc_STRVAR(_sha3_shake_128_digest__doc__,
-"digest($self, length, /)\n"
+"digest($self, /, length)\n"
"--\n"
"\n"
"Return the digest value as a bytes object.");
#define _SHA3_SHAKE_128_DIGEST_METHODDEF \
- {"digest", (PyCFunction)_sha3_shake_128_digest, METH_O, _sha3_shake_128_digest__doc__},
+ {"digest", _PyCFunction_CAST(_sha3_shake_128_digest), METH_FASTCALL|METH_KEYWORDS, _sha3_shake_128_digest__doc__},
static PyObject *
_sha3_shake_128_digest_impl(SHA3object *self, unsigned long length);
static PyObject *
-_sha3_shake_128_digest(PyObject *self, PyObject *arg)
+_sha3_shake_128_digest(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+
+ #define NUM_KEYWORDS 1
+ static struct {
+ PyGC_Head _this_is_not_used;
+ PyObject_VAR_HEAD
+ Py_hash_t ob_hash;
+ PyObject *ob_item[NUM_KEYWORDS];
+ } _kwtuple = {
+ .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
+ .ob_hash = -1,
+ .ob_item = { &_Py_ID(length), },
+ };
+ #undef NUM_KEYWORDS
+ #define KWTUPLE (&_kwtuple.ob_base.ob_base)
+
+ #else // !Py_BUILD_CORE
+ # define KWTUPLE NULL
+ #endif // !Py_BUILD_CORE
+
+ static const char * const _keywords[] = {"length", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "digest",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[1];
unsigned long length;
- if (!_PyLong_UnsignedLong_Converter(arg, &length)) {
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
+ /*minpos*/ 1, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (!_PyLong_UnsignedLong_Converter(args[0], &length)) {
goto exit;
}
return_value = _sha3_shake_128_digest_impl((SHA3object *)self, length);
@@ -185,24 +230,57 @@ exit:
}
PyDoc_STRVAR(_sha3_shake_128_hexdigest__doc__,
-"hexdigest($self, length, /)\n"
+"hexdigest($self, /, length)\n"
"--\n"
"\n"
"Return the digest value as a string of hexadecimal digits.");
#define _SHA3_SHAKE_128_HEXDIGEST_METHODDEF \
- {"hexdigest", (PyCFunction)_sha3_shake_128_hexdigest, METH_O, _sha3_shake_128_hexdigest__doc__},
+ {"hexdigest", _PyCFunction_CAST(_sha3_shake_128_hexdigest), METH_FASTCALL|METH_KEYWORDS, _sha3_shake_128_hexdigest__doc__},
static PyObject *
_sha3_shake_128_hexdigest_impl(SHA3object *self, unsigned long length);
static PyObject *
-_sha3_shake_128_hexdigest(PyObject *self, PyObject *arg)
+_sha3_shake_128_hexdigest(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+
+ #define NUM_KEYWORDS 1
+ static struct {
+ PyGC_Head _this_is_not_used;
+ PyObject_VAR_HEAD
+ Py_hash_t ob_hash;
+ PyObject *ob_item[NUM_KEYWORDS];
+ } _kwtuple = {
+ .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
+ .ob_hash = -1,
+ .ob_item = { &_Py_ID(length), },
+ };
+ #undef NUM_KEYWORDS
+ #define KWTUPLE (&_kwtuple.ob_base.ob_base)
+
+ #else // !Py_BUILD_CORE
+ # define KWTUPLE NULL
+ #endif // !Py_BUILD_CORE
+
+ static const char * const _keywords[] = {"length", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "hexdigest",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[1];
unsigned long length;
- if (!_PyLong_UnsignedLong_Converter(arg, &length)) {
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
+ /*minpos*/ 1, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (!_PyLong_UnsignedLong_Converter(args[0], &length)) {
goto exit;
}
return_value = _sha3_shake_128_hexdigest_impl((SHA3object *)self, length);
@@ -210,4 +288,4 @@ _sha3_shake_128_hexdigest(PyObject *self, PyObject *arg)
exit:
return return_value;
}
-/*[clinic end generated code: output=5b3ac1c06c6899ea input=a9049054013a1b77]*/
+/*[clinic end generated code: output=65e437799472b89f input=a9049054013a1b77]*/
diff --git a/Modules/clinic/zlibmodule.c.h b/Modules/clinic/zlibmodule.c.h
index 2710f65a840..146a7e25001 100644
--- a/Modules/clinic/zlibmodule.c.h
+++ b/Modules/clinic/zlibmodule.c.h
@@ -1044,6 +1044,65 @@ exit:
return return_value;
}
+PyDoc_STRVAR(zlib_adler32_combine__doc__,
+"adler32_combine($module, adler1, adler2, len2, /)\n"
+"--\n"
+"\n"
+"Combine two Adler-32 checksums into one.\n"
+"\n"
+" adler1\n"
+" Adler-32 checksum for sequence A\n"
+" adler2\n"
+" Adler-32 checksum for sequence B\n"
+" len2\n"
+" Length of sequence B\n"
+"\n"
+"Given the Adler-32 checksum \'adler1\' of a sequence A and the\n"
+"Adler-32 checksum \'adler2\' of a sequence B of length \'len2\',\n"
+"return the Adler-32 checksum of A and B concatenated.");
+
+#define ZLIB_ADLER32_COMBINE_METHODDEF \
+ {"adler32_combine", _PyCFunction_CAST(zlib_adler32_combine), METH_FASTCALL, zlib_adler32_combine__doc__},
+
+static unsigned int
+zlib_adler32_combine_impl(PyObject *module, unsigned int adler1,
+ unsigned int adler2, PyObject *len2);
+
+static PyObject *
+zlib_adler32_combine(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ unsigned int adler1;
+ unsigned int adler2;
+ PyObject *len2;
+ unsigned int _return_value;
+
+ if (!_PyArg_CheckPositional("adler32_combine", nargs, 3, 3)) {
+ goto exit;
+ }
+ adler1 = (unsigned int)PyLong_AsUnsignedLongMask(args[0]);
+ if (adler1 == (unsigned int)-1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ adler2 = (unsigned int)PyLong_AsUnsignedLongMask(args[1]);
+ if (adler2 == (unsigned int)-1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ if (!PyLong_Check(args[2])) {
+ _PyArg_BadArgument("adler32_combine", "argument 3", "int", args[2]);
+ goto exit;
+ }
+ len2 = args[2];
+ _return_value = zlib_adler32_combine_impl(module, adler1, adler2, len2);
+ if ((_return_value == (unsigned int)-1) && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = PyLong_FromUnsignedLong((unsigned long)_return_value);
+
+exit:
+ return return_value;
+}
+
PyDoc_STRVAR(zlib_crc32__doc__,
"crc32($module, data, value=0, /)\n"
"--\n"
@@ -1098,6 +1157,65 @@ exit:
return return_value;
}
+PyDoc_STRVAR(zlib_crc32_combine__doc__,
+"crc32_combine($module, crc1, crc2, len2, /)\n"
+"--\n"
+"\n"
+"Combine two CRC-32 checksums into one.\n"
+"\n"
+" crc1\n"
+" CRC-32 checksum for sequence A\n"
+" crc2\n"
+" CRC-32 checksum for sequence B\n"
+" len2\n"
+" Length of sequence B\n"
+"\n"
+"Given the CRC-32 checksum \'crc1\' of a sequence A and the\n"
+"CRC-32 checksum \'crc2\' of a sequence B of length \'len2\',\n"
+"return the CRC-32 checksum of A and B concatenated.");
+
+#define ZLIB_CRC32_COMBINE_METHODDEF \
+ {"crc32_combine", _PyCFunction_CAST(zlib_crc32_combine), METH_FASTCALL, zlib_crc32_combine__doc__},
+
+static unsigned int
+zlib_crc32_combine_impl(PyObject *module, unsigned int crc1,
+ unsigned int crc2, PyObject *len2);
+
+static PyObject *
+zlib_crc32_combine(PyObject *module, PyObject *const *args, Py_ssize_t nargs)
+{
+ PyObject *return_value = NULL;
+ unsigned int crc1;
+ unsigned int crc2;
+ PyObject *len2;
+ unsigned int _return_value;
+
+ if (!_PyArg_CheckPositional("crc32_combine", nargs, 3, 3)) {
+ goto exit;
+ }
+ crc1 = (unsigned int)PyLong_AsUnsignedLongMask(args[0]);
+ if (crc1 == (unsigned int)-1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ crc2 = (unsigned int)PyLong_AsUnsignedLongMask(args[1]);
+ if (crc2 == (unsigned int)-1 && PyErr_Occurred()) {
+ goto exit;
+ }
+ if (!PyLong_Check(args[2])) {
+ _PyArg_BadArgument("crc32_combine", "argument 3", "int", args[2]);
+ goto exit;
+ }
+ len2 = args[2];
+ _return_value = zlib_crc32_combine_impl(module, crc1, crc2, len2);
+ if ((_return_value == (unsigned int)-1) && PyErr_Occurred()) {
+ goto exit;
+ }
+ return_value = PyLong_FromUnsignedLong((unsigned long)_return_value);
+
+exit:
+ return return_value;
+}
+
#ifndef ZLIB_COMPRESS_COPY_METHODDEF
#define ZLIB_COMPRESS_COPY_METHODDEF
#endif /* !defined(ZLIB_COMPRESS_COPY_METHODDEF) */
@@ -1121,4 +1239,4 @@ exit:
#ifndef ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF
#define ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF
#endif /* !defined(ZLIB_DECOMPRESS___DEEPCOPY___METHODDEF) */
-/*[clinic end generated code: output=33938c7613a8c1c7 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=3f7692eb3b5d5a0c input=a9049054013a1b77]*/
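The two wrappers added above expose zlib's adler32_combine() and crc32_combine(): given the checksums of two sequences and the length of the second, they return the checksum of the concatenation, so checksums can be computed over chunks independently and merged afterwards. A small usage sketch, assuming a zlib module built with this patch:

    import zlib

    a, b = b"spam", b"eggs"
    crc = zlib.crc32_combine(zlib.crc32(a), zlib.crc32(b), len(b))
    assert crc == zlib.crc32(a + b)

    adler = zlib.adler32_combine(zlib.adler32(a), zlib.adler32(b), len(b))
    assert adler == zlib.adler32(a + b)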
diff --git a/Modules/faulthandler.c b/Modules/faulthandler.c
index c94f4f66366..73bea8172c7 100644
--- a/Modules/faulthandler.c
+++ b/Modules/faulthandler.c
@@ -6,7 +6,6 @@
#include "pycore_pystate.h" // _PyThreadState_GET()
#include "pycore_runtime.h" // _Py_ID()
#include "pycore_signal.h" // Py_NSIG
-#include "pycore_sysmodule.h" // _PySys_GetRequiredAttr()
#include "pycore_time.h" // _PyTime_FromSecondsObject()
#include "pycore_traceback.h" // _Py_DumpTracebackThreads
#ifdef HAVE_UNISTD_H
@@ -98,7 +97,7 @@ faulthandler_get_fileno(PyObject **file_ptr)
PyObject *file = *file_ptr;
if (file == NULL || file == Py_None) {
- file = _PySys_GetRequiredAttr(&_Py_ID(stderr));
+ file = PySys_GetAttr(&_Py_ID(stderr));
if (file == NULL) {
return -1;
}
diff --git a/Modules/fcntlmodule.c b/Modules/fcntlmodule.c
index 8b6379f1e65..90363b9dca3 100644
--- a/Modules/fcntlmodule.c
+++ b/Modules/fcntlmodule.c
@@ -128,7 +128,7 @@ fcntl_fcntl_impl(PyObject *module, int fd, int code, PyObject *arg)
Py_END_ALLOW_THREADS
} while (ret == -1 && errno == EINTR && !(async_err = PyErr_CheckSignals()));
if (ret < 0) {
- if (async_err) {
+ if (!async_err) {
PyErr_SetFromErrno(PyExc_OSError);
}
Py_DECREF(result);
@@ -136,6 +136,7 @@ fcntl_fcntl_impl(PyObject *module, int fd, int code, PyObject *arg)
}
if (ptr[len] != '\0') {
PyErr_SetString(PyExc_SystemError, "buffer overflow");
+ Py_DECREF(result);
return NULL;
}
return result;
@@ -310,7 +311,7 @@ fcntl_ioctl_impl(PyObject *module, int fd, unsigned long code, PyObject *arg,
Py_END_ALLOW_THREADS
} while (ret == -1 && errno == EINTR && !(async_err = PyErr_CheckSignals()));
if (ret < 0) {
- if (async_err) {
+ if (!async_err) {
PyErr_SetFromErrno(PyExc_OSError);
}
Py_DECREF(result);
@@ -318,6 +319,7 @@ fcntl_ioctl_impl(PyObject *module, int fd, unsigned long code, PyObject *arg,
}
if (ptr[len] != '\0') {
PyErr_SetString(PyExc_SystemError, "buffer overflow");
+ Py_DECREF(result);
return NULL;
}
return result;
diff --git a/Modules/hashlib.h b/Modules/hashlib.h
index 7105e68af7b..e82ec92be25 100644
--- a/Modules/hashlib.h
+++ b/Modules/hashlib.h
@@ -76,3 +76,41 @@
* to allow the user to optimize based on the platform they're using. */
#define HASHLIB_GIL_MINSIZE 2048
+static inline int
+_Py_hashlib_data_argument(PyObject **res, PyObject *data, PyObject *string)
+{
+ if (data != NULL && string == NULL) {
+ // called as H(data) or H(data=...)
+ *res = data;
+ return 1;
+ }
+ else if (data == NULL && string != NULL) {
+ // called as H(string=...)
+ if (PyErr_WarnEx(PyExc_DeprecationWarning,
+ "the 'string' keyword parameter is deprecated since "
+ "Python 3.15 and slated for removal in Python 3.19; "
+ "use the 'data' keyword parameter or pass the data "
+ "to hash as a positional argument instead", 1) < 0)
+ {
+ *res = NULL;
+ return -1;
+ }
+ *res = string;
+ return 1;
+ }
+ else if (data == NULL && string == NULL) {
+ // fast path when no data is given
+ assert(!PyErr_Occurred());
+ *res = NULL;
+ return 0;
+ }
+ else {
+ // called as H(data=..., string)
+ *res = NULL;
+ PyErr_SetString(PyExc_TypeError,
+ "'data' and 'string' are mutually exclusive "
+ "and support for 'string' keyword parameter "
+ "is slated for removal in a future version.");
+ return -1;
+ }
+}
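The helper above centralizes the new keyword handling for every hash constructor touched in this patch: 'data' is the preferred parameter, 'string' is still accepted for backward compatibility but emits a DeprecationWarning, and passing both is a TypeError. A rough usage sketch at the Python level, shown against the built-in _md5 module whose generated code appears earlier (hashlib's public constructors forward to these built-ins or to their OpenSSL counterparts), assuming an interpreter built with this change:

    import _md5
    import warnings

    _md5.md5(b"abc")            # positional data: unchanged behaviour
    _md5.md5(data=b"abc")       # 'data' is the new keyword spelling

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        _md5.md5(string=b"abc")           # legacy spelling still works, but warns
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)

    try:
        _md5.md5(b"abc", string=b"abc")   # 'data' and 'string' together
    except TypeError:
        pass                              # rejected as mutually exclusive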
diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c
index 943c1e8607b..2003546ce84 100644
--- a/Modules/itertoolsmodule.c
+++ b/Modules/itertoolsmodule.c
@@ -1124,7 +1124,6 @@ typedef struct {
PyObject *it;
PyObject *saved;
Py_ssize_t index;
- int firstpass;
} cycleobject;
#define cycleobject_CAST(op) ((cycleobject *)(op))
@@ -1165,8 +1164,7 @@ itertools_cycle_impl(PyTypeObject *type, PyObject *iterable)
}
lz->it = it;
lz->saved = saved;
- lz->index = 0;
- lz->firstpass = 0;
+ lz->index = -1;
return (PyObject *)lz;
}
@@ -1199,11 +1197,11 @@ cycle_next(PyObject *op)
cycleobject *lz = cycleobject_CAST(op);
PyObject *item;
- if (lz->it != NULL) {
+ Py_ssize_t index = FT_ATOMIC_LOAD_SSIZE_RELAXED(lz->index);
+
+ if (index < 0) {
item = PyIter_Next(lz->it);
if (item != NULL) {
- if (lz->firstpass)
- return item;
if (PyList_Append(lz->saved, item)) {
Py_DECREF(item);
return NULL;
@@ -1213,15 +1211,22 @@ cycle_next(PyObject *op)
/* Note: StopIteration is already cleared by PyIter_Next() */
if (PyErr_Occurred())
return NULL;
+ index = 0;
+ FT_ATOMIC_STORE_SSIZE_RELAXED(lz->index, 0);
+#ifndef Py_GIL_DISABLED
Py_CLEAR(lz->it);
+#endif
}
if (PyList_GET_SIZE(lz->saved) == 0)
return NULL;
- item = PyList_GET_ITEM(lz->saved, lz->index);
- lz->index++;
- if (lz->index >= PyList_GET_SIZE(lz->saved))
- lz->index = 0;
- return Py_NewRef(item);
+ item = PyList_GetItemRef(lz->saved, index);
+ assert(item);
+ index++;
+ if (index >= PyList_GET_SIZE(lz->saved)) {
+ index = 0;
+ }
+ FT_ATOMIC_STORE_SSIZE_RELAXED(lz->index, index);
+ return item;
}
static PyType_Slot cycle_slots[] = {
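The cycle() changes above drop the separate firstpass flag: an index of -1 now means "still consuming the source iterator", and on free-threaded builds the source iterator is kept alive (the Py_CLEAR is skipped) while the index is read and written with relaxed atomics. The observable behaviour is unchanged and still matches the documented pure-Python equivalent, roughly:

    # Reference-only sketch; the C object above implements the same
    # two-phase behaviour (consume the source once, then replay).
    def cycle(iterable):
        saved = []
        for element in iterable:   # first pass: yield and remember
            yield element
            saved.append(element)
        while saved:               # afterwards: replay the saved items forever
            for element in saved:
                yield element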
diff --git a/Modules/main.c b/Modules/main.c
index 2d7ed25f5f9..74e48c94732 100644
--- a/Modules/main.c
+++ b/Modules/main.c
@@ -497,16 +497,13 @@ error:
static int
pymain_run_interactive_hook(int *exitcode)
{
- PyObject *hook = PyImport_ImportModuleAttrString("sys",
- "__interactivehook__");
- if (hook == NULL) {
- if (PyErr_ExceptionMatches(PyExc_AttributeError)) {
- // no sys.__interactivehook__ attribute
- PyErr_Clear();
- return 0;
- }
+ PyObject *hook;
+ if (PySys_GetOptionalAttrString("__interactivehook__", &hook) < 0) {
goto error;
}
+ if (hook == NULL) {
+ return 0;
+ }
if (PySys_Audit("cpython.run_interactivehook", "O", hook) < 0) {
goto error;
diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c
index 40abd69f0a6..bbbb4911568 100644
--- a/Modules/mathmodule.c
+++ b/Modules/mathmodule.c
@@ -2161,6 +2161,27 @@ math_ldexp_impl(PyObject *module, double x, PyObject *i)
} else {
errno = 0;
r = ldexp(x, (int)exp);
+#ifdef _MSC_VER
+ if (DBL_MIN > r && r > -DBL_MIN) {
+ /* Denormal (or zero) results can be incorrectly rounded here (rather,
+ truncated). Fixed in newer versions of the C runtime, included
+ with Windows 11. */
+ int original_exp;
+ frexp(x, &original_exp);
+ if (original_exp > DBL_MIN_EXP) {
+ /* Shift down to the smallest normal binade. No bits lost. */
+ int shift = DBL_MIN_EXP - original_exp;
+ x = ldexp(x, shift);
+ exp -= shift;
+ }
+ /* Multiplying by 2**exp finishes the job, and the HW will round as
+ appropriate. Note: if exp < -DBL_MANT_DIG, all of x is shifted
+ to be < 0.5ULP of smallest denorm, so should be thrown away. If
+ exp is so very negative that ldexp underflows to 0, that's fine;
+ no need to check in advance. */
+ r = x*ldexp(1.0, (int)exp);
+ }
+#endif
if (isinf(r))
errno = ERANGE;
}
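The _MSC_VER branch above works around older Windows C runtimes that truncate, rather than round, ldexp() results falling in the subnormal range; it first rescales x into the smallest normal binade (losing no bits) and lets the final multiplication round in hardware. For exactly representable results the outcome is the same on every platform, for example:

    import math
    # 2**-1074 is the smallest positive subnormal double and is exactly
    # representable, so no rounding is involved here on any platform.
    assert math.ldexp(1.0, -1074) == 5e-324
    # The workaround above matters only for subnormal results that do
    # require rounding on the affected Windows runtimes.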
@@ -3098,6 +3119,44 @@ math_isfinite_impl(PyObject *module, double x)
/*[clinic input]
+math.isnormal
+
+ x: double
+ /
+
+Return True if x is normal, and False otherwise.
+[clinic start generated code]*/
+
+static PyObject *
+math_isnormal_impl(PyObject *module, double x)
+/*[clinic end generated code: output=c7b302b5b89c3541 input=fdaa00c58aa7bc17]*/
+{
+ return PyBool_FromLong(isnormal(x));
+}
+
+
+/*[clinic input]
+math.issubnormal
+
+ x: double
+ /
+
+Return True if x is subnormal, and False otherwise.
+[clinic start generated code]*/
+
+static PyObject *
+math_issubnormal_impl(PyObject *module, double x)
+/*[clinic end generated code: output=4e76ac98ddcae761 input=9a20aba7107d0d95]*/
+{
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 202311L
+ return PyBool_FromLong(issubnormal(x));
+#else
+ return PyBool_FromLong(isfinite(x) && x && !isnormal(x));
+#endif
+}
+
+
+/*[clinic input]
math.isnan
x: double
@@ -4124,6 +4183,8 @@ static PyMethodDef math_methods[] = {
MATH_HYPOT_METHODDEF
MATH_ISCLOSE_METHODDEF
MATH_ISFINITE_METHODDEF
+ MATH_ISNORMAL_METHODDEF
+ MATH_ISSUBNORMAL_METHODDEF
MATH_ISINF_METHODDEF
MATH_ISNAN_METHODDEF
MATH_ISQRT_METHODDEF
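The two new predicates classify a float by IEEE 754 category: math.isnormal() is true only for normal finite values, and math.issubnormal() is true only for nonzero finite values that are not normal. A quick sketch of the expected results, assuming an interpreter with this patch:

    import math

    math.isnormal(1.0)            # True
    math.isnormal(0.0)            # False: zero is neither normal nor subnormal
    math.isnormal(5e-324)         # False: smallest positive subnormal
    math.isnormal(math.inf)       # False

    math.issubnormal(5e-324)      # True
    math.issubnormal(1.0)         # False
    math.issubnormal(0.0)         # False
    math.issubnormal(math.nan)    # False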
diff --git a/Modules/md5module.c b/Modules/md5module.c
index c36eb41d4d2..9b5ea2d6e02 100644
--- a/Modules/md5module.c
+++ b/Modules/md5module.c
@@ -276,17 +276,24 @@ static PyType_Spec md5_type_spec = {
/*[clinic input]
_md5.md5
- string: object(c_default="NULL") = b''
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string as string_obj: object(c_default="NULL") = None
Return a new MD5 hash object; optionally initialized with a string.
[clinic start generated code]*/
static PyObject *
-_md5_md5_impl(PyObject *module, PyObject *string, int usedforsecurity)
-/*[clinic end generated code: output=587071f76254a4ac input=7a144a1905636985]*/
+_md5_md5_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj)
+/*[clinic end generated code: output=d45e187d3d16f3a8 input=7ea5c5366dbb44bf]*/
{
+ PyObject *string;
+ if (_Py_hashlib_data_argument(&string, data, string_obj) < 0) {
+ return NULL;
+ }
+
MD5object *new;
Py_buffer buf;
diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c
index 588894adeac..7dc5ef39a56 100644
--- a/Modules/posixmodule.c
+++ b/Modules/posixmodule.c
@@ -8820,14 +8820,14 @@ os_ptsname_impl(PyObject *module, int fd)
#if defined(HAVE_OPENPTY) || defined(HAVE_FORKPTY) || defined(HAVE_LOGIN_TTY) || defined(HAVE_DEV_PTMX)
#ifdef HAVE_PTY_H
#include <pty.h>
-#ifdef HAVE_UTMP_H
-#include <utmp.h>
-#endif /* HAVE_UTMP_H */
#elif defined(HAVE_LIBUTIL_H)
#include <libutil.h>
#elif defined(HAVE_UTIL_H)
#include <util.h>
#endif /* HAVE_PTY_H */
+#ifdef HAVE_UTMP_H
+#include <utmp.h>
+#endif /* HAVE_UTMP_H */
#ifdef HAVE_STROPTS_H
#include <stropts.h>
#endif
@@ -9562,6 +9562,24 @@ os_getlogin_impl(PyObject *module)
}
else
result = PyErr_SetFromWindowsErr(GetLastError());
+#elif defined (HAVE_GETLOGIN_R)
+# if defined (HAVE_MAXLOGNAME)
+ char name[MAXLOGNAME + 1];
+# elif defined (HAVE_UT_NAMESIZE)
+ char name[UT_NAMESIZE + 1];
+# else
+ char name[256];
+# endif
+ int err = getlogin_r(name, sizeof(name));
+ if (err) {
+ int old_errno = errno;
+ errno = -err;
+ posix_error();
+ errno = old_errno;
+ }
+ else {
+ result = PyUnicode_DecodeFSDefault(name);
+ }
#else
char *name;
int old_errno = errno;
diff --git a/Modules/sha1module.c b/Modules/sha1module.c
index f4a00cdb422..a746bf74f8d 100644
--- a/Modules/sha1module.c
+++ b/Modules/sha1module.c
@@ -272,19 +272,25 @@ static PyType_Spec sha1_type_spec = {
/*[clinic input]
_sha1.sha1
- string: object(c_default="NULL") = b''
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string as string_obj: object(c_default="NULL") = None
Return a new SHA1 hash object; optionally initialized with a string.
[clinic start generated code]*/
static PyObject *
-_sha1_sha1_impl(PyObject *module, PyObject *string, int usedforsecurity)
-/*[clinic end generated code: output=6f8b3af05126e18e input=bd54b68e2bf36a8a]*/
+_sha1_sha1_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj)
+/*[clinic end generated code: output=0d453775924f88a7 input=807f25264e0ac656]*/
{
SHA1object *new;
Py_buffer buf;
+ PyObject *string;
+ if (_Py_hashlib_data_argument(&string, data, string_obj) < 0) {
+ return NULL;
+ }
if (string) {
GET_BUFFER_VIEW_OR_ERROUT(string, &buf);
diff --git a/Modules/sha2module.c b/Modules/sha2module.c
index e88d7cb2d45..72931910c5d 100644
--- a/Modules/sha2module.c
+++ b/Modules/sha2module.c
@@ -594,18 +594,24 @@ static PyType_Spec sha512_type_spec = {
/*[clinic input]
_sha2.sha256
- string: object(c_default="NULL") = b''
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string as string_obj: object(c_default="NULL") = None
Return a new SHA-256 hash object; optionally initialized with a string.
[clinic start generated code]*/
static PyObject *
-_sha2_sha256_impl(PyObject *module, PyObject *string, int usedforsecurity)
-/*[clinic end generated code: output=243c9dd289931f87 input=6249da1de607280a]*/
+_sha2_sha256_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj)
+/*[clinic end generated code: output=49828a7bcd418f45 input=9ce1d70e669abc14]*/
{
Py_buffer buf;
+ PyObject *string;
+ if (_Py_hashlib_data_argument(&string, data, string_obj) < 0) {
+ return NULL;
+ }
if (string) {
GET_BUFFER_VIEW_OR_ERROUT(string, &buf);
@@ -651,18 +657,25 @@ _sha2_sha256_impl(PyObject *module, PyObject *string, int usedforsecurity)
/*[clinic input]
_sha2.sha224
- string: object(c_default="NULL") = b''
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string as string_obj: object(c_default="NULL") = None
Return a new SHA-224 hash object; optionally initialized with a string.
[clinic start generated code]*/
static PyObject *
-_sha2_sha224_impl(PyObject *module, PyObject *string, int usedforsecurity)
-/*[clinic end generated code: output=68191f232e4a3843 input=c42bcba47fd7d2b7]*/
+_sha2_sha224_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj)
+/*[clinic end generated code: output=2163cb03b6cf6157 input=612f7682a889bc2a]*/
{
Py_buffer buf;
+ PyObject *string;
+ if (_Py_hashlib_data_argument(&string, data, string_obj) < 0) {
+ return NULL;
+ }
+
if (string) {
GET_BUFFER_VIEW_OR_ERROUT(string, &buf);
}
@@ -706,19 +719,25 @@ _sha2_sha224_impl(PyObject *module, PyObject *string, int usedforsecurity)
/*[clinic input]
_sha2.sha512
- string: object(c_default="NULL") = b''
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string as string_obj: object(c_default="NULL") = None
Return a new SHA-512 hash object; optionally initialized with a string.
[clinic start generated code]*/
static PyObject *
-_sha2_sha512_impl(PyObject *module, PyObject *string, int usedforsecurity)
-/*[clinic end generated code: output=d55c8996eca214d7 input=0576ae2a6ebfad25]*/
+_sha2_sha512_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj)
+/*[clinic end generated code: output=cc3fcfce001a4538 input=19c9f2c06d59563a]*/
{
SHA512object *new;
Py_buffer buf;
+ PyObject *string;
+ if (_Py_hashlib_data_argument(&string, data, string_obj) < 0) {
+ return NULL;
+ }
sha2_state *state = sha2_get_state(module);
@@ -763,19 +782,25 @@ _sha2_sha512_impl(PyObject *module, PyObject *string, int usedforsecurity)
/*[clinic input]
_sha2.sha384
- string: object(c_default="NULL") = b''
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string as string_obj: object(c_default="NULL") = None
Return a new SHA-384 hash object; optionally initialized with a string.
[clinic start generated code]*/
static PyObject *
-_sha2_sha384_impl(PyObject *module, PyObject *string, int usedforsecurity)
-/*[clinic end generated code: output=b29a0d81d51d1368 input=4e9199d8de0d2f9b]*/
+_sha2_sha384_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj)
+/*[clinic end generated code: output=b6e3db593b5a0330 input=9fd50c942ad9e0bf]*/
{
SHA512object *new;
Py_buffer buf;
+ PyObject *string;
+ if (_Py_hashlib_data_argument(&string, data, string_obj) < 0) {
+ return NULL;
+ }
sha2_state *state = sha2_get_state(module);
diff --git a/Modules/sha3module.c b/Modules/sha3module.c
index a7edf5c66a1..cfbf0cbcc04 100644
--- a/Modules/sha3module.c
+++ b/Modules/sha3module.c
@@ -105,18 +105,25 @@ sha3_update(Hacl_Hash_SHA3_state_t *state, uint8_t *buf, Py_ssize_t len)
/*[clinic input]
@classmethod
_sha3.sha3_224.__new__ as py_sha3_new
- data: object(c_default="NULL") = b''
- /
+
+ data as data_obj: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Return a new SHA3 hash object.
[clinic start generated code]*/
static PyObject *
-py_sha3_new_impl(PyTypeObject *type, PyObject *data, int usedforsecurity)
-/*[clinic end generated code: output=90409addc5d5e8b0 input=637e5f8f6a93982a]*/
+py_sha3_new_impl(PyTypeObject *type, PyObject *data_obj, int usedforsecurity,
+ PyObject *string)
+/*[clinic end generated code: output=dcec1eca20395f2a input=c106e0b4e2d67d58]*/
{
+ PyObject *data;
+ if (_Py_hashlib_data_argument(&data, data_obj, string) < 0) {
+ return NULL;
+ }
+
Py_buffer buf = {NULL, NULL};
SHA3State *state = _PyType_GetModuleState(type);
SHA3object *self = newSHA3object(type);
@@ -503,14 +510,13 @@ _SHAKE_digest(PyObject *op, unsigned long digestlen, int hex)
_sha3.shake_128.digest
length: unsigned_long
- /
Return the digest value as a bytes object.
[clinic start generated code]*/
static PyObject *
_sha3_shake_128_digest_impl(SHA3object *self, unsigned long length)
-/*[clinic end generated code: output=2313605e2f87bb8f input=418ef6a36d2e6082]*/
+/*[clinic end generated code: output=2313605e2f87bb8f input=93d6d6ff32904f18]*/
{
return _SHAKE_digest((PyObject *)self, length, 0);
}
@@ -520,14 +526,13 @@ _sha3_shake_128_digest_impl(SHA3object *self, unsigned long length)
_sha3.shake_128.hexdigest
length: unsigned_long
- /
Return the digest value as a string of hexadecimal digits.
[clinic start generated code]*/
static PyObject *
_sha3_shake_128_hexdigest_impl(SHA3object *self, unsigned long length)
-/*[clinic end generated code: output=bf8e2f1e490944a8 input=69fb29b0926ae321]*/
+/*[clinic end generated code: output=bf8e2f1e490944a8 input=562d74e7060b56ab]*/
{
return _SHAKE_digest((PyObject *)self, length, 1);
}
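Removing the '/' from the clinic declarations above makes length a positional-or-keyword parameter of the SHAKE digest() and hexdigest() methods. A small sketch against the built-in _sha3 module changed here (the hashlib-level shake objects may instead come from the OpenSSL backend), assuming a build with this patch:

    import _sha3

    h = _sha3.shake_128(b"data")
    h.digest(16)              # positional call keeps working
    h.hexdigest(length=16)    # 'length' may now be passed by keyword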
diff --git a/Modules/syslogmodule.c b/Modules/syslogmodule.c
index 9c54af51402..ab20fff1509 100644
--- a/Modules/syslogmodule.c
+++ b/Modules/syslogmodule.c
@@ -56,7 +56,6 @@ Revision history:
#include "Python.h"
#include "osdefs.h" // SEP
-#include "pycore_sysmodule.h" // _PySys_GetOptionalAttrString()
#include <syslog.h>
@@ -92,7 +91,7 @@ syslog_get_argv(void)
Py_ssize_t slash;
PyObject *argv;
- if (_PySys_GetOptionalAttrString("argv", &argv) <= 0) {
+ if (PySys_GetOptionalAttrString("argv", &argv) <= 0) {
return NULL;
}
diff --git a/Modules/timemodule.c b/Modules/timemodule.c
index 1bfbf3f6a0b..3271d87ddc2 100644
--- a/Modules/timemodule.c
+++ b/Modules/timemodule.c
@@ -187,7 +187,7 @@ time_clockid_converter(PyObject *obj, clockid_t *p)
{
#ifdef _AIX
long long clk_id = PyLong_AsLongLong(obj);
-#elif defined(__DragonFly__)
+#elif defined(__DragonFly__) || defined(__CYGWIN__)
long clk_id = PyLong_AsLong(obj);
#else
int clk_id = PyLong_AsInt(obj);
diff --git a/Modules/zlibmodule.c b/Modules/zlibmodule.c
index d4b4b91697c..f7009364644 100644
--- a/Modules/zlibmodule.c
+++ b/Modules/zlibmodule.c
@@ -17,6 +17,16 @@
#error "At least zlib version 1.2.2.1 is required"
#endif
+#if (SIZEOF_OFF_T == SIZEOF_SIZE_T)
+# define convert_to_z_off_t PyLong_AsSsize_t
+#elif (SIZEOF_OFF_T == SIZEOF_LONG_LONG)
+# define convert_to_z_off_t PyLong_AsLongLong
+#elif (SIZEOF_OFF_T == SIZEOF_LONG)
+# define convert_to_z_off_t PyLong_AsLong
+#else
+# error off_t does not match either size_t, long, or long long!
+#endif
+
// Blocks output buffer wrappers
#include "pycore_blocks_output_buffer.h"
@@ -1877,6 +1887,44 @@ zlib_adler32_impl(PyObject *module, Py_buffer *data, unsigned int value)
}
/*[clinic input]
+zlib.adler32_combine -> unsigned_int
+
+ adler1: unsigned_int(bitwise=True)
+ Adler-32 checksum for sequence A
+
+ adler2: unsigned_int(bitwise=True)
+ Adler-32 checksum for sequence B
+
+ len2: object(subclass_of='&PyLong_Type')
+ Length of sequence B
+ /
+
+Combine two Adler-32 checksums into one.
+
+Given the Adler-32 checksum 'adler1' of a sequence A and the
+Adler-32 checksum 'adler2' of a sequence B of length 'len2',
+return the Adler-32 checksum of A and B concatenated.
+[clinic start generated code]*/
+
+static unsigned int
+zlib_adler32_combine_impl(PyObject *module, unsigned int adler1,
+ unsigned int adler2, PyObject *len2)
+/*[clinic end generated code: output=61842cefb16afb1b input=51bb045c95130c6f]*/
+{
+#if defined(Z_WANT64)
+ z_off64_t len = convert_to_z_off_t(len2);
+#else
+ z_off_t len = convert_to_z_off_t(len2);
+#endif
+ if (PyErr_Occurred()) {
+ return (unsigned int)-1;
+ }
+ return adler32_combine(adler1, adler2, len);
+}
+
+
+
+/*[clinic input]
zlib.crc32 -> unsigned_int
data: Py_buffer
@@ -1923,13 +1971,50 @@ zlib_crc32_impl(PyObject *module, Py_buffer *data, unsigned int value)
return value;
}
+/*[clinic input]
+zlib.crc32_combine -> unsigned_int
+
+ crc1: unsigned_int(bitwise=True)
+ CRC-32 checksum for sequence A
+
+ crc2: unsigned_int(bitwise=True)
+ CRC-32 checksum for sequence B
+
+ len2: object(subclass_of='&PyLong_Type')
+ Length of sequence B
+ /
+
+Combine two CRC-32 checksums into one.
+
+Given the CRC-32 checksum 'crc1' of a sequence A and the
+CRC-32 checksum 'crc2' of a sequence B of length 'len2',
+return the CRC-32 checksum of A and B concatenated.
+[clinic start generated code]*/
+
+static unsigned int
+zlib_crc32_combine_impl(PyObject *module, unsigned int crc1,
+ unsigned int crc2, PyObject *len2)
+/*[clinic end generated code: output=c4def907c602e6eb input=9c8a065d9040dc66]*/
+{
+#if defined(Z_WANT64)
+ z_off64_t len = convert_to_z_off_t(len2);
+#else
+ z_off_t len = convert_to_z_off_t(len2);
+#endif
+ if (PyErr_Occurred()) {
+ return (unsigned int)-1;
+ }
+ return crc32_combine(crc1, crc2, len);
+}
static PyMethodDef zlib_methods[] =
{
ZLIB_ADLER32_METHODDEF
+ ZLIB_ADLER32_COMBINE_METHODDEF
ZLIB_COMPRESS_METHODDEF
ZLIB_COMPRESSOBJ_METHODDEF
ZLIB_CRC32_METHODDEF
+ ZLIB_CRC32_COMBINE_METHODDEF
ZLIB_DECOMPRESS_METHODDEF
ZLIB_DECOMPRESSOBJ_METHODDEF
{NULL, NULL}
@@ -1981,14 +2066,17 @@ static PyType_Spec ZlibDecompressor_type_spec = {
.flags = (Py_TPFLAGS_DEFAULT | Py_TPFLAGS_IMMUTABLETYPE),
.slots = ZlibDecompressor_type_slots,
};
+
PyDoc_STRVAR(zlib_module_documentation,
"The functions in this module allow compression and decompression using the\n"
"zlib library, which is based on GNU zip.\n"
"\n"
"adler32(string[, start]) -- Compute an Adler-32 checksum.\n"
+"adler32_combine(adler1, adler2, len2, /) -- Combine two Adler-32 checksums.\n"
"compress(data[, level]) -- Compress data, with compression level 0-9 or -1.\n"
"compressobj([level[, ...]]) -- Return a compressor object.\n"
"crc32(string[, start]) -- Compute a CRC-32 checksum.\n"
+"crc32_combine(crc1, crc2, len2, /) -- Combine two CRC-32 checksums.\n"
"decompress(string,[wbits],[bufsize]) -- Decompresses a compressed string.\n"
"decompressobj([wbits[, zdict]]) -- Return a decompressor object.\n"
"\n"
diff --git a/Objects/call.c b/Objects/call.c
index b1610dababd..c9a18bcc3da 100644
--- a/Objects/call.c
+++ b/Objects/call.c
@@ -834,12 +834,15 @@ PyObject_VectorcallMethod(PyObject *name, PyObject *const *args,
assert(PyVectorcall_NARGS(nargsf) >= 1);
PyThreadState *tstate = _PyThreadState_GET();
- PyObject *callable = NULL;
+ _PyCStackRef method;
+ _PyThreadState_PushCStackRef(tstate, &method);
/* Use args[0] as "self" argument */
- int unbound = _PyObject_GetMethod(args[0], name, &callable);
- if (callable == NULL) {
+ int unbound = _PyObject_GetMethodStackRef(tstate, args[0], name, &method.ref);
+ if (PyStackRef_IsNull(method.ref)) {
+ _PyThreadState_PopCStackRef(tstate, &method);
return NULL;
}
+ PyObject *callable = PyStackRef_AsPyObjectBorrow(method.ref);
if (unbound) {
/* We must remove PY_VECTORCALL_ARGUMENTS_OFFSET since
@@ -855,7 +858,7 @@ PyObject_VectorcallMethod(PyObject *name, PyObject *const *args,
EVAL_CALL_STAT_INC_IF_FUNCTION(EVAL_CALL_METHOD, callable);
PyObject *result = _PyObject_VectorcallTstate(tstate, callable,
args, nargsf, kwnames);
- Py_DECREF(callable);
+ _PyThreadState_PopCStackRef(tstate, &method);
return result;
}
@@ -868,11 +871,14 @@ PyObject_CallMethodObjArgs(PyObject *obj, PyObject *name, ...)
return null_error(tstate);
}
- PyObject *callable = NULL;
- int is_method = _PyObject_GetMethod(obj, name, &callable);
- if (callable == NULL) {
+ _PyCStackRef method;
+ _PyThreadState_PushCStackRef(tstate, &method);
+ int is_method = _PyObject_GetMethodStackRef(tstate, obj, name, &method.ref);
+ if (PyStackRef_IsNull(method.ref)) {
+ _PyThreadState_PopCStackRef(tstate, &method);
return NULL;
}
+ PyObject *callable = PyStackRef_AsPyObjectBorrow(method.ref);
obj = is_method ? obj : NULL;
va_list vargs;
@@ -880,7 +886,7 @@ PyObject_CallMethodObjArgs(PyObject *obj, PyObject *name, ...)
PyObject *result = object_vacall(tstate, obj, callable, vargs);
va_end(vargs);
- Py_DECREF(callable);
+ _PyThreadState_PopCStackRef(tstate, &method);
return result;
}
@@ -897,12 +903,15 @@ _PyObject_CallMethodIdObjArgs(PyObject *obj, _Py_Identifier *name, ...)
if (!oname) {
return NULL;
}
-
- PyObject *callable = NULL;
- int is_method = _PyObject_GetMethod(obj, oname, &callable);
- if (callable == NULL) {
+ _PyCStackRef method;
+ _PyThreadState_PushCStackRef(tstate, &method);
+ int is_method = _PyObject_GetMethodStackRef(tstate, obj, oname, &method.ref);
+ if (PyStackRef_IsNull(method.ref)) {
+ _PyThreadState_PopCStackRef(tstate, &method);
return NULL;
}
+ PyObject *callable = PyStackRef_AsPyObjectBorrow(method.ref);
+
obj = is_method ? obj : NULL;
va_list vargs;
@@ -910,7 +919,7 @@ _PyObject_CallMethodIdObjArgs(PyObject *obj, _Py_Identifier *name, ...)
PyObject *result = object_vacall(tstate, obj, callable, vargs);
va_end(vargs);
- Py_DECREF(callable);
+ _PyThreadState_PopCStackRef(tstate, &method);
return result;
}
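The three call.c hunks above converge on the same internal idiom: the looked-up method is held in a C stack reference that is pushed before the lookup and popped on every exit path, replacing the strong reference previously returned by _PyObject_GetMethod(). A condensed sketch of the pattern for a no-argument method call (internal CPython API, names as used in this patch; illustrative only):

static PyObject *
call_method_no_args(PyThreadState *tstate, PyObject *obj, PyObject *name)
{
    _PyCStackRef method;
    _PyThreadState_PushCStackRef(tstate, &method);
    int unbound = _PyObject_GetMethodStackRef(tstate, obj, name, &method.ref);
    if (PyStackRef_IsNull(method.ref)) {
        _PyThreadState_PopCStackRef(tstate, &method);  /* lookup failed, exception set */
        return NULL;
    }
    /* Borrowed view; the stack ref keeps the callable alive. */
    PyObject *callable = PyStackRef_AsPyObjectBorrow(method.ref);
    PyObject *result;
    if (unbound) {
        /* Unbound descriptor: pass obj explicitly as "self". */
        result = PyObject_CallOneArg(callable, obj);
    }
    else {
        result = PyObject_CallNoArgs(callable);
    }
    _PyThreadState_PopCStackRef(tstate, &method);      /* replaces Py_DECREF(callable) */
    return result;
}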
diff --git a/Objects/frameobject.c b/Objects/frameobject.c
index 76b52efccf8..601fc69c4b1 100644
--- a/Objects/frameobject.c
+++ b/Objects/frameobject.c
@@ -1386,6 +1386,10 @@ mark_stacks(PyCodeObject *code_obj, int len)
stacks[j] = next_stack;
break;
case GET_ITER:
+ next_stack = push_value(pop_value(next_stack), Iterator);
+ next_stack = push_value(next_stack, Iterator);
+ stacks[next_i] = next_stack;
+ break;
case GET_AITER:
next_stack = push_value(pop_value(next_stack), Iterator);
stacks[next_i] = next_stack;
diff --git a/Objects/funcobject.c b/Objects/funcobject.c
index 27214a129c2..f87b0e5d8f1 100644
--- a/Objects/funcobject.c
+++ b/Objects/funcobject.c
@@ -1264,26 +1264,32 @@ _PyFunction_VerifyStateless(PyThreadState *tstate, PyObject *func)
}
// Disallow __defaults__.
PyObject *defaults = PyFunction_GET_DEFAULTS(func);
- if (defaults != NULL && defaults != Py_None && PyDict_Size(defaults) > 0)
- {
- _PyErr_SetString(tstate, PyExc_ValueError, "defaults not supported");
- return -1;
+ if (defaults != NULL) {
+ assert(PyTuple_Check(defaults)); // per PyFunction_New()
+ if (PyTuple_GET_SIZE(defaults) > 0) {
+ _PyErr_SetString(tstate, PyExc_ValueError,
+ "defaults not supported");
+ return -1;
+ }
}
// Disallow __kwdefaults__.
PyObject *kwdefaults = PyFunction_GET_KW_DEFAULTS(func);
- if (kwdefaults != NULL && kwdefaults != Py_None
- && PyDict_Size(kwdefaults) > 0)
- {
- _PyErr_SetString(tstate, PyExc_ValueError,
- "keyword defaults not supported");
- return -1;
+ if (kwdefaults != NULL) {
+ assert(PyDict_Check(kwdefaults)); // per PyFunction_New()
+ if (PyDict_Size(kwdefaults) > 0) {
+ _PyErr_SetString(tstate, PyExc_ValueError,
+ "keyword defaults not supported");
+ return -1;
+ }
}
// Disallow __closure__.
PyObject *closure = PyFunction_GET_CLOSURE(func);
- if (closure != NULL && closure != Py_None && PyTuple_GET_SIZE(closure) > 0)
- {
- _PyErr_SetString(tstate, PyExc_ValueError, "closures not supported");
- return -1;
+ if (closure != NULL) {
+ assert(PyTuple_Check(closure)); // per PyFunction_New()
+ if (PyTuple_GET_SIZE(closure) > 0) {
+ _PyErr_SetString(tstate, PyExc_ValueError, "closures not supported");
+ return -1;
+ }
}
// Check the code.
PyCodeObject *co = (PyCodeObject *)PyFunction_GET_CODE(func);
diff --git a/Objects/genericaliasobject.c b/Objects/genericaliasobject.c
index ec3d01f00a3..07b57f0c552 100644
--- a/Objects/genericaliasobject.c
+++ b/Objects/genericaliasobject.c
@@ -65,7 +65,7 @@ ga_repr_items_list(PyUnicodeWriter *writer, PyObject *p)
for (Py_ssize_t i = 0; i < len; i++) {
if (i > 0) {
- if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, ", ", 2) < 0) {
return -1;
}
}
@@ -109,7 +109,7 @@ ga_repr(PyObject *self)
}
for (Py_ssize_t i = 0; i < len; i++) {
if (i > 0) {
- if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, ", ", 2) < 0) {
goto error;
}
}
@@ -126,7 +126,7 @@ ga_repr(PyObject *self)
}
if (len == 0) {
// for something like tuple[()] we should print a "()"
- if (PyUnicodeWriter_WriteUTF8(writer, "()", 2) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, "()", 2) < 0) {
goto error;
}
}
diff --git a/Objects/genobject.c b/Objects/genobject.c
index 98b2c5004df..da1462deaaa 100644
--- a/Objects/genobject.c
+++ b/Objects/genobject.c
@@ -1451,7 +1451,9 @@ typedef struct PyAsyncGenAThrow {
/* Can be NULL, when in the "aclose()" mode
(equivalent of "athrow(GeneratorExit)") */
- PyObject *agt_args;
+ PyObject *agt_typ;
+ PyObject *agt_tb;
+ PyObject *agt_val;
AwaitableState agt_state;
} PyAsyncGenAThrow;
@@ -2078,7 +2080,9 @@ async_gen_athrow_dealloc(PyObject *self)
_PyObject_GC_UNTRACK(self);
Py_CLEAR(agt->agt_gen);
- Py_CLEAR(agt->agt_args);
+ Py_XDECREF(agt->agt_typ);
+ Py_XDECREF(agt->agt_tb);
+ Py_XDECREF(agt->agt_val);
PyObject_GC_Del(self);
}
@@ -2088,7 +2092,9 @@ async_gen_athrow_traverse(PyObject *self, visitproc visit, void *arg)
{
PyAsyncGenAThrow *agt = _PyAsyncGenAThrow_CAST(self);
Py_VISIT(agt->agt_gen);
- Py_VISIT(agt->agt_args);
+ Py_VISIT(agt->agt_typ);
+ Py_VISIT(agt->agt_tb);
+ Py_VISIT(agt->agt_val);
return 0;
}
@@ -2116,7 +2122,7 @@ async_gen_athrow_send(PyObject *self, PyObject *arg)
if (o->agt_state == AWAITABLE_STATE_INIT) {
if (o->agt_gen->ag_running_async) {
o->agt_state = AWAITABLE_STATE_CLOSED;
- if (o->agt_args == NULL) {
+ if (o->agt_typ == NULL) {
PyErr_SetString(
PyExc_RuntimeError,
"aclose(): asynchronous generator is already running");
@@ -2143,7 +2149,7 @@ async_gen_athrow_send(PyObject *self, PyObject *arg)
o->agt_state = AWAITABLE_STATE_ITER;
o->agt_gen->ag_running_async = 1;
- if (o->agt_args == NULL) {
+ if (o->agt_typ == NULL) {
/* aclose() mode */
o->agt_gen->ag_closed = 1;
@@ -2157,19 +2163,10 @@ async_gen_athrow_send(PyObject *self, PyObject *arg)
goto yield_close;
}
} else {
- PyObject *typ;
- PyObject *tb = NULL;
- PyObject *val = NULL;
-
- if (!PyArg_UnpackTuple(o->agt_args, "athrow", 1, 3,
- &typ, &val, &tb)) {
- return NULL;
- }
-
retval = _gen_throw((PyGenObject *)gen,
0, /* Do not close generator when
PyExc_GeneratorExit is passed */
- typ, val, tb);
+ o->agt_typ, o->agt_val, o->agt_tb);
retval = async_gen_unwrap_value(o->agt_gen, retval);
}
if (retval == NULL) {
@@ -2181,7 +2178,7 @@ async_gen_athrow_send(PyObject *self, PyObject *arg)
assert(o->agt_state == AWAITABLE_STATE_ITER);
retval = gen_send((PyObject *)gen, arg);
- if (o->agt_args) {
+ if (o->agt_typ) {
return async_gen_unwrap_value(o->agt_gen, retval);
} else {
/* aclose() mode */
@@ -2212,7 +2209,7 @@ check_error:
if (PyErr_ExceptionMatches(PyExc_StopAsyncIteration) ||
PyErr_ExceptionMatches(PyExc_GeneratorExit))
{
- if (o->agt_args == NULL) {
+ if (o->agt_typ == NULL) {
/* when aclose() is called we don't want to propagate
StopAsyncIteration or GeneratorExit; just raise
StopIteration, signalling that this 'aclose()' await
@@ -2241,7 +2238,7 @@ async_gen_athrow_throw(PyObject *self, PyObject *const *args, Py_ssize_t nargs)
if (o->agt_state == AWAITABLE_STATE_INIT) {
if (o->agt_gen->ag_running_async) {
o->agt_state = AWAITABLE_STATE_CLOSED;
- if (o->agt_args == NULL) {
+ if (o->agt_typ == NULL) {
PyErr_SetString(
PyExc_RuntimeError,
"aclose(): asynchronous generator is already running");
@@ -2259,7 +2256,7 @@ async_gen_athrow_throw(PyObject *self, PyObject *const *args, Py_ssize_t nargs)
}
PyObject *retval = gen_throw((PyObject*)o->agt_gen, args, nargs);
- if (o->agt_args) {
+ if (o->agt_typ) {
retval = async_gen_unwrap_value(o->agt_gen, retval);
if (retval == NULL) {
o->agt_gen->ag_running_async = 0;
@@ -2334,7 +2331,7 @@ async_gen_athrow_finalize(PyObject *op)
{
PyAsyncGenAThrow *o = (PyAsyncGenAThrow*)op;
if (o->agt_state == AWAITABLE_STATE_INIT) {
- PyObject *method = o->agt_args ? &_Py_ID(athrow) : &_Py_ID(aclose);
+ PyObject *method = o->agt_typ ? &_Py_ID(athrow) : &_Py_ID(aclose);
_PyErr_WarnUnawaitedAgenMethod(o->agt_gen, method);
}
}
@@ -2403,13 +2400,23 @@ PyTypeObject _PyAsyncGenAThrow_Type = {
static PyObject *
async_gen_athrow_new(PyAsyncGenObject *gen, PyObject *args)
{
+ PyObject *typ = NULL;
+ PyObject *tb = NULL;
+ PyObject *val = NULL;
+ if (args && !PyArg_UnpackTuple(args, "athrow", 1, 3, &typ, &val, &tb)) {
+ return NULL;
+ }
+
PyAsyncGenAThrow *o;
o = PyObject_GC_New(PyAsyncGenAThrow, &_PyAsyncGenAThrow_Type);
if (o == NULL) {
return NULL;
}
o->agt_gen = (PyAsyncGenObject*)Py_NewRef(gen);
- o->agt_args = Py_XNewRef(args);
+ o->agt_typ = Py_XNewRef(typ);
+ o->agt_tb = Py_XNewRef(tb);
+ o->agt_val = Py_XNewRef(val);
+
o->agt_state = AWAITABLE_STATE_INIT;
_PyObject_GC_TRACK((PyObject*)o);
return (PyObject*)o;
diff --git a/Objects/longobject.c b/Objects/longobject.c
index 0b2dfa003fa..2b533312fee 100644
--- a/Objects/longobject.c
+++ b/Objects/longobject.c
@@ -971,16 +971,9 @@ _PyLong_FromByteArray(const unsigned char* bytes, size_t n,
++numsignificantbytes;
}
- /* How many Python int digits do we need? We have
- 8*numsignificantbytes bits, and each Python int digit has
- PyLong_SHIFT bits, so it's the ceiling of the quotient. */
- /* catch overflow before it happens */
- if (numsignificantbytes > (PY_SSIZE_T_MAX - PyLong_SHIFT) / 8) {
- PyErr_SetString(PyExc_OverflowError,
- "byte array too long to convert to int");
- return NULL;
- }
- ndigits = (numsignificantbytes * 8 + PyLong_SHIFT - 1) / PyLong_SHIFT;
+ /* avoid integer overflow */
+ ndigits = numsignificantbytes / PyLong_SHIFT * 8
+ + (numsignificantbytes % PyLong_SHIFT * 8 + PyLong_SHIFT - 1) / PyLong_SHIFT;
v = long_alloc(ndigits);
if (v == NULL)
return NULL;
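The replacement expression still computes the ceiling of 8*numsignificantbytes / PyLong_SHIFT, but without forming the product 8*numsignificantbytes that previously needed an explicit overflow guard: writing n = q*PyLong_SHIFT + r gives ceil(8*n/PyLong_SHIFT) = 8*q + ceil(8*r/PyLong_SHIFT), and 8*r is bounded by 8*PyLong_SHIFT. A small self-check of that identity (illustrative only; PyLong_SHIFT assumed to be 30, as on typical builds):

#include <assert.h>

int main(void)
{
    const long SHIFT = 30;  /* PyLong_SHIFT on typical builds; 15 on some */
    for (long n = 0; n < 100000; n++) {
        long direct = (n * 8 + SHIFT - 1) / SHIFT;                  /* needs 8*n to fit */
        long split = n / SHIFT * 8 + (n % SHIFT * 8 + SHIFT - 1) / SHIFT;
        assert(direct == split);
    }
    return 0;
}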
diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c
index f363ef173cb..ba86b41e945 100644
--- a/Objects/moduleobject.c
+++ b/Objects/moduleobject.c
@@ -12,7 +12,6 @@
#include "pycore_object.h" // _PyType_AllocNoTrack
#include "pycore_pyerrors.h" // _PyErr_FormatFromCause()
#include "pycore_pystate.h" // _PyInterpreterState_GET()
-#include "pycore_sysmodule.h" // _PySys_GetOptionalAttrString()
#include "pycore_unicodeobject.h" // _PyUnicode_EqualToASCIIString()
#include "osdefs.h" // MAXPATHLEN
@@ -1058,7 +1057,7 @@ _Py_module_getattro_impl(PyModuleObject *m, PyObject *name, int suppress)
int is_possibly_shadowing_stdlib = 0;
if (is_possibly_shadowing) {
PyObject *stdlib_modules;
- if (_PySys_GetOptionalAttrString("stdlib_module_names", &stdlib_modules) < 0) {
+ if (PySys_GetOptionalAttrString("stdlib_module_names", &stdlib_modules) < 0) {
goto done;
}
if (stdlib_modules && PyAnySet_Check(stdlib_modules)) {
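The call switches from the private _PySys_GetOptionalAttrString() to the public PySys_GetOptionalAttrString() exported later in this patch. A hedged usage sketch, assuming the semantics visible at this call site (negative return on error; otherwise the output argument receives a new reference, or NULL when the attribute is absent); the helper name is hypothetical:

static int
have_stdlib_module_names(void)
{
    PyObject *names = NULL;
    if (PySys_GetOptionalAttrString("stdlib_module_names", &names) < 0) {
        return -1;          /* error, exception set */
    }
    if (names == NULL) {
        return 0;           /* sys attribute missing, no exception */
    }
    int result = PyAnySet_Check(names) ? 1 : 0;
    Py_DECREF(names);
    return result;
}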
diff --git a/Objects/object.c b/Objects/object.c
index af1aa217f75..9fe61ba7f15 100644
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -1664,6 +1664,116 @@ _PyObject_GetMethod(PyObject *obj, PyObject *name, PyObject **method)
return 0;
}
+int
+_PyObject_GetMethodStackRef(PyThreadState *ts, PyObject *obj,
+ PyObject *name, _PyStackRef *method)
+{
+ int meth_found = 0;
+
+ assert(PyStackRef_IsNull(*method));
+
+ PyTypeObject *tp = Py_TYPE(obj);
+ if (!_PyType_IsReady(tp)) {
+ if (PyType_Ready(tp) < 0) {
+ return 0;
+ }
+ }
+
+ if (tp->tp_getattro != PyObject_GenericGetAttr || !PyUnicode_CheckExact(name)) {
+ PyObject *res = PyObject_GetAttr(obj, name);
+ if (res != NULL) {
+ *method = PyStackRef_FromPyObjectSteal(res);
+ }
+ return 0;
+ }
+
+ _PyType_LookupStackRefAndVersion(tp, name, method);
+ PyObject *descr = PyStackRef_AsPyObjectBorrow(*method);
+ descrgetfunc f = NULL;
+ if (descr != NULL) {
+ if (_PyType_HasFeature(Py_TYPE(descr), Py_TPFLAGS_METHOD_DESCRIPTOR)) {
+ meth_found = 1;
+ }
+ else {
+ f = Py_TYPE(descr)->tp_descr_get;
+ if (f != NULL && PyDescr_IsData(descr)) {
+ PyObject *value = f(descr, obj, (PyObject *)Py_TYPE(obj));
+ PyStackRef_CLEAR(*method);
+ if (value != NULL) {
+ *method = PyStackRef_FromPyObjectSteal(value);
+ }
+ return 0;
+ }
+ }
+ }
+ PyObject *dict, *attr;
+ if ((tp->tp_flags & Py_TPFLAGS_INLINE_VALUES) &&
+ _PyObject_TryGetInstanceAttribute(obj, name, &attr)) {
+ if (attr != NULL) {
+ PyStackRef_CLEAR(*method);
+ *method = PyStackRef_FromPyObjectSteal(attr);
+ return 0;
+ }
+ dict = NULL;
+ }
+ else if ((tp->tp_flags & Py_TPFLAGS_MANAGED_DICT)) {
+ dict = (PyObject *)_PyObject_GetManagedDict(obj);
+ }
+ else {
+ PyObject **dictptr = _PyObject_ComputedDictPointer(obj);
+ if (dictptr != NULL) {
+ dict = FT_ATOMIC_LOAD_PTR_ACQUIRE(*dictptr);
+ }
+ else {
+ dict = NULL;
+ }
+ }
+ if (dict != NULL) {
+ // TODO: use _Py_dict_lookup_threadsafe_stackref
+ Py_INCREF(dict);
+ PyObject *value;
+ if (PyDict_GetItemRef(dict, name, &value) != 0) {
+ // found or error
+ Py_DECREF(dict);
+ PyStackRef_CLEAR(*method);
+ if (value != NULL) {
+ *method = PyStackRef_FromPyObjectSteal(value);
+ }
+ return 0;
+ }
+ // not found
+ Py_DECREF(dict);
+ }
+
+ if (meth_found) {
+ assert(!PyStackRef_IsNull(*method));
+ return 1;
+ }
+
+ if (f != NULL) {
+ PyObject *value = f(descr, obj, (PyObject *)Py_TYPE(obj));
+ PyStackRef_CLEAR(*method);
+ if (value) {
+ *method = PyStackRef_FromPyObjectSteal(value);
+ }
+ return 0;
+ }
+
+ if (descr != NULL) {
+ assert(!PyStackRef_IsNull(*method));
+ return 0;
+ }
+
+ PyErr_Format(PyExc_AttributeError,
+ "'%.100s' object has no attribute '%U'",
+ tp->tp_name, name);
+
+ _PyObject_SetAttributeErrorContext(obj, name);
+ assert(PyStackRef_IsNull(*method));
+ return 0;
+}
+
+
/* Generic GetAttr functions - put these in your tp_[gs]etattro slot. */
PyObject *
@@ -1906,40 +2016,11 @@ PyObject_GenericSetAttr(PyObject *obj, PyObject *name, PyObject *value)
int
PyObject_GenericSetDict(PyObject *obj, PyObject *value, void *context)
{
- PyObject **dictptr = _PyObject_GetDictPtr(obj);
- if (dictptr == NULL) {
- if (_PyType_HasFeature(Py_TYPE(obj), Py_TPFLAGS_INLINE_VALUES) &&
- _PyObject_GetManagedDict(obj) == NULL
- ) {
- /* Was unable to convert to dict */
- PyErr_NoMemory();
- }
- else {
- PyErr_SetString(PyExc_AttributeError,
- "This object has no __dict__");
- }
- return -1;
- }
if (value == NULL) {
PyErr_SetString(PyExc_TypeError, "cannot delete __dict__");
return -1;
}
- if (!PyDict_Check(value)) {
- PyErr_Format(PyExc_TypeError,
- "__dict__ must be set to a dictionary, "
- "not a '%.200s'", Py_TYPE(value)->tp_name);
- return -1;
- }
- Py_BEGIN_CRITICAL_SECTION(obj);
- PyObject *olddict = *dictptr;
- FT_ATOMIC_STORE_PTR_RELEASE(*dictptr, Py_NewRef(value));
-#ifdef Py_GIL_DISABLED
- _PyObject_XDecRefDelayed(olddict);
-#else
- Py_XDECREF(olddict);
-#endif
- Py_END_CRITICAL_SECTION();
- return 0;
+ return _PyObject_SetDict(obj, value);
}
diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c
index b209808da90..d3931aab623 100644
--- a/Objects/obmalloc.c
+++ b/Objects/obmalloc.c
@@ -1238,7 +1238,7 @@ work_queue_first(struct llist_node *head)
}
static void
-process_queue(struct llist_node *head, struct _qsbr_thread_state *qsbr,
+process_queue(struct llist_node *head, _PyThreadStateImpl *tstate,
bool keep_empty, delayed_dealloc_cb cb, void *state)
{
while (!llist_empty(head)) {
@@ -1246,7 +1246,7 @@ process_queue(struct llist_node *head, struct _qsbr_thread_state *qsbr,
if (buf->rd_idx < buf->wr_idx) {
struct _mem_work_item *item = &buf->array[buf->rd_idx];
- if (!_Py_qsbr_poll(qsbr, item->qsbr_goal)) {
+ if (!_Py_qsbr_poll(tstate->qsbr, item->qsbr_goal)) {
return;
}
@@ -1270,11 +1270,11 @@ process_queue(struct llist_node *head, struct _qsbr_thread_state *qsbr,
static void
process_interp_queue(struct _Py_mem_interp_free_queue *queue,
- struct _qsbr_thread_state *qsbr, delayed_dealloc_cb cb,
+ _PyThreadStateImpl *tstate, delayed_dealloc_cb cb,
void *state)
{
assert(PyMutex_IsLocked(&queue->mutex));
- process_queue(&queue->head, qsbr, false, cb, state);
+ process_queue(&queue->head, tstate, false, cb, state);
int more_work = !llist_empty(&queue->head);
_Py_atomic_store_int_relaxed(&queue->has_work, more_work);
@@ -1282,7 +1282,7 @@ process_interp_queue(struct _Py_mem_interp_free_queue *queue,
static void
maybe_process_interp_queue(struct _Py_mem_interp_free_queue *queue,
- struct _qsbr_thread_state *qsbr, delayed_dealloc_cb cb,
+ _PyThreadStateImpl *tstate, delayed_dealloc_cb cb,
void *state)
{
if (!_Py_atomic_load_int_relaxed(&queue->has_work)) {
@@ -1291,7 +1291,7 @@ maybe_process_interp_queue(struct _Py_mem_interp_free_queue *queue,
// Try to acquire the lock, but don't block if it's already held.
if (_PyMutex_LockTimed(&queue->mutex, 0, 0) == PY_LOCK_ACQUIRED) {
- process_interp_queue(queue, qsbr, cb, state);
+ process_interp_queue(queue, tstate, cb, state);
PyMutex_Unlock(&queue->mutex);
}
}
@@ -1303,10 +1303,10 @@ _PyMem_ProcessDelayed(PyThreadState *tstate)
_PyThreadStateImpl *tstate_impl = (_PyThreadStateImpl *)tstate;
// Process thread-local work
- process_queue(&tstate_impl->mem_free_queue, tstate_impl->qsbr, true, NULL, NULL);
+ process_queue(&tstate_impl->mem_free_queue, tstate_impl, true, NULL, NULL);
// Process shared interpreter work
- maybe_process_interp_queue(&interp->mem_free_queue, tstate_impl->qsbr, NULL, NULL);
+ maybe_process_interp_queue(&interp->mem_free_queue, tstate_impl, NULL, NULL);
}
void
@@ -1316,10 +1316,10 @@ _PyMem_ProcessDelayedNoDealloc(PyThreadState *tstate, delayed_dealloc_cb cb, voi
_PyThreadStateImpl *tstate_impl = (_PyThreadStateImpl *)tstate;
// Process thread-local work
- process_queue(&tstate_impl->mem_free_queue, tstate_impl->qsbr, true, cb, state);
+ process_queue(&tstate_impl->mem_free_queue, tstate_impl, true, cb, state);
// Process shared interpreter work
- maybe_process_interp_queue(&interp->mem_free_queue, tstate_impl->qsbr, cb, state);
+ maybe_process_interp_queue(&interp->mem_free_queue, tstate_impl, cb, state);
}
void
@@ -1348,7 +1348,7 @@ _PyMem_AbandonDelayed(PyThreadState *tstate)
// Process the merged queue now (see gh-130794).
_PyThreadStateImpl *this_tstate = (_PyThreadStateImpl *)_PyThreadState_GET();
- process_interp_queue(&interp->mem_free_queue, this_tstate->qsbr, NULL, NULL);
+ process_interp_queue(&interp->mem_free_queue, this_tstate, NULL, NULL);
PyMutex_Unlock(&interp->mem_free_queue.mutex);
diff --git a/Objects/typeobject.c b/Objects/typeobject.c
index ee09289425b..db923c16477 100644
--- a/Objects/typeobject.c
+++ b/Objects/typeobject.c
@@ -48,7 +48,7 @@ class object "PyObject *" "&PyBaseObject_Type"
& ((1 << MCACHE_SIZE_EXP) - 1))
#define MCACHE_HASH_METHOD(type, name) \
- MCACHE_HASH(FT_ATOMIC_LOAD_UINT32_RELAXED((type)->tp_version_tag), \
+ MCACHE_HASH(FT_ATOMIC_LOAD_UINT_RELAXED((type)->tp_version_tag), \
((Py_ssize_t)(name)) >> 3)
#define MCACHE_CACHEABLE_NAME(name) \
PyUnicode_CheckExact(name) && \
@@ -60,11 +60,19 @@ class object "PyObject *" "&PyBaseObject_Type"
#ifdef Py_GIL_DISABLED
-// There's a global lock for mutation of types. This avoids having to take
-// additional locks while doing various subclass processing which may result
-// in odd behaviors w.r.t. running with the GIL as the outer type lock could
-// be released and reacquired during a subclass update if there's contention
-// on the subclass lock.
+// There's a global lock for types that ensures that tp_version_tag and
+// _spec_cache are correctly updated if the type is modified. It also protects
+// tp_mro, tp_bases, and tp_base. This avoids having to take additional locks
+// while doing various subclass processing which may result in odd behaviors
+// w.r.t. running with the GIL as the outer type lock could be released and
+// reacquired during a subclass update if there's contention on the subclass
+// lock.
+//
+// Note that this lock does not protect updates of other type slots or the
+// tp_flags member. Instead, we either ensure those updates are done before
+// the type has been revealed to other threads or we only do those updates
+// while the stop-the-world mechanism is active. The slots and flags are read
+// in many places without holding a lock and without atomics.
#define TYPE_LOCK &PyInterpreterState_Get()->types.mutex
#define BEGIN_TYPE_LOCK() Py_BEGIN_CRITICAL_SECTION_MUT(TYPE_LOCK)
#define END_TYPE_LOCK() Py_END_CRITICAL_SECTION()
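The hunks that follow put this policy into practice: bookkeeping the comment says is guarded by the lock (version tags, MRO, bases) stays inside TYPE_LOCK critical sections, while stores to slots and tp_flags, which are read without locks or atomics, are done with the world stopped. A condensed sketch of the combined pattern, mirroring type_set_abstractmethods() later in this patch (free-threaded build; illustrative only):

static void
mark_type_abstract(PyTypeObject *tp)
{
    BEGIN_TYPE_LOCK();
    type_modified_unlocked(tp);      /* version-tag invalidation: needs TYPE_LOCK */
    types_stop_world();              /* tp_flags has unlocked readers */
    type_add_flags(tp, Py_TPFLAGS_IS_ABSTRACT);
    types_start_world();
    END_TYPE_LOCK();
}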
@@ -74,8 +82,100 @@ class object "PyObject *" "&PyBaseObject_Type"
#define END_TYPE_DICT_LOCK() Py_END_CRITICAL_SECTION2()
+#ifdef Py_DEBUG
+// Return true if the world is currently stopped.
+static bool
+types_world_is_stopped(void)
+{
+ PyInterpreterState *interp = _PyInterpreterState_GET();
+ return interp->stoptheworld.world_stopped;
+}
+#endif
+
+// Checks that the type has not yet been revealed (exposed) to other
+// threads. The _Py_TYPE_REVEALED_FLAG flag is set by type_new() and
+// PyType_FromMetaclass() to indicate that a newly initialized type might be
+// revealed. We only have ob_flags on 64-bit platforms.
+#if SIZEOF_VOID_P > 4
+#define TYPE_IS_REVEALED(tp) ((((PyObject *)(tp))->ob_flags & _Py_TYPE_REVEALED_FLAG) != 0)
+#else
+#define TYPE_IS_REVEALED(tp) 0
+#endif
+
+#ifdef Py_DEBUG
#define ASSERT_TYPE_LOCK_HELD() \
- _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(TYPE_LOCK)
+ if (!types_world_is_stopped()) { _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(TYPE_LOCK); }
+
+// Checks if we can safely update type slots or tp_flags.
+#define ASSERT_WORLD_STOPPED_OR_NEW_TYPE(tp) \
+ assert(!TYPE_IS_REVEALED(tp) || types_world_is_stopped())
+
+#define ASSERT_NEW_TYPE_OR_LOCKED(tp) \
+ if (TYPE_IS_REVEALED(tp)) { ASSERT_TYPE_LOCK_HELD(); }
+#else
+#define ASSERT_TYPE_LOCK_HELD()
+#define ASSERT_WORLD_STOPPED_OR_NEW_TYPE(tp)
+#define ASSERT_NEW_TYPE_OR_LOCKED(tp)
+#endif
+
+static void
+types_stop_world(void)
+{
+ assert(!types_world_is_stopped());
+ PyInterpreterState *interp = _PyInterpreterState_GET();
+ _PyEval_StopTheWorld(interp);
+ assert(types_world_is_stopped());
+}
+
+static void
+types_start_world(void)
+{
+ assert(types_world_is_stopped());
+ PyInterpreterState *interp = _PyInterpreterState_GET();
+ _PyEval_StartTheWorld(interp);
+ assert(!types_world_is_stopped());
+}
+
+// This is used to temporarily prevent the TYPE_LOCK from being suspended
+// when held by the topmost critical section.
+static void
+type_lock_prevent_release(void)
+{
+ PyThreadState *tstate = _PyThreadState_GET();
+ uintptr_t *tagptr = &tstate->critical_section;
+ PyCriticalSection *c = (PyCriticalSection *)(*tagptr & ~_Py_CRITICAL_SECTION_MASK);
+ if (!(*tagptr & _Py_CRITICAL_SECTION_TWO_MUTEXES)) {
+ assert(c->_cs_mutex == TYPE_LOCK);
+ c->_cs_mutex = NULL;
+ }
+ else {
+ PyCriticalSection2 *c2 = (PyCriticalSection2 *)c;
+ if (c->_cs_mutex == TYPE_LOCK) {
+ c->_cs_mutex = c2->_cs_mutex2;
+ c2->_cs_mutex2 = NULL;
+ } else {
+ assert(c2->_cs_mutex2 == TYPE_LOCK);
+ c2->_cs_mutex2 = NULL;
+ }
+ }
+}
+
+static void
+type_lock_allow_release(void)
+{
+ PyThreadState *tstate = _PyThreadState_GET();
+ uintptr_t *tagptr = &tstate->critical_section;
+ PyCriticalSection *c = (PyCriticalSection *)(*tagptr & ~_Py_CRITICAL_SECTION_MASK);
+ if (!(*tagptr & _Py_CRITICAL_SECTION_TWO_MUTEXES)) {
+ assert(c->_cs_mutex == NULL);
+ c->_cs_mutex = TYPE_LOCK;
+ }
+ else {
+ PyCriticalSection2 *c2 = (PyCriticalSection2 *)c;
+ assert(c2->_cs_mutex2 == NULL);
+ c2->_cs_mutex2 = TYPE_LOCK;
+ }
+}
#else
@@ -84,6 +184,12 @@ class object "PyObject *" "&PyBaseObject_Type"
#define BEGIN_TYPE_DICT_LOCK(d)
#define END_TYPE_DICT_LOCK()
#define ASSERT_TYPE_LOCK_HELD()
+#define TYPE_IS_REVEALED(tp) 0
+#define ASSERT_WORLD_STOPPED_OR_NEW_TYPE(tp)
+#define ASSERT_NEW_TYPE_OR_LOCKED(tp)
+#define types_world_is_stopped() 1
+#define types_stop_world()
+#define types_start_world()
#endif
@@ -106,6 +212,9 @@ slot_tp_new(PyTypeObject *type, PyObject *args, PyObject *kwds);
static int
slot_tp_setattro(PyObject *self, PyObject *name, PyObject *value);
+static PyObject *
+slot_tp_call(PyObject *self, PyObject *args, PyObject *kwds);
+
static inline PyTypeObject *
type_from_ref(PyObject *ref)
{
@@ -346,21 +455,14 @@ _PyStaticType_GetBuiltins(void)
static void
type_set_flags(PyTypeObject *tp, unsigned long flags)
{
- if (tp->tp_flags & Py_TPFLAGS_READY) {
- // It's possible the type object has been exposed to other threads
- // if it's been marked ready. In that case, the type lock should be
- // held when flags are modified.
- ASSERT_TYPE_LOCK_HELD();
- }
- // Since PyType_HasFeature() reads the flags without holding the type
- // lock, we need an atomic store here.
- FT_ATOMIC_STORE_ULONG_RELAXED(tp->tp_flags, flags);
+ ASSERT_WORLD_STOPPED_OR_NEW_TYPE(tp);
+ tp->tp_flags = flags;
}
static void
type_set_flags_with_mask(PyTypeObject *tp, unsigned long mask, unsigned long flags)
{
- ASSERT_TYPE_LOCK_HELD();
+ ASSERT_WORLD_STOPPED_OR_NEW_TYPE(tp);
unsigned long new_flags = (tp->tp_flags & ~mask) | flags;
type_set_flags(tp, new_flags);
}
@@ -498,6 +600,7 @@ static inline void
set_tp_bases(PyTypeObject *self, PyObject *bases, int initial)
{
assert(PyTuple_Check(bases));
+ ASSERT_NEW_TYPE_OR_LOCKED(self);
if (self->tp_flags & _Py_TPFLAGS_STATIC_BUILTIN) {
// XXX tp_bases can probably be statically allocated for each
// static builtin type.
@@ -542,7 +645,7 @@ clear_tp_bases(PyTypeObject *self, int final)
static inline PyObject *
lookup_tp_mro(PyTypeObject *self)
{
- ASSERT_TYPE_LOCK_HELD();
+ ASSERT_NEW_TYPE_OR_LOCKED(self);
return self->tp_mro;
}
@@ -1027,7 +1130,6 @@ PyType_Unwatch(int watcher_id, PyObject* obj)
static void
set_version_unlocked(PyTypeObject *tp, unsigned int version)
{
- ASSERT_TYPE_LOCK_HELD();
assert(version == 0 || (tp->tp_versions_used != _Py_ATTR_CACHE_UNUSED));
#ifndef Py_GIL_DISABLED
PyInterpreterState *interp = _PyInterpreterState_GET();
@@ -1075,7 +1177,12 @@ type_modified_unlocked(PyTypeObject *type)
We don't assign new version tags eagerly, but only as
needed.
*/
- ASSERT_TYPE_LOCK_HELD();
+ ASSERT_NEW_TYPE_OR_LOCKED(type);
+#ifdef Py_GIL_DISABLED
+ // This function is re-entrant and it's not safe to call it
+ // with the world stopped.
+ assert(!types_world_is_stopped());
+#endif
if (type->tp_version_tag == 0) {
return;
}
@@ -1106,6 +1213,8 @@ type_modified_unlocked(PyTypeObject *type)
while (bits) {
assert(i < TYPE_MAX_WATCHERS);
if (bits & 1) {
+ // Note that PyErr_FormatUnraisable is potentially re-entrant
+ // and the watcher callback might be too.
PyType_WatchCallback cb = interp->type_watchers[i];
if (cb && (cb(type) < 0)) {
PyErr_FormatUnraisable(
@@ -1245,14 +1354,6 @@ _PyType_LookupByVersion(unsigned int version)
#endif
}
-unsigned int
-_PyType_GetVersionForCurrentState(PyTypeObject *tp)
-{
- return tp->tp_version_tag;
-}
-
-
-
#define MAX_VERSIONS_PER_CLASS 1000
#if _Py_ATTR_CACHE_UNUSED < MAX_VERSIONS_PER_CLASS
#error "_Py_ATTR_CACHE_UNUSED must be bigger than max"
@@ -1586,10 +1687,13 @@ type_set_abstractmethods(PyObject *tp, PyObject *value, void *Py_UNUSED(closure)
BEGIN_TYPE_LOCK();
type_modified_unlocked(type);
+ types_stop_world();
if (abstract)
type_add_flags(type, Py_TPFLAGS_IS_ABSTRACT);
else
type_clear_flags(type, Py_TPFLAGS_IS_ABSTRACT);
+ types_start_world();
+ ASSERT_TYPE_LOCK_HELD();
END_TYPE_LOCK();
return 0;
@@ -1624,15 +1728,15 @@ type_get_mro(PyObject *tp, void *Py_UNUSED(closure))
return mro;
}
-static PyTypeObject *best_base(PyObject *);
-static int mro_internal(PyTypeObject *, PyObject **);
+static PyTypeObject *find_best_base(PyObject *);
+static int mro_internal(PyTypeObject *, int, PyObject **);
static int type_is_subtype_base_chain(PyTypeObject *, PyTypeObject *);
static int compatible_for_assignment(PyTypeObject *, PyTypeObject *, const char *);
static int add_subclass(PyTypeObject*, PyTypeObject*);
static int add_all_subclasses(PyTypeObject *type, PyObject *bases);
static void remove_subclass(PyTypeObject *, PyTypeObject *);
static void remove_all_subclasses(PyTypeObject *type, PyObject *bases);
-static void update_all_slots(PyTypeObject *);
+static int update_all_slots(PyTypeObject *);
typedef int (*update_callback)(PyTypeObject *, void *);
static int update_subclasses(PyTypeObject *type, PyObject *attr_name,
@@ -1640,13 +1744,15 @@ static int update_subclasses(PyTypeObject *type, PyObject *attr_name,
static int recurse_down_subclasses(PyTypeObject *type, PyObject *name,
update_callback callback, void *data);
+// Compute tp_mro for this type and all of its subclasses. This
+// is called after __bases__ is assigned to an existing type.
static int
mro_hierarchy(PyTypeObject *type, PyObject *temp)
{
ASSERT_TYPE_LOCK_HELD();
PyObject *old_mro;
- int res = mro_internal(type, &old_mro);
+ int res = mro_internal(type, 0, &old_mro);
if (res <= 0) {
/* error / reentrance */
return res;
@@ -1708,9 +1814,9 @@ mro_hierarchy(PyTypeObject *type, PyObject *temp)
}
static int
-type_set_bases_unlocked(PyTypeObject *type, PyObject *new_bases)
+type_check_new_bases(PyTypeObject *type, PyObject *new_bases, PyTypeObject **best_base)
{
- // Check arguments
+    // Check arguments; this is re-entrant due to the PySys_Audit() call
if (!check_set_special_type_attr(type, new_bases, "__bases__")) {
return -1;
}
@@ -1759,20 +1865,29 @@ type_set_bases_unlocked(PyTypeObject *type, PyObject *new_bases)
}
// Compute the new MRO and the new base class
- PyTypeObject *new_base = best_base(new_bases);
- if (new_base == NULL)
+ *best_base = find_best_base(new_bases);
+ if (*best_base == NULL)
return -1;
- if (!compatible_for_assignment(type->tp_base, new_base, "__bases__")) {
+ if (!compatible_for_assignment(type->tp_base, *best_base, "__bases__")) {
return -1;
}
+ return 0;
+}
+
+static int
+type_set_bases_unlocked(PyTypeObject *type, PyObject *new_bases, PyTypeObject *best_base)
+{
+ ASSERT_TYPE_LOCK_HELD();
+
+ Py_ssize_t n;
PyObject *old_bases = lookup_tp_bases(type);
assert(old_bases != NULL);
PyTypeObject *old_base = type->tp_base;
set_tp_bases(type, Py_NewRef(new_bases), 0);
- type->tp_base = (PyTypeObject *)Py_NewRef(new_base);
+ type->tp_base = (PyTypeObject *)Py_NewRef(best_base);
PyObject *temp = PyList_New(0);
if (temp == NULL) {
@@ -1796,7 +1911,11 @@ type_set_bases_unlocked(PyTypeObject *type, PyObject *new_bases)
add to all new_bases */
remove_all_subclasses(type, old_bases);
res = add_all_subclasses(type, new_bases);
- update_all_slots(type);
+ if (update_all_slots(type) < 0) {
+ goto bail;
+ }
+ /* Clear the VALID_VERSION flag of 'type' and all its subclasses. */
+ type_modified_unlocked(type);
}
else {
res = 0;
@@ -1827,13 +1946,13 @@ type_set_bases_unlocked(PyTypeObject *type, PyObject *new_bases)
bail:
if (lookup_tp_bases(type) == new_bases) {
- assert(type->tp_base == new_base);
+ assert(type->tp_base == best_base);
set_tp_bases(type, old_bases, 0);
type->tp_base = old_base;
Py_DECREF(new_bases);
- Py_DECREF(new_base);
+ Py_DECREF(best_base);
}
else {
Py_DECREF(old_bases);
@@ -1848,9 +1967,13 @@ static int
type_set_bases(PyObject *tp, PyObject *new_bases, void *Py_UNUSED(closure))
{
PyTypeObject *type = PyTypeObject_CAST(tp);
+ PyTypeObject *best_base;
int res;
BEGIN_TYPE_LOCK();
- res = type_set_bases_unlocked(type, new_bases);
+ res = type_check_new_bases(type, new_bases, &best_base);
+ if (res == 0) {
+ res = type_set_bases_unlocked(type, new_bases, best_base);
+ }
END_TYPE_LOCK();
return res;
}
@@ -3078,6 +3201,7 @@ static PyObject *
class_name(PyObject *cls)
{
PyObject *name;
+ // Note that this is potentially re-entrant.
if (PyObject_GetOptionalAttr(cls, &_Py_ID(__name__), &name) == 0) {
name = PyObject_Repr(cls);
}
@@ -3414,9 +3538,13 @@ mro_invoke(PyTypeObject *type)
const int custom = !Py_IS_TYPE(type, &PyType_Type);
if (custom) {
+ // Custom mro() method on metaclass. This is potentially re-entrant.
+ // We are called either from type_ready() or from type_set_bases().
mro_result = call_method_noarg((PyObject *)type, &_Py_ID(mro));
}
else {
+ // In this case, the mro() method on the type object is being used and
+ // we know that these calls are not re-entrant.
mro_result = mro_implementation_unlocked(type);
}
if (mro_result == NULL)
@@ -3464,7 +3592,7 @@ mro_invoke(PyTypeObject *type)
- Returns -1 in case of an error.
*/
static int
-mro_internal_unlocked(PyTypeObject *type, int initial, PyObject **p_old_mro)
+mro_internal(PyTypeObject *type, int initial, PyObject **p_old_mro)
{
ASSERT_TYPE_LOCK_HELD();
@@ -3512,21 +3640,11 @@ mro_internal_unlocked(PyTypeObject *type, int initial, PyObject **p_old_mro)
return 1;
}
-static int
-mro_internal(PyTypeObject *type, PyObject **p_old_mro)
-{
- int res;
- BEGIN_TYPE_LOCK();
- res = mro_internal_unlocked(type, 0, p_old_mro);
- END_TYPE_LOCK();
- return res;
-}
-
/* Calculate the best base amongst multiple base classes.
This is the first one that's on the path to the "solid base". */
static PyTypeObject *
-best_base(PyObject *bases)
+find_best_base(PyObject *bases)
{
Py_ssize_t i, n;
PyTypeObject *base, *winner, *candidate;
@@ -3608,13 +3726,167 @@ solid_base(PyTypeObject *type)
}
}
+#ifdef Py_GIL_DISABLED
+
+// The structures and functions below are used in the free-threaded build
+// to safely make updates to type slots, on type_setattro() for a slot
+// or when __bases__ is re-assigned. Since the slots are read without atomic
+// operations and without locking, we can only safely update them while the
+// world is stopped. However, with the world stopped, we are very limited on
+// which APIs can be safely used. For example, calling _PyObject_HashFast()
+// or _PyDict_GetItemRef_KnownHash() are not safe and can potentially cause
+// deadlocks. Hashing can be re-entrant and _PyDict_GetItemRef_KnownHash can
+// acquire a lock if the dictionary is not owned by the current thread, to
+// mark it shared on reading.
+//
+// We do the slot updates in two steps. First, with TYPE_LOCK held, we look up
+// the descriptor for each slot, for each subclass. We build a queue of
+// updates to perform but don't actually update the type structures. After we
+// have finished the lookups, we stop the world and apply all of the updates.
+// The apply_slot_updates() code is simple and easy to confirm that it is
+// safe.
+
+typedef struct {
+ PyTypeObject *type;
+ void **slot_ptr;
+ void *slot_value;
+} slot_update_item_t;
+
+// The number of slot updates performed is based on the number of changed
+// slots and the number of subclasses. It's possible there are many updates
+// required if there are many subclasses (potentially an unbounded amount).
+// Usually the number of slot updates is small, most often zero or one. When
+// running the unit tests, we don't exceed 20. The chunk size is set to
+// handle the common case with a single chunk and to not require too many
+// chunk allocations if there are many subclasses.
+#define SLOT_UPDATE_CHUNK_SIZE 30
+
+typedef struct _slot_update {
+ struct _slot_update *prev;
+ Py_ssize_t n;
+ slot_update_item_t updates[SLOT_UPDATE_CHUNK_SIZE];
+} slot_update_chunk_t;
+
+// a queue of updates to be performed
+typedef struct {
+ slot_update_chunk_t *head;
+} slot_update_t;
+
+static slot_update_chunk_t *
+slot_update_new_chunk(void)
+{
+ slot_update_chunk_t *chunk = PyMem_Malloc(sizeof(slot_update_chunk_t));
+ if (chunk == NULL) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+ chunk->prev = NULL;
+ chunk->n = 0;
+ return chunk;
+}
+
+static void
+slot_update_free_chunks(slot_update_t *updates)
+{
+ slot_update_chunk_t *chunk = updates->head;
+ while (chunk != NULL) {
+ slot_update_chunk_t *prev = chunk->prev;
+ PyMem_Free(chunk);
+ chunk = prev;
+ }
+}
+
+static int
+queue_slot_update(slot_update_t *updates, PyTypeObject *type,
+ void **slot_ptr, void *slot_value)
+{
+ if (*slot_ptr == slot_value) {
+ return 0; // slot pointer not actually changed, don't queue update
+ }
+ if (updates->head == NULL || updates->head->n == SLOT_UPDATE_CHUNK_SIZE) {
+ slot_update_chunk_t *chunk = slot_update_new_chunk();
+ if (chunk == NULL) {
+ return -1; // out-of-memory
+ }
+ chunk->prev = updates->head;
+ updates->head = chunk;
+ }
+ slot_update_item_t *item = &updates->head->updates[updates->head->n];
+ item->type = type;
+ item->slot_ptr = slot_ptr;
+ item->slot_value = slot_value;
+ updates->head->n++;
+ assert(updates->head->n <= SLOT_UPDATE_CHUNK_SIZE);
+ return 0;
+}
+
+static void
+apply_slot_updates(slot_update_t *updates)
+{
+ assert(types_world_is_stopped());
+ slot_update_chunk_t *chunk = updates->head;
+ while (chunk != NULL) {
+ for (Py_ssize_t i = 0; i < chunk->n; i++) {
+ slot_update_item_t *item = &chunk->updates[i];
+ *(item->slot_ptr) = item->slot_value;
+ if (item->slot_value == slot_tp_call) {
+ /* A generic __call__ is incompatible with vectorcall */
+ type_clear_flags(item->type, Py_TPFLAGS_HAVE_VECTORCALL);
+ }
+ }
+ chunk = chunk->prev;
+ }
+}
+
+static void
+apply_type_slot_updates(slot_update_t *updates)
+{
+ // This must be done carefully to avoid data races and deadlocks. We
+ // have just updated the type __dict__, while holding TYPE_LOCK. We have
+ // collected all of the required type slot updates into the 'updates'
+ // queue. Note that those updates can apply to multiple types since
+ // subclasses might also be affected by the dict change.
+ //
+ // We need to prevent other threads from writing to the dict before we can
+ // finish updating the slots. The actual stores to the slots are done
+ // with the world stopped. If we block on the stop-the-world mutex then
+ // we could release TYPE_LOCK mutex and potentially allow other threads
+ // to update the dict. That's because TYPE_LOCK was acquired using a
+ // critical section.
+ //
+ // The type_lock_prevent_release() call prevents the TYPE_LOCK mutex from
+ // being released even if we block on the STM mutex. We need to take care
+ // that we do not deadlock because of that. It is safe because we always
+ // acquire locks in the same order: first the TYPE_LOCK mutex and then the
+ // STM mutex.
+ type_lock_prevent_release();
+ types_stop_world();
+ apply_slot_updates(updates);
+ types_start_world();
+ type_lock_allow_release();
+}
+
+#else
+
+// dummy definition, this parameter is only NULL in the default build
+typedef void slot_update_t;
+
+#endif
+
+/// data passed to update_slots_callback()
+typedef struct {
+ slot_update_t *queued_updates;
+ pytype_slotdef **defs;
+} update_callback_data_t;
+
static void object_dealloc(PyObject *);
static PyObject *object_new(PyTypeObject *, PyObject *, PyObject *);
static int object_init(PyObject *, PyObject *, PyObject *);
-static int update_slot(PyTypeObject *, PyObject *);
+static int update_slot(PyTypeObject *, PyObject *, slot_update_t *update);
static void fixup_slot_dispatchers(PyTypeObject *);
static int type_new_set_names(PyTypeObject *);
static int type_new_init_subclass(PyTypeObject *, PyObject *);
+static bool has_slotdef(PyObject *);
/*
* Helpers for __dict__ descriptor. We don't want to expose the dicts
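Taken together, the machinery above splits a dunder assignment such as SomeClass.__call__ = fn into a locked lookup phase and a stop-the-world apply phase. A condensed view of the flow, mirroring update_slot_after_setattr() and apply_type_slot_updates() defined elsewhere in this patch (free-threaded build; illustrative only):

static int
set_dunder_call_example(PyTypeObject *type)
{
    slot_update_chunk_t chunk = {0};      /* one stack-allocated chunk suffices */
    slot_update_t queued = {&chunk};

    /* Phase 1: with TYPE_LOCK held, resolve descriptors and queue the stores. */
    if (update_slot(type, &_Py_ID(__call__), &queued) < 0) {
        return -1;                        /* out of memory while queueing */
    }

    /* Phase 2: apply the queued stores while no other thread can run. */
    if (queued.head->n > 0) {
        type_lock_prevent_release();      /* keep TYPE_LOCK held across the pause */
        types_stop_world();
        apply_slot_updates(&queued);
        types_start_world();
        type_lock_allow_release();
    }
    return 0;
}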
@@ -3676,10 +3948,39 @@ subtype_dict(PyObject *obj, void *context)
return PyObject_GenericGetDict(obj, context);
}
+int
+_PyObject_SetDict(PyObject *obj, PyObject *value)
+{
+ if (value != NULL && !PyDict_Check(value)) {
+ PyErr_Format(PyExc_TypeError,
+ "__dict__ must be set to a dictionary, "
+ "not a '%.200s'", Py_TYPE(value)->tp_name);
+ return -1;
+ }
+ if (Py_TYPE(obj)->tp_flags & Py_TPFLAGS_MANAGED_DICT) {
+ return _PyObject_SetManagedDict(obj, value);
+ }
+ PyObject **dictptr = _PyObject_ComputedDictPointer(obj);
+ if (dictptr == NULL) {
+ PyErr_SetString(PyExc_AttributeError,
+ "This object has no __dict__");
+ return -1;
+ }
+ Py_BEGIN_CRITICAL_SECTION(obj);
+ PyObject *olddict = *dictptr;
+ FT_ATOMIC_STORE_PTR_RELEASE(*dictptr, Py_NewRef(value));
+#ifdef Py_GIL_DISABLED
+ _PyObject_XDecRefDelayed(olddict);
+#else
+ Py_XDECREF(olddict);
+#endif
+ Py_END_CRITICAL_SECTION();
+ return 0;
+}
+
static int
subtype_setdict(PyObject *obj, PyObject *value, void *context)
{
- PyObject **dictptr;
PyTypeObject *base;
base = get_builtin_base_with_dict(Py_TYPE(obj));
@@ -3697,28 +3998,7 @@ subtype_setdict(PyObject *obj, PyObject *value, void *context)
}
return func(descr, obj, value);
}
- /* Almost like PyObject_GenericSetDict, but allow __dict__ to be deleted. */
- if (value != NULL && !PyDict_Check(value)) {
- PyErr_Format(PyExc_TypeError,
- "__dict__ must be set to a dictionary, "
- "not a '%.200s'", Py_TYPE(value)->tp_name);
- return -1;
- }
-
- if (Py_TYPE(obj)->tp_flags & Py_TPFLAGS_MANAGED_DICT) {
- return _PyObject_SetManagedDict(obj, value);
- }
- else {
- dictptr = _PyObject_ComputedDictPointer(obj);
- if (dictptr == NULL) {
- PyErr_SetString(PyExc_AttributeError,
- "This object has no __dict__");
- return -1;
- }
- Py_CLEAR(*dictptr);
- *dictptr = Py_XNewRef(value);
- }
- return 0;
+ return _PyObject_SetDict(obj, value);
}
static PyObject *
@@ -3812,7 +4092,7 @@ type_init(PyObject *cls, PyObject *args, PyObject *kwds)
unsigned long
PyType_GetFlags(PyTypeObject *type)
{
- return FT_ATOMIC_LOAD_ULONG_RELAXED(type->tp_flags);
+ return type->tp_flags;
}
@@ -4590,6 +4870,10 @@ type_new_impl(type_new_ctx *ctx)
}
assert(_PyType_CheckConsistency(type));
+#if defined(Py_GIL_DISABLED) && defined(Py_DEBUG) && SIZEOF_VOID_P > 4
+    // After this point, other threads can potentially use this type.
+ ((PyObject*)type)->ob_flags |= _Py_TYPE_REVEALED_FLAG;
+#endif
return (PyObject *)type;
@@ -4652,7 +4936,7 @@ type_new_get_bases(type_new_ctx *ctx, PyObject **type)
}
/* Calculate best base, and check that all bases are type objects */
- PyTypeObject *base = best_base(ctx->bases);
+ PyTypeObject *base = find_best_base(ctx->bases);
if (base == NULL) {
return -1;
}
@@ -5067,12 +5351,12 @@ PyType_FromMetaclass(
}
/* Calculate best base, and check that all bases are type objects */
- PyTypeObject *base = best_base(bases); // borrowed ref
+ PyTypeObject *base = find_best_base(bases); // borrowed ref
if (base == NULL) {
goto finally;
}
- // best_base should check Py_TPFLAGS_BASETYPE & raise a proper exception,
- // here we just check its work
+ // find_best_base() should check Py_TPFLAGS_BASETYPE & raise a proper
+ // exception, here we just check its work
assert(_PyType_HasFeature(base, Py_TPFLAGS_BASETYPE));
/* Calculate sizes */
@@ -5303,6 +5587,10 @@ PyType_FromMetaclass(
}
assert(_PyType_CheckConsistency(type));
+#if defined(Py_GIL_DISABLED) && defined(Py_DEBUG) && SIZEOF_VOID_P > 4
+    // After this point, other threads can potentially use this type.
+ ((PyObject*)type)->ob_flags |= _Py_TYPE_REVEALED_FLAG;
+#endif
finally:
if (PyErr_Occurred()) {
@@ -5598,8 +5886,6 @@ PyObject_GetItemData(PyObject *obj)
static PyObject *
find_name_in_mro(PyTypeObject *type, PyObject *name, int *error)
{
- ASSERT_TYPE_LOCK_HELD();
-
Py_hash_t hash = _PyObject_HashFast(name);
if (hash == -1) {
*error = -1;
@@ -5908,9 +6194,13 @@ _PyType_CacheGetItemForSpecialization(PyHeapTypeObject *ht, PyObject *descriptor
void
_PyType_SetFlags(PyTypeObject *self, unsigned long mask, unsigned long flags)
{
- BEGIN_TYPE_LOCK();
- type_set_flags_with_mask(self, mask, flags);
- END_TYPE_LOCK();
+ unsigned long new_flags = (self->tp_flags & ~mask) | flags;
+ if (new_flags != self->tp_flags) {
+ types_stop_world();
+ // can't use new_flags here since they could be out-of-date
+ self->tp_flags = (self->tp_flags & ~mask) | flags;
+ types_start_world();
+ }
}
int
@@ -5957,9 +6247,9 @@ set_flags_recursive(PyTypeObject *self, unsigned long mask, unsigned long flags)
void
_PyType_SetFlagsRecursive(PyTypeObject *self, unsigned long mask, unsigned long flags)
{
- BEGIN_TYPE_LOCK();
+ types_stop_world();
set_flags_recursive(self, mask, flags);
- END_TYPE_LOCK();
+ types_start_world();
}
/* This is similar to PyObject_GenericGetAttr(),
@@ -6073,6 +6363,8 @@ _Py_type_getattro(PyObject *tp, PyObject *name)
return _Py_type_getattro_impl(type, name, NULL);
}
+// Called by type_setattro(). Updates both the type dict and
+// the type versions.
static int
type_update_dict(PyTypeObject *type, PyDictObject *dict, PyObject *name,
PyObject *value, PyObject **old_value)
@@ -6102,10 +6394,30 @@ type_update_dict(PyTypeObject *type, PyDictObject *dict, PyObject *name,
return -1;
}
- if (is_dunder_name(name)) {
- return update_slot(type, name);
- }
+ return 0;
+}
+
+static int
+update_slot_after_setattr(PyTypeObject *type, PyObject *name)
+{
+#ifdef Py_GIL_DISABLED
+ // stack allocate one chunk since that's all we need
+ assert(SLOT_UPDATE_CHUNK_SIZE >= MAX_EQUIV);
+ slot_update_chunk_t chunk = {0};
+ slot_update_t queued_updates = {&chunk};
+ if (update_slot(type, name, &queued_updates) < 0) {
+ return -1;
+ }
+ if (queued_updates.head->n > 0) {
+ apply_type_slot_updates(&queued_updates);
+ ASSERT_TYPE_LOCK_HELD();
+ // should never allocate another chunk
+ assert(chunk.prev == NULL);
+ }
+#else
+ update_slot(type, name, NULL);
+#endif
return 0;
}
@@ -6163,7 +6475,9 @@ type_setattro(PyObject *self, PyObject *name, PyObject *value)
PyObject *dict = type->tp_dict;
if (dict == NULL) {
- // We don't just do PyType_Ready because we could already be readying
+ // This is an unlikely case. PyType_Ready has not yet been done and
+ // we need to initialize tp_dict. We don't just do PyType_Ready
+ // because we could already be readying.
BEGIN_TYPE_LOCK();
dict = type->tp_dict;
if (dict == NULL) {
@@ -6179,6 +6493,12 @@ type_setattro(PyObject *self, PyObject *name, PyObject *value)
BEGIN_TYPE_DICT_LOCK(dict);
res = type_update_dict(type, (PyDictObject *)dict, name, value, &old_value);
assert(_PyType_CheckConsistency(type));
+ if (res == 0) {
+ if (is_dunder_name(name) && has_slotdef(name)) {
+ // The name corresponds to a type slot.
+ res = update_slot_after_setattr(type, name);
+ }
+ }
END_TYPE_DICT_LOCK();
done:
@@ -7108,15 +7428,10 @@ object_set_class(PyObject *self, PyObject *value, void *closure)
return -1;
}
-#ifdef Py_GIL_DISABLED
- PyInterpreterState *interp = _PyInterpreterState_GET();
- _PyEval_StopTheWorld(interp);
-#endif
+ types_stop_world();
PyTypeObject *oldto = Py_TYPE(self);
int res = object_set_class_world_stopped(self, newto);
-#ifdef Py_GIL_DISABLED
- _PyEval_StartTheWorld(interp);
-#endif
+ types_start_world();
if (res == 0) {
if (oldto->tp_flags & Py_TPFLAGS_HEAPTYPE) {
Py_DECREF(oldto);
@@ -8524,7 +8839,7 @@ type_ready_mro(PyTypeObject *type, int initial)
}
/* Calculate method resolution order */
- if (mro_internal_unlocked(type, initial, NULL) < 0) {
+ if (mro_internal(type, initial, NULL) < 0) {
return -1;
}
PyObject *mro = lookup_tp_mro(type);
@@ -11047,12 +11362,21 @@ resolve_slotdups(PyTypeObject *type, PyObject *name)
{
/* XXX Maybe this could be optimized more -- but is it worth it? */
+#ifdef Py_GIL_DISABLED
+ pytype_slotdef *ptrs[MAX_EQUIV];
+ pytype_slotdef **pp = ptrs;
+ /* Collect all slotdefs that match name into ptrs. */
+ for (pytype_slotdef *p = slotdefs; p->name_strobj; p++) {
+ if (p->name_strobj == name)
+ *pp++ = p;
+ }
+ *pp = NULL;
+#else
/* pname and ptrs act as a little cache */
PyInterpreterState *interp = _PyInterpreterState_GET();
#define pname _Py_INTERP_CACHED_OBJECT(interp, type_slots_pname)
#define ptrs _Py_INTERP_CACHED_OBJECT(interp, type_slots_ptrs)
pytype_slotdef *p, **pp;
- void **res, **ptr;
if (pname != name) {
/* Collect all slotdefs that match name into ptrs. */
@@ -11064,10 +11388,12 @@ resolve_slotdups(PyTypeObject *type, PyObject *name)
}
*pp = NULL;
}
+#endif
/* Look in all slots of the type matching the name. If exactly one of these
has a filled-in slot, return a pointer to that slot.
Otherwise, return NULL. */
+ void **res, **ptr;
res = NULL;
for (pp = ptrs; *pp; pp++) {
ptr = slotptr(type, (*pp)->offset);
@@ -11077,11 +11403,25 @@ resolve_slotdups(PyTypeObject *type, PyObject *name)
return NULL;
res = ptr;
}
- return res;
+#ifndef Py_GIL_DISABLED
#undef pname
#undef ptrs
+#endif
+ return res;
}
+// Return true if "name" corresponds to at least one slot definition. This is
+// a more accurate but more expensive test compared to is_dunder_name().
+static bool
+has_slotdef(PyObject *name)
+{
+ for (pytype_slotdef *p = slotdefs; p->name_strobj; p++) {
+ if (p->name_strobj == name) {
+ return true;
+ }
+ }
+ return false;
+}
/* Common code for update_slots_callback() and fixup_slot_dispatchers().
*
@@ -11134,13 +11474,22 @@ resolve_slotdups(PyTypeObject *type, PyObject *name)
* There are some further special cases for specific slots, like supporting
* __hash__ = None for tp_hash and special code for tp_new.
*
- * When done, return a pointer to the next slotdef with a different offset,
- * because that's convenient for fixup_slot_dispatchers(). This function never
- * sets an exception: if an internal error happens (unlikely), it's ignored. */
-static pytype_slotdef *
-update_one_slot(PyTypeObject *type, pytype_slotdef *p)
+ * When done, next_p is set to the next slotdef with a different offset,
+ * because that's convenient for fixup_slot_dispatchers().
+ *
+ * If the queued_updates pointer is provided, the actual updates to the slot
+ * pointers are queued, rather than being immediately performed. That argument
+ * is only used for the free-threaded build since those updates need to be
+ * done while the world is stopped.
+ *
+ * This function will only return an error if the queued_updates argument is
+ * provided and allocating memory for the queue fails. Other exceptions that
+ * occur internally are ignored, such as when looking up descriptors. */
+static int
+update_one_slot(PyTypeObject *type, pytype_slotdef *p, pytype_slotdef **next_p,
+ slot_update_t *queued_updates)
{
- ASSERT_TYPE_LOCK_HELD();
+ ASSERT_NEW_TYPE_OR_LOCKED(type);
PyObject *descr;
PyWrapperDescrObject *d;
@@ -11163,7 +11512,10 @@ update_one_slot(PyTypeObject *type, pytype_slotdef *p)
do {
++p;
} while (p->offset == offset);
- return p;
+ if (next_p != NULL) {
+ *next_p = p;
+ }
+ return 0;
}
/* We may end up clearing live exceptions below, so make sure it's ours. */
assert(!PyErr_Occurred());
@@ -11246,16 +11598,41 @@ update_one_slot(PyTypeObject *type, pytype_slotdef *p)
}
if (p->function == slot_tp_call) {
/* A generic __call__ is incompatible with vectorcall */
- type_clear_flags(type, Py_TPFLAGS_HAVE_VECTORCALL);
+ if (queued_updates == NULL) {
+ type_clear_flags(type, Py_TPFLAGS_HAVE_VECTORCALL);
+ }
}
}
Py_DECREF(descr);
} while ((++p)->offset == offset);
- if (specific && !use_generic)
- *ptr = specific;
- else
- *ptr = generic;
- return p;
+
+ void *slot_value;
+ if (specific && !use_generic) {
+ slot_value = specific;
+ } else {
+ slot_value = generic;
+ }
+
+#ifdef Py_GIL_DISABLED
+ if (queued_updates != NULL) {
+ // queue the update to perform later, while world is stopped
+ if (queue_slot_update(queued_updates, type, ptr, slot_value) < 0) {
+ return -1;
+ }
+ } else {
+ // do the update to the type structure now
+ *ptr = slot_value;
+ }
+#else
+ // always do the update immediately
+ assert(queued_updates == NULL);
+ *ptr = slot_value;
+#endif
+
+ if (next_p != NULL) {
+ *next_p = p;
+ }
+ return 0;
}
/* In the type, update the slots whose slotdefs are gathered in the pp array.
@@ -11263,18 +11640,21 @@ update_one_slot(PyTypeObject *type, pytype_slotdef *p)
static int
update_slots_callback(PyTypeObject *type, void *data)
{
- ASSERT_TYPE_LOCK_HELD();
+ ASSERT_NEW_TYPE_OR_LOCKED(type);
- pytype_slotdef **pp = (pytype_slotdef **)data;
+ update_callback_data_t *update_data = (update_callback_data_t *)data;
+ pytype_slotdef **pp = update_data->defs;
for (; *pp; pp++) {
- update_one_slot(type, *pp);
+ if (update_one_slot(type, *pp, NULL, update_data->queued_updates) < 0) {
+ return -1;
+ }
}
return 0;
}
/* Update the slots after assignment to a class (type) attribute. */
static int
-update_slot(PyTypeObject *type, PyObject *name)
+update_slot(PyTypeObject *type, PyObject *name, slot_update_t *queued_updates)
{
pytype_slotdef *ptrs[MAX_EQUIV];
pytype_slotdef *p;
@@ -11305,8 +11685,12 @@ update_slot(PyTypeObject *type, PyObject *name)
}
if (ptrs[0] == NULL)
return 0; /* Not an attribute that affects any slots */
+
+ update_callback_data_t callback_data;
+ callback_data.defs = ptrs;
+ callback_data.queued_updates = queued_updates;
return update_subclasses(type, name,
- update_slots_callback, (void *)ptrs);
+ update_slots_callback, (void *)&callback_data);
}
/* Store the proper functions in the slot dispatches at class (type)
@@ -11315,35 +11699,56 @@ update_slot(PyTypeObject *type, PyObject *name)
static void
fixup_slot_dispatchers(PyTypeObject *type)
{
- // This lock isn't strictly necessary because the type has not been
- // exposed to anyone else yet, but update_ont_slot calls find_name_in_mro
- // where we'd like to assert that the type is locked.
- BEGIN_TYPE_LOCK();
-
assert(!PyErr_Occurred());
for (pytype_slotdef *p = slotdefs; p->name; ) {
- p = update_one_slot(type, p);
+ update_one_slot(type, p, &p, NULL);
}
-
- END_TYPE_LOCK();
}
-static void
+#ifdef Py_GIL_DISABLED
+
+// Called when __bases__ is re-assigned.
+static int
update_all_slots(PyTypeObject* type)
{
- pytype_slotdef *p;
-
- ASSERT_TYPE_LOCK_HELD();
+ // Note that update_slot() can fail due to out-of-memory when allocating
+ // the queue chunks to hold the updates. That's unlikely since the number
+ // of updates is normally small but we handle that case. update_slot()
+ // can fail internally for other reasons (a lookup fails) but those
+ // errors are suppressed.
+ slot_update_t queued_updates = {0};
+ for (pytype_slotdef *p = slotdefs; p->name; p++) {
+ if (update_slot(type, p->name_strobj, &queued_updates) < 0) {
+ if (queued_updates.head) {
+ slot_update_free_chunks(&queued_updates);
+ }
+ return -1;
+ }
+ }
+ if (queued_updates.head != NULL) {
+ apply_type_slot_updates(&queued_updates);
+ ASSERT_TYPE_LOCK_HELD();
+ slot_update_free_chunks(&queued_updates);
+ }
+ return 0;
+}
- /* Clear the VALID_VERSION flag of 'type' and all its subclasses. */
- type_modified_unlocked(type);
+#else
+// Called when __bases__ is re-assigned.
+static int
+update_all_slots(PyTypeObject* type)
+{
+ pytype_slotdef *p;
for (p = slotdefs; p->name; p++) {
- /* update_slot returns int but can't actually fail */
- update_slot(type, p->name_strobj);
+        /* update_slot returns int but can't actually fail in this case */
+ update_slot(type, p->name_strobj, NULL);
}
+ return 0;
}
+#endif
+
PyObject *
_PyType_GetSlotWrapperNames(void)
@@ -11613,7 +12018,10 @@ PyType_Freeze(PyTypeObject *type)
}
BEGIN_TYPE_LOCK();
+ types_stop_world();
type_add_flags(type, Py_TPFLAGS_IMMUTABLETYPE);
+ types_start_world();
+ ASSERT_TYPE_LOCK_HELD();
type_modified_unlocked(type);
END_TYPE_LOCK();
diff --git a/Objects/typevarobject.c b/Objects/typevarobject.c
index 6c199a52aa0..cead6e69af5 100644
--- a/Objects/typevarobject.c
+++ b/Objects/typevarobject.c
@@ -192,7 +192,7 @@ constevaluator_call(PyObject *self, PyObject *args, PyObject *kwargs)
for (Py_ssize_t i = 0; i < PyTuple_GET_SIZE(value); i++) {
PyObject *item = PyTuple_GET_ITEM(value, i);
if (i > 0) {
- if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, ", ", 2) < 0) {
PyUnicodeWriter_Discard(writer);
return NULL;
}
@@ -273,7 +273,7 @@ _Py_typing_type_repr(PyUnicodeWriter *writer, PyObject *p)
}
if (p == (PyObject *)&_PyNone_Type) {
- return PyUnicodeWriter_WriteUTF8(writer, "None", 4);
+ return PyUnicodeWriter_WriteASCII(writer, "None", 4);
}
if ((rc = PyObject_HasAttrWithError(p, &_Py_ID(__origin__))) > 0 &&
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
index aa94fb91e65..5611f839627 100644
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -14083,6 +14083,20 @@ _PyUnicodeWriter_WriteASCIIString(_PyUnicodeWriter *writer,
return 0;
}
+
+int
+PyUnicodeWriter_WriteASCII(PyUnicodeWriter *writer,
+ const char *str,
+ Py_ssize_t size)
+{
+ assert(writer != NULL);
+ _Py_AssertHoldsTstate();
+
+ _PyUnicodeWriter *priv_writer = (_PyUnicodeWriter*)writer;
+ return _PyUnicodeWriter_WriteASCIIString(priv_writer, str, size);
+}
+
+
int
PyUnicodeWriter_WriteUTF8(PyUnicodeWriter *writer,
const char *str,
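PyUnicodeWriter_WriteASCII() added above is the counterpart of PyUnicodeWriter_WriteUTF8() for input the caller already knows is pure ASCII, which is why the repr helpers elsewhere in this patch switch to it for literals such as ", " and "None". A minimal usage sketch with the public writer API (illustrative only; the function name is hypothetical):

static PyObject *
join_two_reprs(PyObject *first, PyObject *second)
{
    PyUnicodeWriter *writer = PyUnicodeWriter_Create(0);
    if (writer == NULL) {
        return NULL;
    }
    if (PyUnicodeWriter_WriteRepr(writer, first) < 0) {
        goto error;
    }
    if (PyUnicodeWriter_WriteASCII(writer, ", ", 2) < 0) {  /* literal known to be ASCII */
        goto error;
    }
    if (PyUnicodeWriter_WriteRepr(writer, second) < 0) {
        goto error;
    }
    return PyUnicodeWriter_Finish(writer);

error:
    PyUnicodeWriter_Discard(writer);
    return NULL;
}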
diff --git a/Objects/unionobject.c b/Objects/unionobject.c
index 66435924b6c..00ca5b9bf80 100644
--- a/Objects/unionobject.c
+++ b/Objects/unionobject.c
@@ -290,7 +290,7 @@ union_repr(PyObject *self)
}
for (Py_ssize_t i = 0; i < len; i++) {
- if (i > 0 && PyUnicodeWriter_WriteUTF8(writer, " | ", 3) < 0) {
+ if (i > 0 && PyUnicodeWriter_WriteASCII(writer, " | ", 3) < 0) {
goto error;
}
PyObject *p = PyTuple_GET_ITEM(alias->args, i);
@@ -300,12 +300,12 @@ union_repr(PyObject *self)
}
#if 0
- PyUnicodeWriter_WriteUTF8(writer, "|args=", 6);
+ PyUnicodeWriter_WriteASCII(writer, "|args=", 6);
PyUnicodeWriter_WriteRepr(writer, alias->args);
- PyUnicodeWriter_WriteUTF8(writer, "|h=", 3);
+ PyUnicodeWriter_WriteASCII(writer, "|h=", 3);
PyUnicodeWriter_WriteRepr(writer, alias->hashable_args);
if (alias->unhashable_args) {
- PyUnicodeWriter_WriteUTF8(writer, "|u=", 3);
+ PyUnicodeWriter_WriteASCII(writer, "|u=", 3);
PyUnicodeWriter_WriteRepr(writer, alias->unhashable_args);
}
#endif
diff --git a/PC/python3dll.c b/PC/python3dll.c
index f0c578e11c6..8ec791f8280 100755
--- a/PC/python3dll.c
+++ b/PC/python3dll.c
@@ -595,7 +595,11 @@ EXPORT_FUNC(PySys_Audit)
EXPORT_FUNC(PySys_AuditTuple)
EXPORT_FUNC(PySys_FormatStderr)
EXPORT_FUNC(PySys_FormatStdout)
+EXPORT_FUNC(PySys_GetAttr)
+EXPORT_FUNC(PySys_GetAttrString)
EXPORT_FUNC(PySys_GetObject)
+EXPORT_FUNC(PySys_GetOptionalAttr)
+EXPORT_FUNC(PySys_GetOptionalAttrString)
EXPORT_FUNC(PySys_GetXOptions)
EXPORT_FUNC(PySys_HasWarnOptions)
EXPORT_FUNC(PySys_ResetWarnOptions)
diff --git a/PCbuild/_testclinic_limited.vcxproj b/PCbuild/_testclinic_limited.vcxproj
index 183a55080e8..95c205309b1 100644
--- a/PCbuild/_testclinic_limited.vcxproj
+++ b/PCbuild/_testclinic_limited.vcxproj
@@ -70,6 +70,7 @@
<ProjectGuid>{01FDF29A-40A1-46DF-84F5-85EBBD2A2410}</ProjectGuid>
<RootNamespace>_testclinic_limited</RootNamespace>
<Keyword>Win32Proj</Keyword>
+ <SupportPGO>false</SupportPGO>
</PropertyGroup>
<Import Project="python.props" />
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
diff --git a/PCbuild/get_external.py b/PCbuild/get_external.py
index 4ecc8925349..8c1155c74a6 100755
--- a/PCbuild/get_external.py
+++ b/PCbuild/get_external.py
@@ -9,6 +9,25 @@ import zipfile
from urllib.request import urlretrieve
+def retrieve_with_retries(download_location, output_path, reporthook,
+ max_retries=7):
+ """Download a file with exponential backoff retry and save to disk."""
+ for attempt in range(max_retries + 1):
+ try:
+ resp = urlretrieve(
+ download_location,
+ output_path,
+ reporthook=reporthook,
+ )
+ except ConnectionError as ex:
+ if attempt == max_retries:
+ msg = f"Download from {download_location} failed."
+ raise OSError(msg) from ex
+ time.sleep(2.25**attempt)
+ else:
+ return resp
+
+
def fetch_zip(commit_hash, zip_dir, *, org='python', binary=False, verbose):
repo = f'cpython-{"bin" if binary else "source"}-deps'
url = f'https://github.com/{org}/{repo}/archive/{commit_hash}.zip'
@@ -16,10 +35,10 @@ def fetch_zip(commit_hash, zip_dir, *, org='python', binary=False, verbose):
if verbose:
reporthook = print
zip_dir.mkdir(parents=True, exist_ok=True)
- filename, headers = urlretrieve(
+ filename, _headers = retrieve_with_retries(
url,
zip_dir / f'{commit_hash}.zip',
- reporthook=reporthook,
+ reporthook
)
return filename
diff --git a/PCbuild/pyproject.props b/PCbuild/pyproject.props
index 7272542e13a..ce51e342241 100644
--- a/PCbuild/pyproject.props
+++ b/PCbuild/pyproject.props
@@ -96,19 +96,16 @@
<TargetMachine Condition="'$(Platform)' == 'x64'">MachineX64</TargetMachine>
<TargetMachine Condition="'$(Platform)'=='ARM'">MachineARM</TargetMachine>
<TargetMachine Condition="'$(Platform)'=='ARM64'">MachineARM64</TargetMachine>
- <ProfileGuidedDatabase Condition="$(SupportPGO)">$(OutDir)$(TargetName).pgd</ProfileGuidedDatabase>
- <LinkTimeCodeGeneration Condition="$(Configuration) == 'Release'">UseLinkTimeCodeGeneration</LinkTimeCodeGeneration>
- <LinkTimeCodeGeneration Condition="$(SupportPGO) and $(Configuration) == 'PGInstrument'">PGInstrument</LinkTimeCodeGeneration>
- <LinkTimeCodeGeneration Condition="$(SupportPGO) and $(Configuration) == 'PGUpdate'">PGUpdate</LinkTimeCodeGeneration>
+ <LinkTimeCodeGeneration Condition="$(Configuration) != 'Debug'">UseLinkTimeCodeGeneration</LinkTimeCodeGeneration>
<AdditionalDependencies>advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalOptions Condition="$(Configuration) != 'Debug'">/OPT:REF,NOICF %(AdditionalOptions)</AdditionalOptions>
<AdditionalOptions Condition="$(MSVCHasBrokenARM64Clamping) == 'true' and $(Platform) == 'ARM64'">-d2:-pattern-opt-disable:-932189325 %(AdditionalOptions)</AdditionalOptions>
+ <AdditionalOptions Condition="$(SupportPGO) and $(Configuration) == 'PGInstrument'">/GENPROFILE %(AdditionalOptions)</AdditionalOptions>
+ <AdditionalOptions Condition="$(SupportPGO) and $(Configuration) == 'PGUpdate'">/USEPROFILE %(AdditionalOptions)</AdditionalOptions>
</Link>
<Lib>
<LinkTimeCodeGeneration>false</LinkTimeCodeGeneration>
- <LinkTimeCodeGeneration Condition="$(Configuration) == 'Release'">true</LinkTimeCodeGeneration>
- <LinkTimeCodeGeneration Condition="$(SupportPGO) and $(Configuration) == 'PGInstrument'">true</LinkTimeCodeGeneration>
- <LinkTimeCodeGeneration Condition="$(SupportPGO) and $(Configuration) == 'PGUpdate'">true</LinkTimeCodeGeneration>
+ <LinkTimeCodeGeneration Condition="$(Configuration) != 'Debug'">true</LinkTimeCodeGeneration>
</Lib>
<ResourceCompile>
<AdditionalIncludeDirectories>$(PySourcePath)PC;$(PySourcePath)Include;$(IntDir);%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py
index 22dcfe1b0d9..dba20226c32 100755
--- a/Parser/asdl_c.py
+++ b/Parser/asdl_c.py
@@ -1512,7 +1512,7 @@ ast_repr_list(PyObject *list, int depth)
for (Py_ssize_t i = 0; i < Py_MIN(length, 2); i++) {
if (i > 0) {
- if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, ", ", 2) < 0) {
goto error;
}
}
@@ -1536,7 +1536,7 @@ ast_repr_list(PyObject *list, int depth)
}
if (i == 0 && length > 2) {
- if (PyUnicodeWriter_WriteUTF8(writer, ", ...", 5) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, ", ...", 5) < 0) {
goto error;
}
}
@@ -1640,7 +1640,7 @@ ast_repr_max_depth(AST_object *self, int depth)
}
if (i > 0) {
- if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, ", ", 2) < 0) {
Py_DECREF(name);
Py_DECREF(value_repr);
goto error;
diff --git a/Parser/parser.c b/Parser/parser.c
index 84a293cddff..d5aafef826e 100644
--- a/Parser/parser.c
+++ b/Parser/parser.c
@@ -14,7 +14,7 @@
# define MAXSTACK 4000
# endif
#else
-# define MAXSTACK 4000
+# define MAXSTACK 6000
#endif
static const int n_keyword_lists = 9;
static KeywordToken *reserved_keywords[] = {
diff --git a/Programs/test_frozenmain.h b/Programs/test_frozenmain.h
index b7d23f57018..dbeedb7ffe0 100644
--- a/Programs/test_frozenmain.h
+++ b/Programs/test_frozenmain.h
@@ -1,6 +1,6 @@
// Auto-generated by Programs/freeze_test_frozenmain.py
unsigned char M_test_frozenmain[] = {
- 227,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,
+ 227,0,0,0,0,0,0,0,0,0,0,0,0,9,0,0,
0,0,0,0,0,243,184,0,0,0,128,0,94,0,82,1,
73,0,116,0,94,0,82,1,73,1,116,1,93,2,33,0,
82,2,52,1,0,0,0,0,0,0,31,0,93,2,33,0,
diff --git a/Python/Python-ast.c b/Python/Python-ast.c
index f7625ab1205..660bc598a48 100644
--- a/Python/Python-ast.c
+++ b/Python/Python-ast.c
@@ -5796,7 +5796,7 @@ ast_repr_list(PyObject *list, int depth)
for (Py_ssize_t i = 0; i < Py_MIN(length, 2); i++) {
if (i > 0) {
- if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, ", ", 2) < 0) {
goto error;
}
}
@@ -5820,7 +5820,7 @@ ast_repr_list(PyObject *list, int depth)
}
if (i == 0 && length > 2) {
- if (PyUnicodeWriter_WriteUTF8(writer, ", ...", 5) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, ", ...", 5) < 0) {
goto error;
}
}
@@ -5924,7 +5924,7 @@ ast_repr_max_depth(AST_object *self, int depth)
}
if (i > 0) {
- if (PyUnicodeWriter_WriteUTF8(writer, ", ", 2) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, ", ", 2) < 0) {
Py_DECREF(name);
Py_DECREF(value_repr);
goto error;
diff --git a/Python/_warnings.c b/Python/_warnings.c
index 39bf1b225cc..12e6172b0cf 100644
--- a/Python/_warnings.c
+++ b/Python/_warnings.c
@@ -6,7 +6,6 @@
#include "pycore_long.h" // _PyLong_GetZero()
#include "pycore_pylifecycle.h" // _Py_IsInterpreterFinalizing()
#include "pycore_pystate.h" // _PyThreadState_GET()
-#include "pycore_sysmodule.h" // _PySys_GetOptionalAttr()
#include "pycore_traceback.h" // _Py_DisplaySourceLine()
#include "pycore_unicodeobject.h" // _PyUnicode_EqualToASCIIString()
@@ -678,7 +677,7 @@ show_warning(PyThreadState *tstate, PyObject *filename, int lineno,
goto error;
}
- if (_PySys_GetOptionalAttr(&_Py_ID(stderr), &f_stderr) <= 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(stderr), &f_stderr) <= 0) {
fprintf(stderr, "lost sys.stderr\n");
goto error;
}
diff --git a/Python/asm_trampoline.S b/Python/asm_trampoline.S
index 0a3265dfeee..616752459ba 100644
--- a/Python/asm_trampoline.S
+++ b/Python/asm_trampoline.S
@@ -9,6 +9,9 @@
# }
_Py_trampoline_func_start:
#ifdef __x86_64__
+#if defined(__CET__) && (__CET__ & 1)
+ endbr64
+#endif
sub $8, %rsp
call *%rcx
add $8, %rsp
@@ -34,3 +37,22 @@ _Py_trampoline_func_start:
.globl _Py_trampoline_func_end
_Py_trampoline_func_end:
.section .note.GNU-stack,"",@progbits
+# GNU property note indicating that this assembly code supports CET
+#if defined(__x86_64__) && defined(__CET__) && (__CET__ & 1)
+ .section .note.gnu.property,"a"
+ .align 8
+ .long 1f - 0f
+ .long 4f - 1f
+ .long 5
+0:
+ .string "GNU"
+1:
+ .align 8
+ .long 0xc0000002
+ .long 3f - 2f
+2:
+ .long 0x3
+3:
+ .align 8
+4:
+#endif // __x86_64__
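The trailing .note.gnu.property section advertises CET support for this hand-written trampoline, matching what the compiler emits for C objects built with -fcf-protection; the endbr64 added above is the corresponding landing pad. For reference, a sketch of the same feature test expressed in C (the macro name is illustrative only):

    /* __CET__ is defined by GCC/Clang under -fcf-protection: bit 0 means
     * indirect-branch tracking (served by endbr64), bit 1 means shadow
     * stack. */
    #if defined(__x86_64__) && defined(__CET__) && (__CET__ & 1)
    #  define TRAMPOLINE_NEEDS_ENDBR 1
    #else
    #  define TRAMPOLINE_NEEDS_ENDBR 0
    #endif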
diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c
index 3d0295ee388..e08c63924ca 100644
--- a/Python/bltinmodule.c
+++ b/Python/bltinmodule.c
@@ -14,7 +14,6 @@
#include "pycore_pyerrors.h" // _PyErr_NoMemory()
#include "pycore_pystate.h" // _PyThreadState_GET()
#include "pycore_pythonrun.h" // _Py_SourceAsString()
-#include "pycore_sysmodule.h" // _PySys_GetRequiredAttr()
#include "pycore_tuple.h" // _PyTuple_FromArray()
#include "pycore_cell.h" // PyCell_GetRef()
@@ -465,7 +464,7 @@ builtin_callable(PyObject *module, PyObject *obj)
static PyObject *
builtin_breakpoint(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *keywords)
{
- PyObject *hook = _PySys_GetRequiredAttrString("breakpointhook");
+ PyObject *hook = PySys_GetAttrString("breakpointhook");
if (hook == NULL) {
return NULL;
}
@@ -2164,7 +2163,7 @@ builtin_print_impl(PyObject *module, PyObject * const *args,
int i, err;
if (file == Py_None) {
- file = _PySys_GetRequiredAttr(&_Py_ID(stdout));
+ file = PySys_GetAttr(&_Py_ID(stdout));
if (file == NULL) {
return NULL;
}
@@ -2270,7 +2269,7 @@ builtin_input_impl(PyObject *module, PyObject *prompt)
int tty;
/* Check that stdin/out/err are intact */
- fin = _PySys_GetRequiredAttr(&_Py_ID(stdin));
+ fin = PySys_GetAttr(&_Py_ID(stdin));
if (fin == NULL) {
goto error;
}
@@ -2278,7 +2277,7 @@ builtin_input_impl(PyObject *module, PyObject *prompt)
PyErr_SetString(PyExc_RuntimeError, "lost sys.stdin");
goto error;
}
- fout = _PySys_GetRequiredAttr(&_Py_ID(stdout));
+ fout = PySys_GetAttr(&_Py_ID(stdout));
if (fout == NULL) {
goto error;
}
@@ -2286,7 +2285,7 @@ builtin_input_impl(PyObject *module, PyObject *prompt)
PyErr_SetString(PyExc_RuntimeError, "lost sys.stdout");
goto error;
}
- ferr = _PySys_GetRequiredAttr(&_Py_ID(stderr));
+ ferr = PySys_GetAttr(&_Py_ID(stderr));
if (ferr == NULL) {
goto error;
}
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index be22c7446f5..f02e32fd1d3 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -341,7 +341,7 @@ dummy_func(
}
pure inst(POP_TOP, (value --)) {
- PyStackRef_CLOSE(value);
+ PyStackRef_XCLOSE(value);
}
tier2 op(_POP_TWO, (nos, tos --)) {
@@ -362,9 +362,14 @@ dummy_func(
PyStackRef_CLOSE(value);
}
- macro(POP_ITER) = POP_TOP;
- no_save_ip tier1 inst(INSTRUMENTED_END_FOR, (receiver, value -- receiver)) {
+ inst(POP_ITER, (iter, index_or_null -- )) {
+ (void)index_or_null;
+ DEAD(index_or_null);
+ PyStackRef_CLOSE(iter);
+ }
+
+ no_save_ip tier1 inst(INSTRUMENTED_END_FOR, (receiver, index_or_null, value -- receiver, index_or_null)) {
/* Need to create a fake StopIteration error here,
* to conform to PEP 380 */
if (PyStackRef_GenCheck(receiver)) {
@@ -376,7 +381,9 @@ dummy_func(
PyStackRef_CLOSE(value);
}
- tier1 inst(INSTRUMENTED_POP_ITER, (iter -- )) {
+ tier1 inst(INSTRUMENTED_POP_ITER, (iter, index_or_null -- )) {
+ (void)index_or_null;
+ DEAD(index_or_null);
INSTRUMENTED_JUMP(prev_instr, this_instr+1, PY_MONITORING_EVENT_BRANCH_RIGHT);
PyStackRef_CLOSE(iter);
}
@@ -3041,15 +3048,24 @@ dummy_func(
values_or_none = PyStackRef_FromPyObjectSteal(values_or_none_o);
}
- inst(GET_ITER, (iterable -- iter)) {
+ inst(GET_ITER, (iterable -- iter, index_or_null)) {
#ifdef Py_STATS
_Py_GatherStats_GetIter(iterable);
#endif
/* before: [obj]; after [getiter(obj)] */
- PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable));
- PyStackRef_CLOSE(iterable);
- ERROR_IF(iter_o == NULL);
- iter = PyStackRef_FromPyObjectSteal(iter_o);
+ PyTypeObject *tp = PyStackRef_TYPE(iterable);
+ if (tp == &PyTuple_Type || tp == &PyList_Type) {
+ iter = iterable;
+ DEAD(iterable);
+ index_or_null = PyStackRef_TagInt(0);
+ }
+ else {
+ PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable));
+ PyStackRef_CLOSE(iterable);
+ ERROR_IF(iter_o == NULL);
+ iter = PyStackRef_FromPyObjectSteal(iter_o);
+ index_or_null = PyStackRef_NULL;
+ }
}
inst(GET_YIELD_FROM_ITER, (iterable -- iter)) {
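GET_ITER now special-cases exact lists and tuples: instead of allocating a list/tuple iterator object, it leaves the sequence itself on the stack and pushes a tagged integer index (PyStackRef_TagInt(0)); FOR_ITER and its specializations then advance that index, falling back to _PyForIter_NextWithIndex() (added in Python/ceval.c below). A rough C-level sketch of what the fast path amounts to; for_each_sequence_item() and visit are illustrative names, and the real code operates on _PyStackRef values and re-reads the sequence length on every iteration:

    #include <Python.h>
    #include <assert.h>

    /* Illustrative only: the shape of the list/tuple fast path.  No
     * iterator object is created; the sequence stays put and an integer
     * index is advanced. */
    static int
    for_each_sequence_item(PyObject *seq,
                           int (*visit)(PyObject *item, void *arg),
                           void *arg)
    {
        assert(PyList_CheckExact(seq) || PyTuple_CheckExact(seq));
        for (Py_ssize_t i = 0; i < PySequence_Fast_GET_SIZE(seq); i++) {
            PyObject *item = PySequence_Fast_GET_ITEM(seq, i);  /* borrowed */
            if (visit(item, arg) < 0) {
                return -1;
            }
        }
        return 0;
    }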
@@ -3096,11 +3112,11 @@ dummy_func(
FOR_ITER_GEN,
};
- specializing op(_SPECIALIZE_FOR_ITER, (counter/1, iter -- iter)) {
+ specializing op(_SPECIALIZE_FOR_ITER, (counter/1, iter, null_or_index -- iter, null_or_index)) {
#if ENABLE_SPECIALIZATION_FT
if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
next_instr = this_instr;
- _Py_Specialize_ForIter(iter, next_instr, oparg);
+ _Py_Specialize_ForIter(iter, null_or_index, next_instr, oparg);
DISPATCH_SAME_OPARG();
}
OPCODE_DEFERRED_INC(FOR_ITER);
@@ -3108,33 +3124,43 @@ dummy_func(
#endif /* ENABLE_SPECIALIZATION_FT */
}
- replaced op(_FOR_ITER, (iter -- iter, next)) {
+ replaced op(_FOR_ITER, (iter, null_or_index -- iter, null_or_index, next)) {
/* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
- if (next_o == NULL) {
- if (_PyErr_Occurred(tstate)) {
- int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
- if (!matches) {
- ERROR_NO_POP();
+ if (PyStackRef_IsTaggedInt(null_or_index)) {
+ next = _PyForIter_NextWithIndex(iter_o, null_or_index);
+ if (PyStackRef_IsNull(next)) {
+ JUMPBY(oparg + 1);
+ DISPATCH();
+ }
+ null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
+ }
+ else {
+ PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
+ if (next_o == NULL) {
+ if (_PyErr_Occurred(tstate)) {
+ int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
+ if (!matches) {
+ ERROR_NO_POP();
+ }
+ _PyEval_MonitorRaise(tstate, frame, this_instr);
+ _PyErr_Clear(tstate);
}
- _PyEval_MonitorRaise(tstate, frame, this_instr);
- _PyErr_Clear(tstate);
+ /* iterator ended normally */
+ assert(next_instr[oparg].op.code == END_FOR ||
+ next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
+ /* Jump forward oparg, then skip following END_FOR */
+ JUMPBY(oparg + 1);
+ DISPATCH();
}
- /* iterator ended normally */
- assert(next_instr[oparg].op.code == END_FOR ||
- next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
- /* Jump forward oparg, then skip following END_FOR */
- JUMPBY(oparg + 1);
- DISPATCH();
+ next = PyStackRef_FromPyObjectSteal(next_o);
}
- next = PyStackRef_FromPyObjectSteal(next_o);
- // Common case: no jump, leave it to the code generator
}
- op(_FOR_ITER_TIER_TWO, (iter -- iter, next)) {
+ op(_FOR_ITER_TIER_TWO, (iter, null_or_index -- iter, null_or_index, next)) {
/* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
+ EXIT_IF(!PyStackRef_IsNull(null_or_index));
PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
if (next_o == NULL) {
if (_PyErr_Occurred(tstate)) {
@@ -3156,63 +3182,64 @@ dummy_func(
macro(FOR_ITER) = _SPECIALIZE_FOR_ITER + _FOR_ITER;
- inst(INSTRUMENTED_FOR_ITER, (unused/1, iter -- iter, next)) {
+ inst(INSTRUMENTED_FOR_ITER, (unused/1, iter, null_or_index -- iter, null_or_index, next)) {
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
- if (next_o != NULL) {
- next = PyStackRef_FromPyObjectSteal(next_o);
+ if (PyStackRef_IsTaggedInt(null_or_index)) {
+ next = _PyForIter_NextWithIndex(iter_o, null_or_index);
+ if (PyStackRef_IsNull(next)) {
+ JUMPBY(oparg + 1);
+ DISPATCH();
+ }
+ null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT);
}
else {
- if (_PyErr_Occurred(tstate)) {
- int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
- if (!matches) {
- ERROR_NO_POP();
+ PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
+ if (next_o != NULL) {
+ next = PyStackRef_FromPyObjectSteal(next_o);
+ INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT);
+ }
+ else {
+ if (_PyErr_Occurred(tstate)) {
+ int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
+ if (!matches) {
+ ERROR_NO_POP();
+ }
+ _PyEval_MonitorRaise(tstate, frame, this_instr);
+ _PyErr_Clear(tstate);
}
- _PyEval_MonitorRaise(tstate, frame, this_instr);
- _PyErr_Clear(tstate);
+ /* iterator ended normally */
+ assert(next_instr[oparg].op.code == END_FOR ||
+ next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
+ /* Skip END_FOR */
+ JUMPBY(oparg + 1);
+ DISPATCH();
}
- /* iterator ended normally */
- assert(next_instr[oparg].op.code == END_FOR ||
- next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
- /* Skip END_FOR */
- JUMPBY(oparg + 1);
- DISPATCH();
}
}
- op(_ITER_CHECK_LIST, (iter -- iter)) {
+ op(_ITER_CHECK_LIST, (iter, null_or_index -- iter, null_or_index)) {
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- EXIT_IF(Py_TYPE(iter_o) != &PyListIter_Type);
+ EXIT_IF(Py_TYPE(iter_o) != &PyList_Type);
+ assert(PyStackRef_IsTaggedInt(null_or_index));
#ifdef Py_GIL_DISABLED
- EXIT_IF(!_PyObject_IsUniquelyReferenced(iter_o));
- _PyListIterObject *it = (_PyListIterObject *)iter_o;
- EXIT_IF(!_Py_IsOwnedByCurrentThread((PyObject *)it->it_seq) ||
- !_PyObject_GC_IS_SHARED(it->it_seq));
+ EXIT_IF(!_Py_IsOwnedByCurrentThread(iter_o) && !_PyObject_GC_IS_SHARED(iter_o));
#endif
}
- replaced op(_ITER_JUMP_LIST, (iter -- iter)) {
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- assert(Py_TYPE(iter_o) == &PyListIter_Type);
-// For free-threaded Python, the loop exit can happen at any point during
-// item retrieval, so it doesn't make much sense to check and jump
-// separately before item retrieval. Any length check we do here can be
-// invalid by the time we actually try to fetch the item.
+ replaced op(_ITER_JUMP_LIST, (iter, null_or_index -- iter, null_or_index)) {
#ifdef Py_GIL_DISABLED
- assert(_PyObject_IsUniquelyReferenced(iter_o));
- (void)iter_o;
+ // For free-threaded Python, the loop exit can happen at any point during
+ // item retrieval, so it doesn't make much sense to check and jump
+ // separately before item retrieval. Any length check we do here can be
+ // invalid by the time we actually try to fetch the item.
#else
- _PyListIterObject *it = (_PyListIterObject *)iter_o;
+ PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter);
+ assert(Py_TYPE(list_o) == &PyList_Type);
STAT_INC(FOR_ITER, hit);
- PyListObject *seq = it->it_seq;
- if (seq == NULL || (size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) {
- it->it_index = -1;
- if (seq != NULL) {
- it->it_seq = NULL;
- Py_DECREF(seq);
- }
+ if ((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyList_GET_SIZE(list_o)) {
+ null_or_index = PyStackRef_TagInt(-1);
/* Jump forward oparg, then skip following END_FOR instruction */
JUMPBY(oparg + 1);
DISPATCH();
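The relocated comment above states the key free-threading constraint: a length check made before the fetch can be stale by the time the item is read, so the fetch itself has to be the authoritative exhaustion test (which is why _ITER_NEXT_LIST relies on _PyList_GetItemRefNoLock() reporting out-of-range). A hedged sketch of the same combined fetch-or-done pattern using only public API; fetch_or_done() is illustrative and not part of this change:

    #include <Python.h>

    /* Illustrative only: PyList_GetItemRef() (public since 3.13) returns a
     * new reference or sets IndexError, so the bound check and the read are
     * a single call rather than two racy steps. */
    static PyObject *
    fetch_or_done(PyObject *list, Py_ssize_t i)
    {
        PyObject *item = PyList_GetItemRef(list, i);
        if (item == NULL && PyErr_ExceptionMatches(PyExc_IndexError)) {
            PyErr_Clear();             /* out of range: iteration finished */
        }
        return item;
    }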
@@ -3221,73 +3248,54 @@ dummy_func(
}
// Only used by Tier 2
- op(_GUARD_NOT_EXHAUSTED_LIST, (iter -- iter)) {
+ op(_GUARD_NOT_EXHAUSTED_LIST, (iter, null_or_index -- iter, null_or_index)) {
#ifndef Py_GIL_DISABLED
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- _PyListIterObject *it = (_PyListIterObject *)iter_o;
- assert(Py_TYPE(iter_o) == &PyListIter_Type);
- PyListObject *seq = it->it_seq;
- EXIT_IF(seq == NULL);
- if ((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) {
- it->it_index = -1;
- EXIT_IF(1);
- }
+ PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter);
+ assert(Py_TYPE(list_o) == &PyList_Type);
+ EXIT_IF((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyList_GET_SIZE(list_o));
#endif
}
- replaced op(_ITER_NEXT_LIST, (iter -- iter, next)) {
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- _PyListIterObject *it = (_PyListIterObject *)iter_o;
- assert(Py_TYPE(iter_o) == &PyListIter_Type);
- PyListObject *seq = it->it_seq;
- assert(seq);
+ replaced op(_ITER_NEXT_LIST, (iter, null_or_index -- iter, null_or_index, next)) {
+ PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter);
+ assert(PyList_CheckExact(list_o));
#ifdef Py_GIL_DISABLED
- assert(_PyObject_IsUniquelyReferenced(iter_o));
- assert(_Py_IsOwnedByCurrentThread((PyObject *)seq) ||
- _PyObject_GC_IS_SHARED(seq));
+ assert(_Py_IsOwnedByCurrentThread(list_o) ||
+ _PyObject_GC_IS_SHARED(list_o));
STAT_INC(FOR_ITER, hit);
- int result = _PyList_GetItemRefNoLock(seq, it->it_index, &next);
+ int result = _PyList_GetItemRefNoLock((PyListObject *)list_o, PyStackRef_UntagInt(null_or_index), &next);
// A negative result means we lost a race with another thread
// and we need to take the slow path.
DEOPT_IF(result < 0);
if (result == 0) {
- it->it_index = -1;
+ null_or_index = PyStackRef_TagInt(-1);
/* Jump forward oparg, then skip following END_FOR instruction */
JUMPBY(oparg + 1);
DISPATCH();
}
- it->it_index++;
#else
- assert(it->it_index < PyList_GET_SIZE(seq));
- next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(seq, it->it_index++));
+ next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(list_o, PyStackRef_UntagInt(null_or_index)));
#endif
+ null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
}
// Only used by Tier 2
- op(_ITER_NEXT_LIST_TIER_TWO, (iter -- iter, next)) {
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- _PyListIterObject *it = (_PyListIterObject *)iter_o;
- assert(Py_TYPE(iter_o) == &PyListIter_Type);
- PyListObject *seq = it->it_seq;
- assert(seq);
+ op(_ITER_NEXT_LIST_TIER_TWO, (iter, null_or_index -- iter, null_or_index, next)) {
+ PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter);
+ assert(PyList_CheckExact(list_o));
#ifdef Py_GIL_DISABLED
- assert(_PyObject_IsUniquelyReferenced(iter_o));
- assert(_Py_IsOwnedByCurrentThread((PyObject *)seq) ||
- _PyObject_GC_IS_SHARED(seq));
+ assert(_Py_IsOwnedByCurrentThread((PyObject *)list_o) ||
+ _PyObject_GC_IS_SHARED(list_o));
STAT_INC(FOR_ITER, hit);
- int result = _PyList_GetItemRefNoLock(seq, it->it_index, &next);
+ int result = _PyList_GetItemRefNoLock((PyListObject *)list_o, PyStackRef_UntagInt(null_or_index), &next);
// A negative result means we lost a race with another thread
// and we need to take the slow path.
- EXIT_IF(result < 0);
- if (result == 0) {
- it->it_index = -1;
- EXIT_IF(1);
- }
- it->it_index++;
+ DEOPT_IF(result <= 0);
#else
- assert(it->it_index < PyList_GET_SIZE(seq));
- next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(seq, it->it_index++));
+ assert(PyStackRef_UntagInt(null_or_index) < PyList_GET_SIZE(list_o));
+ next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(list_o, PyStackRef_UntagInt(null_or_index)));
#endif
+ null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
}
macro(FOR_ITER_LIST) =
@@ -3296,31 +3304,19 @@ dummy_func(
_ITER_JUMP_LIST +
_ITER_NEXT_LIST;
- op(_ITER_CHECK_TUPLE, (iter -- iter)) {
+ op(_ITER_CHECK_TUPLE, (iter, null_or_index -- iter, null_or_index)) {
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- EXIT_IF(Py_TYPE(iter_o) != &PyTupleIter_Type);
-#ifdef Py_GIL_DISABLED
- EXIT_IF(!_PyObject_IsUniquelyReferenced(iter_o));
-#endif
+ EXIT_IF(Py_TYPE(iter_o) != &PyTuple_Type);
+ assert(PyStackRef_IsTaggedInt(null_or_index));
}
- replaced op(_ITER_JUMP_TUPLE, (iter -- iter)) {
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- (void)iter_o;
- assert(Py_TYPE(iter_o) == &PyTupleIter_Type);
-#ifdef Py_GIL_DISABLED
- assert(_PyObject_IsUniquelyReferenced(iter_o));
-#endif
- _PyTupleIterObject *it = (_PyTupleIterObject *)iter_o;
+ replaced op(_ITER_JUMP_TUPLE, (iter, null_or_index -- iter, null_or_index)) {
+ PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter);
+ (void)tuple_o;
+ assert(Py_TYPE(tuple_o) == &PyTuple_Type);
STAT_INC(FOR_ITER, hit);
- PyTupleObject *seq = it->it_seq;
- if (seq == NULL || (size_t)it->it_index >= (size_t)PyTuple_GET_SIZE(seq)) {
-#ifndef Py_GIL_DISABLED
- if (seq != NULL) {
- it->it_seq = NULL;
- Py_DECREF(seq);
- }
-#endif
+ if ((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyTuple_GET_SIZE(tuple_o)) {
+ null_or_index = PyStackRef_TagInt(-1);
/* Jump forward oparg, then skip following END_FOR instruction */
JUMPBY(oparg + 1);
DISPATCH();
@@ -3328,29 +3324,19 @@ dummy_func(
}
// Only used by Tier 2
- op(_GUARD_NOT_EXHAUSTED_TUPLE, (iter -- iter)) {
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- _PyTupleIterObject *it = (_PyTupleIterObject *)iter_o;
- assert(Py_TYPE(iter_o) == &PyTupleIter_Type);
-#ifdef Py_GIL_DISABLED
- assert(_PyObject_IsUniquelyReferenced(iter_o));
-#endif
- PyTupleObject *seq = it->it_seq;
- EXIT_IF(seq == NULL);
- EXIT_IF(it->it_index >= PyTuple_GET_SIZE(seq));
+ op(_GUARD_NOT_EXHAUSTED_TUPLE, (iter, null_or_index -- iter, null_or_index)) {
+ PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter);
+ assert(Py_TYPE(tuple_o) == &PyTuple_Type);
+ EXIT_IF((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyTuple_GET_SIZE(tuple_o));
}
- op(_ITER_NEXT_TUPLE, (iter -- iter, next)) {
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- _PyTupleIterObject *it = (_PyTupleIterObject *)iter_o;
- assert(Py_TYPE(iter_o) == &PyTupleIter_Type);
- PyTupleObject *seq = it->it_seq;
-#ifdef Py_GIL_DISABLED
- assert(_PyObject_IsUniquelyReferenced(iter_o));
-#endif
- assert(seq);
- assert(it->it_index < PyTuple_GET_SIZE(seq));
- next = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(seq, it->it_index++));
+ op(_ITER_NEXT_TUPLE, (iter, null_or_index -- iter, null_or_index, next)) {
+ PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter);
+ assert(Py_TYPE(tuple_o) == &PyTuple_Type);
+ uintptr_t i = PyStackRef_UntagInt(null_or_index);
+ assert((size_t)i < (size_t)PyTuple_GET_SIZE(tuple_o));
+ next = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(tuple_o, i));
+ null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
}
macro(FOR_ITER_TUPLE) =
@@ -3359,7 +3345,7 @@ dummy_func(
_ITER_JUMP_TUPLE +
_ITER_NEXT_TUPLE;
- op(_ITER_CHECK_RANGE, (iter -- iter)) {
+ op(_ITER_CHECK_RANGE, (iter, null_or_index -- iter, null_or_index)) {
_PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
EXIT_IF(Py_TYPE(r) != &PyRangeIter_Type);
#ifdef Py_GIL_DISABLED
@@ -3367,7 +3353,7 @@ dummy_func(
#endif
}
- replaced op(_ITER_JUMP_RANGE, (iter -- iter)) {
+ replaced op(_ITER_JUMP_RANGE, (iter, null_or_index -- iter, null_or_index)) {
_PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
assert(Py_TYPE(r) == &PyRangeIter_Type);
#ifdef Py_GIL_DISABLED
@@ -3382,13 +3368,13 @@ dummy_func(
}
// Only used by Tier 2
- op(_GUARD_NOT_EXHAUSTED_RANGE, (iter -- iter)) {
+ op(_GUARD_NOT_EXHAUSTED_RANGE, (iter, null_or_index -- iter, null_or_index)) {
_PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
assert(Py_TYPE(r) == &PyRangeIter_Type);
EXIT_IF(r->len <= 0);
}
- op(_ITER_NEXT_RANGE, (iter -- iter, next)) {
+ op(_ITER_NEXT_RANGE, (iter, null_or_index -- iter, null_or_index, next)) {
_PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
assert(Py_TYPE(r) == &PyRangeIter_Type);
#ifdef Py_GIL_DISABLED
@@ -3409,7 +3395,7 @@ dummy_func(
_ITER_JUMP_RANGE +
_ITER_NEXT_RANGE;
- op(_FOR_ITER_GEN_FRAME, (iter -- iter, gen_frame: _PyInterpreterFrame*)) {
+ op(_FOR_ITER_GEN_FRAME, (iter, null -- iter, null, gen_frame: _PyInterpreterFrame*)) {
PyGenObject *gen = (PyGenObject *)PyStackRef_AsPyObjectBorrow(iter);
DEOPT_IF(Py_TYPE(gen) != &PyGen_Type);
#ifdef Py_GIL_DISABLED
diff --git a/Python/ceval.c b/Python/ceval.c
index d1b6b9f1a1a..7aec196cb85 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -139,6 +139,19 @@
#endif
+static void
+check_invalid_reentrancy(void)
+{
+#if defined(Py_DEBUG) && defined(Py_GIL_DISABLED)
+ // In the free-threaded build, the interpreter must not be re-entered if
+ // the world is stopped. If so, that's a bug somewhere (quite likely in
+ // the painfully complex typeobject code).
+ PyInterpreterState *interp = _PyInterpreterState_GET();
+ assert(!interp->stoptheworld.world_stopped);
+#endif
+}
+
+
#ifdef Py_DEBUG
static void
dump_item(_PyStackRef item)
@@ -996,6 +1009,7 @@ PyObject* _Py_HOT_FUNCTION DONT_SLP_VECTORIZE
_PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int throwflag)
{
_Py_EnsureTstateNotNULL(tstate);
+ check_invalid_reentrancy();
CALL_STAT_INC(pyeval_calls);
#if USE_COMPUTED_GOTOS && !Py_TAIL_CALL_INTERP
@@ -2965,7 +2979,7 @@ _PyEval_ImportFrom(PyThreadState *tstate, PyObject *v, PyObject *name)
int is_possibly_shadowing_stdlib = 0;
if (is_possibly_shadowing) {
PyObject *stdlib_modules;
- if (_PySys_GetOptionalAttrString("stdlib_module_names", &stdlib_modules) < 0) {
+ if (PySys_GetOptionalAttrString("stdlib_module_names", &stdlib_modules) < 0) {
goto done;
}
if (stdlib_modules && PyAnySet_Check(stdlib_modules)) {
@@ -3425,6 +3439,26 @@ _PyEval_LoadName(PyThreadState *tstate, _PyInterpreterFrame *frame, PyObject *na
return value;
}
+_PyStackRef
+_PyForIter_NextWithIndex(PyObject *seq, _PyStackRef index)
+{
+ assert(PyStackRef_IsTaggedInt(index));
+ assert(PyTuple_CheckExact(seq) || PyList_CheckExact(seq));
+ intptr_t i = PyStackRef_UntagInt(index);
+ if (PyTuple_CheckExact(seq)) {
+ size_t size = PyTuple_GET_SIZE(seq);
+ if ((size_t)i >= size) {
+ return PyStackRef_NULL;
+ }
+ return PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(seq, i));
+ }
+ PyObject *item = _PyList_GetItemRef((PyListObject *)seq, i);
+ if (item == NULL) {
+ return PyStackRef_NULL;
+ }
+ return PyStackRef_FromPyObjectSteal(item);
+}
+
/* Check if a 'cls' provides the given special method. */
static inline int
type_has_special_method(PyTypeObject *cls, PyObject *name)
diff --git a/Python/codegen.c b/Python/codegen.c
index 683601103ec..0023d72cd5e 100644
--- a/Python/codegen.c
+++ b/Python/codegen.c
@@ -527,6 +527,15 @@ codegen_unwind_fblock(compiler *c, location *ploc,
case COMPILE_FBLOCK_FOR_LOOP:
/* Pop the iterator */
if (preserve_tos) {
+ ADDOP_I(c, *ploc, SWAP, 3);
+ }
+ ADDOP(c, *ploc, POP_TOP);
+ ADDOP(c, *ploc, POP_TOP);
+ return SUCCESS;
+
+ case COMPILE_FBLOCK_ASYNC_FOR_LOOP:
+ /* Pop the iterator */
+ if (preserve_tos) {
ADDOP_I(c, *ploc, SWAP, 2);
}
ADDOP(c, *ploc, POP_TOP);
@@ -629,7 +638,8 @@ codegen_unwind_fblock_stack(compiler *c, location *ploc,
c, *ploc, "'break', 'continue' and 'return' cannot appear in an except* block");
}
if (loop != NULL && (top->fb_type == COMPILE_FBLOCK_WHILE_LOOP ||
- top->fb_type == COMPILE_FBLOCK_FOR_LOOP)) {
+ top->fb_type == COMPILE_FBLOCK_FOR_LOOP ||
+ top->fb_type == COMPILE_FBLOCK_ASYNC_FOR_LOOP)) {
*loop = top;
return SUCCESS;
}
@@ -2125,7 +2135,7 @@ codegen_async_for(compiler *c, stmt_ty s)
ADDOP(c, LOC(s->v.AsyncFor.iter), GET_AITER);
USE_LABEL(c, start);
- RETURN_IF_ERROR(_PyCompile_PushFBlock(c, loc, COMPILE_FBLOCK_FOR_LOOP, start, end, NULL));
+ RETURN_IF_ERROR(_PyCompile_PushFBlock(c, loc, COMPILE_FBLOCK_ASYNC_FOR_LOOP, start, end, NULL));
/* SETUP_FINALLY to guard the __anext__ call */
ADDOP_JUMP(c, loc, SETUP_FINALLY, except);
@@ -2142,7 +2152,7 @@ codegen_async_for(compiler *c, stmt_ty s)
/* Mark jump as artificial */
ADDOP_JUMP(c, NO_LOCATION, JUMP, start);
- _PyCompile_PopFBlock(c, COMPILE_FBLOCK_FOR_LOOP, start);
+ _PyCompile_PopFBlock(c, COMPILE_FBLOCK_ASYNC_FOR_LOOP, start);
/* Except block for __anext__ */
USE_LABEL(c, except);
@@ -3895,10 +3905,11 @@ maybe_optimize_function_call(compiler *c, expr_ty e, jump_target_label end)
NEW_JUMP_TARGET_LABEL(c, loop);
NEW_JUMP_TARGET_LABEL(c, cleanup);
+ ADDOP(c, loc, PUSH_NULL); // Push NULL index for loop
USE_LABEL(c, loop);
ADDOP_JUMP(c, loc, FOR_ITER, cleanup);
if (const_oparg == CONSTANT_BUILTIN_TUPLE) {
- ADDOP_I(c, loc, LIST_APPEND, 2);
+ ADDOP_I(c, loc, LIST_APPEND, 3);
ADDOP_JUMP(c, loc, JUMP, loop);
}
else {
@@ -4442,13 +4453,12 @@ codegen_sync_comprehension_generator(compiler *c, location loc,
}
if (IS_JUMP_TARGET_LABEL(start)) {
VISIT(c, expr, gen->iter);
- ADDOP(c, LOC(gen->iter), GET_ITER);
}
}
}
if (IS_JUMP_TARGET_LABEL(start)) {
- depth++;
+ depth += 2;
ADDOP(c, LOC(gen->iter), GET_ITER);
USE_LABEL(c, start);
ADDOP_JUMP(c, LOC(gen->iter), FOR_ITER, anchor);
@@ -4543,9 +4553,9 @@ codegen_async_comprehension_generator(compiler *c, location loc,
else {
/* Sub-iter - calculate on the fly */
VISIT(c, expr, gen->iter);
- ADDOP(c, LOC(gen->iter), GET_AITER);
}
}
+ ADDOP(c, LOC(gen->iter), GET_AITER);
USE_LABEL(c, start);
/* Runtime will push a block here, so we need to account for that */
@@ -4757,19 +4767,6 @@ pop_inlined_comprehension_state(compiler *c, location loc,
return SUCCESS;
}
-static inline int
-codegen_comprehension_iter(compiler *c, comprehension_ty comp)
-{
- VISIT(c, expr, comp->iter);
- if (comp->is_async) {
- ADDOP(c, LOC(comp->iter), GET_AITER);
- }
- else {
- ADDOP(c, LOC(comp->iter), GET_ITER);
- }
- return SUCCESS;
-}
-
static int
codegen_comprehension(compiler *c, expr_ty e, int type,
identifier name, asdl_comprehension_seq *generators, expr_ty elt,
@@ -4789,9 +4786,7 @@ codegen_comprehension(compiler *c, expr_ty e, int type,
outermost = (comprehension_ty) asdl_seq_GET(generators, 0);
if (is_inlined) {
- if (codegen_comprehension_iter(c, outermost)) {
- goto error;
- }
+ VISIT(c, expr, outermost->iter);
if (push_inlined_comprehension_state(c, loc, entry, &inline_state)) {
goto error;
}
diff --git a/Python/context.c b/Python/context.c
index dceaae9b429..9927cab915c 100644
--- a/Python/context.c
+++ b/Python/context.c
@@ -979,7 +979,7 @@ contextvar_tp_repr(PyObject *op)
return NULL;
}
- if (PyUnicodeWriter_WriteUTF8(writer, "<ContextVar name=", 17) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, "<ContextVar name=", 17) < 0) {
goto error;
}
if (PyUnicodeWriter_WriteRepr(writer, self->var_name) < 0) {
@@ -987,7 +987,7 @@ contextvar_tp_repr(PyObject *op)
}
if (self->var_default != NULL) {
- if (PyUnicodeWriter_WriteUTF8(writer, " default=", 9) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, " default=", 9) < 0) {
goto error;
}
if (PyUnicodeWriter_WriteRepr(writer, self->var_default) < 0) {
@@ -1182,15 +1182,15 @@ token_tp_repr(PyObject *op)
if (writer == NULL) {
return NULL;
}
- if (PyUnicodeWriter_WriteUTF8(writer, "<Token", 6) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, "<Token", 6) < 0) {
goto error;
}
if (self->tok_used) {
- if (PyUnicodeWriter_WriteUTF8(writer, " used", 5) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, " used", 5) < 0) {
goto error;
}
}
- if (PyUnicodeWriter_WriteUTF8(writer, " var=", 5) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, " var=", 5) < 0) {
goto error;
}
if (PyUnicodeWriter_WriteRepr(writer, (PyObject *)self->tok_var) < 0) {
diff --git a/Python/crossinterp.c b/Python/crossinterp.c
index 26eecdddf4b..5e73ab28f2b 100644
--- a/Python/crossinterp.c
+++ b/Python/crossinterp.c
@@ -70,6 +70,17 @@ runpy_run_path(const char *filename, const char *modname)
}
+static void
+set_exc_with_cause(PyObject *exctype, const char *msg)
+{
+ PyObject *cause = PyErr_GetRaisedException();
+ PyErr_SetString(exctype, msg);
+ PyObject *exc = PyErr_GetRaisedException();
+ PyException_SetCause(exc, cause);
+ PyErr_SetRaisedException(exc);
+}
+
+
static PyObject *
pyerr_get_message(PyObject *exc)
{
@@ -908,8 +919,15 @@ get_script_xidata(PyThreadState *tstate, PyObject *obj, int pure,
}
goto error;
}
+#ifdef Py_GIL_DISABLED
+ // Don't immortalize code constants to avoid memory leaks.
+ ((_PyThreadStateImpl *)tstate)->suppress_co_const_immortalization++;
+#endif
code = Py_CompileStringExFlags(
script, filename, Py_file_input, &cf, optimize);
+#ifdef Py_GIL_DISABLED
+ ((_PyThreadStateImpl *)tstate)->suppress_co_const_immortalization--;
+#endif
Py_XDECREF(ref);
if (code == NULL) {
goto error;
@@ -1307,7 +1325,7 @@ _excinfo_normalize_type(struct _excinfo_type *info,
}
static void
-_PyXI_excinfo_Clear(_PyXI_excinfo *info)
+_PyXI_excinfo_clear(_PyXI_excinfo *info)
{
_excinfo_clear_type(&info->type);
if (info->msg != NULL) {
@@ -1357,7 +1375,7 @@ _PyXI_excinfo_InitFromException(_PyXI_excinfo *info, PyObject *exc)
assert(exc != NULL);
if (PyErr_GivenExceptionMatches(exc, PyExc_MemoryError)) {
- _PyXI_excinfo_Clear(info);
+ _PyXI_excinfo_clear(info);
return NULL;
}
const char *failure = NULL;
@@ -1403,7 +1421,7 @@ _PyXI_excinfo_InitFromException(_PyXI_excinfo *info, PyObject *exc)
error:
assert(failure != NULL);
- _PyXI_excinfo_Clear(info);
+ _PyXI_excinfo_clear(info);
return failure;
}
@@ -1454,7 +1472,7 @@ _PyXI_excinfo_InitFromObject(_PyXI_excinfo *info, PyObject *obj)
error:
assert(failure != NULL);
- _PyXI_excinfo_Clear(info);
+ _PyXI_excinfo_clear(info);
return failure;
}
@@ -1649,7 +1667,7 @@ _PyXI_ExcInfoAsObject(_PyXI_excinfo *info)
void
_PyXI_ClearExcInfo(_PyXI_excinfo *info)
{
- _PyXI_excinfo_Clear(info);
+ _PyXI_excinfo_clear(info);
}
@@ -1687,6 +1705,14 @@ _PyXI_ApplyErrorCode(_PyXI_errcode code, PyInterpreterState *interp)
PyErr_SetString(PyExc_InterpreterError,
"failed to apply namespace to __main__");
break;
+ case _PyXI_ERR_PRESERVE_FAILURE:
+ PyErr_SetString(PyExc_InterpreterError,
+ "failed to preserve objects across session");
+ break;
+ case _PyXI_ERR_EXC_PROPAGATION_FAILURE:
+ PyErr_SetString(PyExc_InterpreterError,
+ "failed to transfer exception between interpreters");
+ break;
case _PyXI_ERR_NOT_SHAREABLE:
_set_xid_lookup_failure(tstate, NULL, NULL, NULL);
break;
@@ -1736,7 +1762,7 @@ _PyXI_InitError(_PyXI_error *error, PyObject *excobj, _PyXI_errcode code)
assert(excobj == NULL);
assert(code != _PyXI_ERR_NO_ERROR);
error->code = code;
- _PyXI_excinfo_Clear(&error->uncaught);
+ _PyXI_excinfo_clear(&error->uncaught);
}
return failure;
}
@@ -1746,7 +1772,7 @@ _PyXI_ApplyError(_PyXI_error *error)
{
PyThreadState *tstate = PyThreadState_Get();
if (error->code == _PyXI_ERR_UNCAUGHT_EXCEPTION) {
- // Raise an exception that proxies the propagated exception.
+ // We will raise an exception that proxies the propagated exception.
return _PyXI_excinfo_AsObject(&error->uncaught);
}
else if (error->code == _PyXI_ERR_NOT_SHAREABLE) {
@@ -1832,7 +1858,8 @@ _sharednsitem_has_value(_PyXI_namespace_item *item, int64_t *p_interpid)
}
static int
-_sharednsitem_set_value(_PyXI_namespace_item *item, PyObject *value)
+_sharednsitem_set_value(_PyXI_namespace_item *item, PyObject *value,
+ xidata_fallback_t fallback)
{
assert(_sharednsitem_is_initialized(item));
assert(item->xidata == NULL);
@@ -1841,8 +1868,7 @@ _sharednsitem_set_value(_PyXI_namespace_item *item, PyObject *value)
return -1;
}
PyThreadState *tstate = PyThreadState_Get();
- // XXX Use _PyObject_GetXIDataWithFallback()?
- if (_PyObject_GetXIDataNoFallback(tstate, value, item->xidata) != 0) {
+ if (_PyObject_GetXIData(tstate, value, fallback, item->xidata) < 0) {
PyMem_RawFree(item->xidata);
item->xidata = NULL;
// The caller may want to propagate PyExc_NotShareableError
@@ -1874,7 +1900,8 @@ _sharednsitem_clear(_PyXI_namespace_item *item)
}
static int
-_sharednsitem_copy_from_ns(struct _sharednsitem *item, PyObject *ns)
+_sharednsitem_copy_from_ns(struct _sharednsitem *item, PyObject *ns,
+ xidata_fallback_t fallback)
{
assert(item->name != NULL);
assert(item->xidata == NULL);
@@ -1886,7 +1913,7 @@ _sharednsitem_copy_from_ns(struct _sharednsitem *item, PyObject *ns)
// When applied, this item will be set to the default (or fail).
return 0;
}
- if (_sharednsitem_set_value(item, value) < 0) {
+ if (_sharednsitem_set_value(item, value, fallback) < 0) {
return -1;
}
return 0;
@@ -2137,18 +2164,21 @@ error:
return NULL;
}
-static void _propagate_not_shareable_error(_PyXI_session *);
+static void _propagate_not_shareable_error(_PyXI_errcode *);
static int
-_fill_sharedns(_PyXI_namespace *ns, PyObject *nsobj, _PyXI_session *session)
+_fill_sharedns(_PyXI_namespace *ns, PyObject *nsobj,
+ xidata_fallback_t fallback, _PyXI_errcode *p_errcode)
{
// All items are expected to be shareable.
assert(_sharedns_check_counts(ns));
assert(ns->numnames == ns->maxitems);
assert(ns->numvalues == 0);
for (Py_ssize_t i=0; i < ns->maxitems; i++) {
- if (_sharednsitem_copy_from_ns(&ns->items[i], nsobj) < 0) {
- _propagate_not_shareable_error(session);
+ if (_sharednsitem_copy_from_ns(&ns->items[i], nsobj, fallback) < 0) {
+ if (p_errcode != NULL) {
+ _propagate_not_shareable_error(p_errcode);
+ }
// Clear out the ones we set so far.
for (Py_ssize_t j=0; j < i; j++) {
_sharednsitem_clear_value(&ns->items[j]);
@@ -2214,6 +2244,18 @@ _apply_sharedns(_PyXI_namespace *ns, PyObject *nsobj, PyObject *dflt)
/* switched-interpreter sessions */
/*********************************/
+struct xi_session_error {
+ // This is set if the interpreter is entered and raised an exception
+ // that needs to be handled in some special way during exit.
+ _PyXI_errcode *override;
+ // This is set if exit captured an exception to propagate.
+ _PyXI_error *info;
+
+ // -- pre-allocated memory --
+ _PyXI_error _info;
+ _PyXI_errcode _override;
+};
+
struct xi_session {
#define SESSION_UNUSED 0
#define SESSION_ACTIVE 1
@@ -2242,18 +2284,14 @@ struct xi_session {
// beginning of the session as a convenience.
PyObject *main_ns;
- // This is set if the interpreter is entered and raised an exception
- // that needs to be handled in some special way during exit.
- _PyXI_errcode *error_override;
- // This is set if exit captured an exception to propagate.
- _PyXI_error *error;
+ // This is a dict of objects that will be available (via sharing)
+ // once the session exits. Do not access this directly; use
+ // _PyXI_Preserve() and _PyXI_GetPreserved() instead.
+ PyObject *_preserved;
- // -- pre-allocated memory --
- _PyXI_error _error;
- _PyXI_errcode _error_override;
+ struct xi_session_error error;
};
-
_PyXI_session *
_PyXI_NewSession(void)
{
@@ -2279,9 +2317,25 @@ _session_is_active(_PyXI_session *session)
return session->status == SESSION_ACTIVE;
}
-static int _ensure_main_ns(_PyXI_session *);
+static int
+_session_pop_error(_PyXI_session *session, struct xi_session_error *err)
+{
+ if (session->error.info == NULL) {
+ assert(session->error.override == NULL);
+ *err = (struct xi_session_error){0};
+ return 0;
+ }
+ *err = session->error;
+ err->info = &err->_info;
+ if (err->override != NULL) {
+ err->override = &err->_override;
+ }
+ session->error = (struct xi_session_error){0};
+ return 1;
+}
+
+static int _ensure_main_ns(_PyXI_session *, _PyXI_errcode *);
static inline void _session_set_error(_PyXI_session *, _PyXI_errcode);
-static void _capture_current_exception(_PyXI_session *);
/* enter/exit a cross-interpreter session */
@@ -2298,9 +2352,9 @@ _enter_session(_PyXI_session *session, PyInterpreterState *interp)
assert(!session->running);
assert(session->main_ns == NULL);
// Set elsewhere and cleared in _capture_current_exception().
- assert(session->error_override == NULL);
- // Set elsewhere and cleared in _PyXI_ApplyCapturedException().
- assert(session->error == NULL);
+ assert(session->error.override == NULL);
+ // Set elsewhere and cleared in _PyXI_Exit().
+ assert(session->error.info == NULL);
// Switch to interpreter.
PyThreadState *tstate = PyThreadState_Get();
@@ -2329,14 +2383,16 @@ _exit_session(_PyXI_session *session)
PyThreadState *tstate = session->init_tstate;
assert(tstate != NULL);
assert(PyThreadState_Get() == tstate);
+ assert(!_PyErr_Occurred(tstate));
// Release any of the entered interpreters resources.
Py_CLEAR(session->main_ns);
+ Py_CLEAR(session->_preserved);
// Ensure this thread no longer owns __main__.
if (session->running) {
_PyInterpreterState_SetNotRunningMain(tstate->interp);
- assert(!PyErr_Occurred());
+ assert(!_PyErr_Occurred(tstate));
session->running = 0;
}
@@ -2353,21 +2409,16 @@ _exit_session(_PyXI_session *session)
assert(!session->own_init_tstate);
}
- // For now the error data persists past the exit.
- *session = (_PyXI_session){
- .error_override = session->error_override,
- .error = session->error,
- ._error = session->_error,
- ._error_override = session->_error_override,
- };
+ assert(session->error.info == NULL);
+ assert(session->error.override == _PyXI_ERR_NO_ERROR);
+
+ *session = (_PyXI_session){0};
}
static void
-_propagate_not_shareable_error(_PyXI_session *session)
+_propagate_not_shareable_error(_PyXI_errcode *p_errcode)
{
- if (session == NULL) {
- return;
- }
+ assert(p_errcode != NULL);
PyThreadState *tstate = PyThreadState_Get();
PyObject *exctype = get_notshareableerror_type(tstate);
if (exctype == NULL) {
@@ -2377,46 +2428,46 @@ _propagate_not_shareable_error(_PyXI_session *session)
}
if (PyErr_ExceptionMatches(exctype)) {
// We want to propagate the exception directly.
- _session_set_error(session, _PyXI_ERR_NOT_SHAREABLE);
+ *p_errcode = _PyXI_ERR_NOT_SHAREABLE;
}
}
-PyObject *
-_PyXI_ApplyCapturedException(_PyXI_session *session)
-{
- assert(!PyErr_Occurred());
- assert(session->error != NULL);
- PyObject *res = _PyXI_ApplyError(session->error);
- assert((res == NULL) != (PyErr_Occurred() == NULL));
- session->error = NULL;
- return res;
-}
-
-int
-_PyXI_HasCapturedException(_PyXI_session *session)
-{
- return session->error != NULL;
-}
-
int
_PyXI_Enter(_PyXI_session *session,
- PyInterpreterState *interp, PyObject *nsupdates)
+ PyInterpreterState *interp, PyObject *nsupdates,
+ _PyXI_session_result *result)
{
// Convert the attrs for cross-interpreter use.
_PyXI_namespace *sharedns = NULL;
if (nsupdates != NULL) {
Py_ssize_t len = PyDict_Size(nsupdates);
if (len < 0) {
+ if (result != NULL) {
+ result->errcode = _PyXI_ERR_APPLY_NS_FAILURE;
+ }
return -1;
}
if (len > 0) {
sharedns = _create_sharedns(nsupdates);
if (sharedns == NULL) {
+ if (result != NULL) {
+ result->errcode = _PyXI_ERR_APPLY_NS_FAILURE;
+ }
return -1;
}
- if (_fill_sharedns(sharedns, nsupdates, NULL) < 0) {
- assert(session->error == NULL);
+ // For now we limit it to shareable objects.
+ xidata_fallback_t fallback = _PyXIDATA_XIDATA_ONLY;
+ _PyXI_errcode errcode = _PyXI_ERR_NO_ERROR;
+ if (_fill_sharedns(sharedns, nsupdates, fallback, &errcode) < 0) {
+ assert(PyErr_Occurred());
+ assert(session->error.info == NULL);
+ if (errcode == _PyXI_ERR_NO_ERROR) {
+ errcode = _PyXI_ERR_UNCAUGHT_EXCEPTION;
+ }
_destroy_sharedns(sharedns);
+ if (result != NULL) {
+ result->errcode = errcode;
+ }
return -1;
}
}
@@ -2438,8 +2489,7 @@ _PyXI_Enter(_PyXI_session *session,
// Apply the cross-interpreter data.
if (sharedns != NULL) {
- if (_ensure_main_ns(session) < 0) {
- errcode = _PyXI_ERR_MAIN_NS_FAILURE;
+ if (_ensure_main_ns(session, &errcode) < 0) {
goto error;
}
if (_apply_sharedns(sharedns, session->main_ns, NULL) < 0) {
@@ -2455,19 +2505,124 @@ _PyXI_Enter(_PyXI_session *session,
error:
// We want to propagate all exceptions here directly (best effort).
+ assert(errcode != _PyXI_ERR_NO_ERROR);
_session_set_error(session, errcode);
+ assert(!PyErr_Occurred());
+
+ // Exit the session.
+ struct xi_session_error err;
+ (void)_session_pop_error(session, &err);
_exit_session(session);
+
if (sharedns != NULL) {
_destroy_sharedns(sharedns);
}
+
+ // Apply the error from the other interpreter.
+ PyObject *excinfo = _PyXI_ApplyError(err.info);
+ _PyXI_excinfo_clear(&err.info->uncaught);
+ if (excinfo != NULL) {
+ if (result != NULL) {
+ result->excinfo = excinfo;
+ }
+ else {
+#ifdef Py_DEBUG
+ fprintf(stderr, "_PyXI_Enter(): uncaught exception discarded");
+#endif
+ }
+ }
+ assert(PyErr_Occurred());
+
return -1;
}
-void
-_PyXI_Exit(_PyXI_session *session)
+static int _pop_preserved(_PyXI_session *, _PyXI_namespace **, PyObject **,
+ _PyXI_errcode *);
+static int _finish_preserved(_PyXI_namespace *, PyObject **);
+
+int
+_PyXI_Exit(_PyXI_session *session, _PyXI_errcode errcode,
+ _PyXI_session_result *result)
{
- _capture_current_exception(session);
+ int res = 0;
+
+ // Capture the raised exception, if any.
+ assert(session->error.info == NULL);
+ if (PyErr_Occurred()) {
+ _session_set_error(session, errcode);
+ assert(!PyErr_Occurred());
+ }
+ else {
+ assert(errcode == _PyXI_ERR_NO_ERROR);
+ assert(session->error.override == NULL);
+ }
+
+ // Capture the preserved namespace.
+ _PyXI_namespace *preserved = NULL;
+ PyObject *preservedobj = NULL;
+ if (result != NULL) {
+ errcode = _PyXI_ERR_NO_ERROR;
+ if (_pop_preserved(session, &preserved, &preservedobj, &errcode) < 0) {
+ if (session->error.info != NULL) {
+ // XXX Chain the exception (i.e. set __context__)?
+ PyErr_FormatUnraisable(
+ "Exception ignored while capturing preserved objects");
+ }
+ else {
+ _session_set_error(session, errcode);
+ }
+ }
+ }
+
+ // Exit the session.
+ struct xi_session_error err;
+ (void)_session_pop_error(session, &err);
_exit_session(session);
+
+ // Restore the preserved namespace.
+ assert(preserved == NULL || preservedobj == NULL);
+ if (_finish_preserved(preserved, &preservedobj) < 0) {
+ assert(preservedobj == NULL);
+ if (err.info != NULL) {
+ // XXX Chain the exception (i.e. set __context__)?
+ PyErr_FormatUnraisable(
+ "Exception ignored while capturing preserved objects");
+ }
+ else {
+ errcode = _PyXI_ERR_PRESERVE_FAILURE;
+ _propagate_not_shareable_error(&errcode);
+ }
+ }
+ if (result != NULL) {
+ result->preserved = preservedobj;
+ result->errcode = errcode;
+ }
+
+ // Apply the error from the other interpreter, if any.
+ if (err.info != NULL) {
+ res = -1;
+ assert(!PyErr_Occurred());
+ PyObject *excinfo = _PyXI_ApplyError(err.info);
+ _PyXI_excinfo_clear(&err.info->uncaught);
+ if (excinfo == NULL) {
+ assert(PyErr_Occurred());
+ if (result != NULL) {
+ _PyXI_ClearResult(result);
+ *result = (_PyXI_session_result){
+ .errcode = _PyXI_ERR_EXC_PROPAGATION_FAILURE,
+ };
+ }
+ }
+ else if (result != NULL) {
+ result->excinfo = excinfo;
+ }
+ else {
+#ifdef Py_DEBUG
+ fprintf(stderr, "_PyXI_Exit(): uncaught exception discarded");
+#endif
+ }
+ }
+ return res;
}
@@ -2476,15 +2631,15 @@ _PyXI_Exit(_PyXI_session *session)
static void
_capture_current_exception(_PyXI_session *session)
{
- assert(session->error == NULL);
+ assert(session->error.info == NULL);
if (!PyErr_Occurred()) {
- assert(session->error_override == NULL);
+ assert(session->error.override == NULL);
return;
}
// Handle the exception override.
- _PyXI_errcode *override = session->error_override;
- session->error_override = NULL;
+ _PyXI_errcode *override = session->error.override;
+ session->error.override = NULL;
_PyXI_errcode errcode = override != NULL
? *override
: _PyXI_ERR_UNCAUGHT_EXCEPTION;
@@ -2507,7 +2662,7 @@ _capture_current_exception(_PyXI_session *session)
}
// Capture the exception.
- _PyXI_error *err = &session->_error;
+ _PyXI_error *err = &session->error._info;
*err = (_PyXI_error){
.interp = session->init_tstate->interp,
};
@@ -2534,7 +2689,7 @@ _capture_current_exception(_PyXI_session *session)
// Finished!
assert(!PyErr_Occurred());
- session->error = err;
+ session->error.info = err;
}
static inline void
@@ -2542,15 +2697,19 @@ _session_set_error(_PyXI_session *session, _PyXI_errcode errcode)
{
assert(_session_is_active(session));
assert(PyErr_Occurred());
+ if (errcode == _PyXI_ERR_NO_ERROR) {
+ // We're a bit forgiving here.
+ errcode = _PyXI_ERR_UNCAUGHT_EXCEPTION;
+ }
if (errcode != _PyXI_ERR_UNCAUGHT_EXCEPTION) {
- session->_error_override = errcode;
- session->error_override = &session->_error_override;
+ session->error._override = errcode;
+ session->error.override = &session->error._override;
}
_capture_current_exception(session);
}
static int
-_ensure_main_ns(_PyXI_session *session)
+_ensure_main_ns(_PyXI_session *session, _PyXI_errcode *p_errcode)
{
assert(_session_is_active(session));
if (session->main_ns != NULL) {
@@ -2559,11 +2718,17 @@ _ensure_main_ns(_PyXI_session *session)
// Cache __main__.__dict__.
PyObject *main_mod = _Py_GetMainModule(session->init_tstate);
if (_Py_CheckMainModule(main_mod) < 0) {
+ if (p_errcode != NULL) {
+ *p_errcode = _PyXI_ERR_MAIN_NS_FAILURE;
+ }
return -1;
}
PyObject *ns = PyModule_GetDict(main_mod); // borrowed
Py_DECREF(main_mod);
if (ns == NULL) {
+ if (p_errcode != NULL) {
+ *p_errcode = _PyXI_ERR_MAIN_NS_FAILURE;
+ }
return -1;
}
session->main_ns = Py_NewRef(ns);
@@ -2571,21 +2736,150 @@ _ensure_main_ns(_PyXI_session *session)
}
PyObject *
-_PyXI_GetMainNamespace(_PyXI_session *session)
+_PyXI_GetMainNamespace(_PyXI_session *session, _PyXI_errcode *p_errcode)
{
if (!_session_is_active(session)) {
PyErr_SetString(PyExc_RuntimeError, "session not active");
return NULL;
}
- if (_ensure_main_ns(session) < 0) {
- _session_set_error(session, _PyXI_ERR_MAIN_NS_FAILURE);
- _capture_current_exception(session);
+ if (_ensure_main_ns(session, p_errcode) < 0) {
return NULL;
}
return session->main_ns;
}
+static int
+_pop_preserved(_PyXI_session *session,
+ _PyXI_namespace **p_xidata, PyObject **p_obj,
+ _PyXI_errcode *p_errcode)
+{
+ assert(_PyThreadState_GET() == session->init_tstate); // active session
+ if (session->_preserved == NULL) {
+ *p_xidata = NULL;
+ *p_obj = NULL;
+ return 0;
+ }
+ if (session->init_tstate == session->prev_tstate) {
+ // We did not switch interpreters.
+ *p_xidata = NULL;
+ *p_obj = session->_preserved;
+ session->_preserved = NULL;
+ return 0;
+ }
+ *p_obj = NULL;
+
+ // We did switch interpreters.
+ Py_ssize_t len = PyDict_Size(session->_preserved);
+ if (len < 0) {
+ if (p_errcode != NULL) {
+ *p_errcode = _PyXI_ERR_PRESERVE_FAILURE;
+ }
+ return -1;
+ }
+ else if (len == 0) {
+ *p_xidata = NULL;
+ }
+ else {
+ _PyXI_namespace *xidata = _create_sharedns(session->_preserved);
+ if (xidata == NULL) {
+ if (p_errcode != NULL) {
+ *p_errcode = _PyXI_ERR_PRESERVE_FAILURE;
+ }
+ return -1;
+ }
+ _PyXI_errcode errcode = _PyXI_ERR_NO_ERROR;
+ if (_fill_sharedns(xidata, session->_preserved,
+ _PyXIDATA_FULL_FALLBACK, &errcode) < 0)
+ {
+ assert(session->error.info == NULL);
+ if (errcode != _PyXI_ERR_NOT_SHAREABLE) {
+ errcode = _PyXI_ERR_PRESERVE_FAILURE;
+ }
+ if (p_errcode != NULL) {
+ *p_errcode = errcode;
+ }
+ _destroy_sharedns(xidata);
+ return -1;
+ }
+ *p_xidata = xidata;
+ }
+ Py_CLEAR(session->_preserved);
+ return 0;
+}
+
+static int
+_finish_preserved(_PyXI_namespace *xidata, PyObject **p_preserved)
+{
+ if (xidata == NULL) {
+ return 0;
+ }
+ int res = -1;
+ if (p_preserved != NULL) {
+ PyObject *ns = PyDict_New();
+ if (ns == NULL) {
+ goto finally;
+ }
+ if (_apply_sharedns(xidata, ns, NULL) < 0) {
+ Py_CLEAR(ns);
+ goto finally;
+ }
+ *p_preserved = ns;
+ }
+ res = 0;
+
+finally:
+ _destroy_sharedns(xidata);
+ return res;
+}
+
+int
+_PyXI_Preserve(_PyXI_session *session, const char *name, PyObject *value,
+ _PyXI_errcode *p_errcode)
+{
+ if (!_session_is_active(session)) {
+ PyErr_SetString(PyExc_RuntimeError, "session not active");
+ return -1;
+ }
+ if (session->_preserved == NULL) {
+ session->_preserved = PyDict_New();
+ if (session->_preserved == NULL) {
+ set_exc_with_cause(PyExc_RuntimeError,
+ "failed to initialize preserved objects");
+ if (p_errcode != NULL) {
+ *p_errcode = _PyXI_ERR_PRESERVE_FAILURE;
+ }
+ return -1;
+ }
+ }
+ if (PyDict_SetItemString(session->_preserved, name, value) < 0) {
+ set_exc_with_cause(PyExc_RuntimeError, "failed to preserve object");
+ if (p_errcode != NULL) {
+ *p_errcode = _PyXI_ERR_PRESERVE_FAILURE;
+ }
+ return -1;
+ }
+ return 0;
+}
+
+PyObject *
+_PyXI_GetPreserved(_PyXI_session_result *result, const char *name)
+{
+ PyObject *value = NULL;
+ if (result->preserved != NULL) {
+ (void)PyDict_GetItemStringRef(result->preserved, name, &value);
+ }
+ return value;
+}
+
+void
+_PyXI_ClearResult(_PyXI_session_result *result)
+{
+ Py_CLEAR(result->preserved);
+ Py_CLEAR(result->excinfo);
+}
+
+
/*********************/
/* runtime lifecycle */
/*********************/
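Taken together, the crossinterp.c changes move error reporting and preserved objects out of the session struct and into a caller-owned _PyXI_session_result. A hedged lifecycle sketch of the intended calling pattern, pieced together from the signatures in this diff; run_in_interp() is hypothetical, _PyXI_FreeSession() is assumed to exist as the counterpart of _PyXI_NewSession() (it is not shown here), and real callers check every return value:

    #include "Python.h"
    #include "pycore_crossinterp.h"    /* internal API; illustrative use only */

    static int
    run_in_interp(PyInterpreterState *interp, PyObject *nsupdates)
    {
        _PyXI_session *session = _PyXI_NewSession();
        if (session == NULL) {
            return -1;
        }
        _PyXI_session_result result = {0};
        if (_PyXI_Enter(session, interp, nsupdates, &result) < 0) {
            /* result.errcode / result.excinfo describe what went wrong */
            _PyXI_ClearResult(&result);
            _PyXI_FreeSession(session);    /* assumed helper, see above */
            return -1;
        }
        /* ... run code in the target interpreter, optionally calling
           _PyXI_Preserve(session, "spam", value, NULL) along the way ... */
        int res = _PyXI_Exit(session, _PyXI_ERR_NO_ERROR, &result);
        PyObject *spam = _PyXI_GetPreserved(&result, "spam");  /* new ref or NULL */
        Py_XDECREF(spam);
        _PyXI_ClearResult(&result);
        _PyXI_FreeSession(session);        /* assumed helper, see above */
        return res;
    }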
diff --git a/Python/crossinterp_data_lookup.h b/Python/crossinterp_data_lookup.h
index 88eb41da89e..b16f38b847f 100644
--- a/Python/crossinterp_data_lookup.h
+++ b/Python/crossinterp_data_lookup.h
@@ -701,6 +701,14 @@ _PyFunction_FromXIData(_PyXIData_t *xidata)
Py_DECREF(code);
return NULL;
}
+ PyThreadState *tstate = _PyThreadState_GET();
+ if (PyDict_SetItem(globals, &_Py_ID(__builtins__),
+ tstate->interp->builtins) < 0)
+ {
+ Py_DECREF(code);
+ Py_DECREF(globals);
+ return NULL;
+ }
PyObject *func = PyFunction_New(code, globals);
Py_DECREF(code);
Py_DECREF(globals);
diff --git a/Python/errors.c b/Python/errors.c
index 81f267b043a..a3122f76bdd 100644
--- a/Python/errors.c
+++ b/Python/errors.c
@@ -10,7 +10,6 @@
#include "pycore_pystate.h" // _PyThreadState_GET()
#include "pycore_runtime.h" // _Py_ID()
#include "pycore_structseq.h" // _PyStructSequence_FiniBuiltin()
-#include "pycore_sysmodule.h" // _PySys_GetOptionalAttr()
#include "pycore_traceback.h" // _PyTraceBack_FromFrame()
#include "pycore_unicodeobject.h" // _PyUnicode_Equal()
@@ -1570,7 +1569,7 @@ write_unraisable_exc(PyThreadState *tstate, PyObject *exc_type,
PyObject *obj)
{
PyObject *file;
- if (_PySys_GetOptionalAttr(&_Py_ID(stderr), &file) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(stderr), &file) < 0) {
return -1;
}
if (file == NULL || file == Py_None) {
@@ -1677,7 +1676,7 @@ format_unraisable_v(const char *format, va_list va, PyObject *obj)
}
PyObject *hook;
- if (_PySys_GetOptionalAttr(&_Py_ID(unraisablehook), &hook) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(unraisablehook), &hook) < 0) {
Py_DECREF(hook_args);
err_msg_str = NULL;
obj = NULL;
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index 40090e692e4..35b29940cb4 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -534,7 +534,7 @@
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
- PyStackRef_CLOSE(value);
+ PyStackRef_XCLOSE(value);
stack_pointer = _PyFrame_GetStackPointer(frame);
break;
}
@@ -577,6 +577,20 @@
break;
}
+ case _POP_ITER: {
+ _PyStackRef index_or_null;
+ _PyStackRef iter;
+ index_or_null = stack_pointer[-1];
+ iter = stack_pointer[-2];
+ (void)index_or_null;
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ PyStackRef_CLOSE(iter);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ break;
+ }
+
case _END_SEND: {
_PyStackRef value;
_PyStackRef receiver;
@@ -4172,25 +4186,37 @@
case _GET_ITER: {
_PyStackRef iterable;
_PyStackRef iter;
+ _PyStackRef index_or_null;
iterable = stack_pointer[-1];
#ifdef Py_STATS
_PyFrame_SetStackPointer(frame, stack_pointer);
_Py_GatherStats_GetIter(iterable);
stack_pointer = _PyFrame_GetStackPointer(frame);
#endif
- _PyFrame_SetStackPointer(frame, stack_pointer);
- PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable));
- stack_pointer = _PyFrame_GetStackPointer(frame);
- stack_pointer += -1;
- assert(WITHIN_STACK_BOUNDS());
- _PyFrame_SetStackPointer(frame, stack_pointer);
- PyStackRef_CLOSE(iterable);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (iter_o == NULL) {
- JUMP_TO_ERROR();
+
+ PyTypeObject *tp = PyStackRef_TYPE(iterable);
+ if (tp == &PyTuple_Type || tp == &PyList_Type) {
+ iter = iterable;
+ index_or_null = PyStackRef_TagInt(0);
}
- iter = PyStackRef_FromPyObjectSteal(iter_o);
- stack_pointer[0] = iter;
+ else {
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable));
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ PyStackRef_CLOSE(iterable);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ if (iter_o == NULL) {
+ JUMP_TO_ERROR();
+ }
+ iter = PyStackRef_FromPyObjectSteal(iter_o);
+ index_or_null = PyStackRef_NULL;
+ stack_pointer += 1;
+ }
+ stack_pointer[-1] = iter;
+ stack_pointer[0] = index_or_null;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
break;
@@ -4237,10 +4263,16 @@
/* _FOR_ITER is not a viable micro-op for tier 2 because it is replaced */
case _FOR_ITER_TIER_TWO: {
+ _PyStackRef null_or_index;
_PyStackRef iter;
_PyStackRef next;
- iter = stack_pointer[-1];
+ null_or_index = stack_pointer[-1];
+ iter = stack_pointer[-2];
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
+ if (!PyStackRef_IsNull(null_or_index)) {
+ UOP_STAT_INC(uopcode, miss);
+ JUMP_TO_JUMP_TARGET();
+ }
_PyFrame_SetStackPointer(frame, stack_pointer);
PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
stack_pointer = _PyFrame_GetStackPointer(frame);
@@ -4272,21 +4304,18 @@
/* _INSTRUMENTED_FOR_ITER is not a viable micro-op for tier 2 because it is instrumented */
case _ITER_CHECK_LIST: {
+ _PyStackRef null_or_index;
_PyStackRef iter;
- iter = stack_pointer[-1];
+ null_or_index = stack_pointer[-1];
+ iter = stack_pointer[-2];
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- if (Py_TYPE(iter_o) != &PyListIter_Type) {
+ if (Py_TYPE(iter_o) != &PyList_Type) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
+ assert(PyStackRef_IsTaggedInt(null_or_index));
#ifdef Py_GIL_DISABLED
- if (!_PyObject_IsUniquelyReferenced(iter_o)) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
- }
- _PyListIterObject *it = (_PyListIterObject *)iter_o;
- if (!_Py_IsOwnedByCurrentThread((PyObject *)it->it_seq) ||
- !_PyObject_GC_IS_SHARED(it->it_seq)) {
+ if (!_Py_IsOwnedByCurrentThread(iter_o) && !_PyObject_GC_IS_SHARED(iter_o)) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
@@ -4297,24 +4326,17 @@
/* _ITER_JUMP_LIST is not a viable micro-op for tier 2 because it is replaced */
case _GUARD_NOT_EXHAUSTED_LIST: {
+ _PyStackRef null_or_index;
_PyStackRef iter;
- iter = stack_pointer[-1];
+ null_or_index = stack_pointer[-1];
+ iter = stack_pointer[-2];
#ifndef Py_GIL_DISABLED
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- _PyListIterObject *it = (_PyListIterObject *)iter_o;
- assert(Py_TYPE(iter_o) == &PyListIter_Type);
- PyListObject *seq = it->it_seq;
- if (seq == NULL) {
+ PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter);
+ assert(Py_TYPE(list_o) == &PyList_Type);
+ if ((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyList_GET_SIZE(list_o)) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
- if ((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) {
- it->it_index = -1;
- if (1) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
- }
- }
#endif
break;
}
@@ -4322,38 +4344,30 @@
/* _ITER_NEXT_LIST is not a viable micro-op for tier 2 because it is replaced */
case _ITER_NEXT_LIST_TIER_TWO: {
+ _PyStackRef null_or_index;
_PyStackRef iter;
_PyStackRef next;
- iter = stack_pointer[-1];
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- _PyListIterObject *it = (_PyListIterObject *)iter_o;
- assert(Py_TYPE(iter_o) == &PyListIter_Type);
- PyListObject *seq = it->it_seq;
- assert(seq);
+ null_or_index = stack_pointer[-1];
+ iter = stack_pointer[-2];
+ PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter);
+ assert(PyList_CheckExact(list_o));
#ifdef Py_GIL_DISABLED
- assert(_PyObject_IsUniquelyReferenced(iter_o));
- assert(_Py_IsOwnedByCurrentThread((PyObject *)seq) ||
- _PyObject_GC_IS_SHARED(seq));
+ assert(_Py_IsOwnedByCurrentThread((PyObject *)list_o) ||
+ _PyObject_GC_IS_SHARED(list_o));
STAT_INC(FOR_ITER, hit);
_PyFrame_SetStackPointer(frame, stack_pointer);
- int result = _PyList_GetItemRefNoLock(seq, it->it_index, &next);
+ int result = _PyList_GetItemRefNoLock((PyListObject *)list_o, PyStackRef_UntagInt(null_or_index), &next);
stack_pointer = _PyFrame_GetStackPointer(frame);
- if (result < 0) {
+ if (result <= 0) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
- if (result == 0) {
- it->it_index = -1;
- if (1) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
- }
- }
- it->it_index++;
#else
- assert(it->it_index < PyList_GET_SIZE(seq));
- next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(seq, it->it_index++));
+ assert(PyStackRef_UntagInt(null_or_index) < PyList_GET_SIZE(list_o));
+ next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(list_o, PyStackRef_UntagInt(null_or_index)));
#endif
+ null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
+ stack_pointer[-1] = null_or_index;
stack_pointer[0] = next;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@@ -4361,39 +4375,29 @@
}
case _ITER_CHECK_TUPLE: {
+ _PyStackRef null_or_index;
_PyStackRef iter;
- iter = stack_pointer[-1];
+ null_or_index = stack_pointer[-1];
+ iter = stack_pointer[-2];
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- if (Py_TYPE(iter_o) != &PyTupleIter_Type) {
+ if (Py_TYPE(iter_o) != &PyTuple_Type) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
- #ifdef Py_GIL_DISABLED
- if (!_PyObject_IsUniquelyReferenced(iter_o)) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
- }
- #endif
+ assert(PyStackRef_IsTaggedInt(null_or_index));
break;
}
/* _ITER_JUMP_TUPLE is not a viable micro-op for tier 2 because it is replaced */
case _GUARD_NOT_EXHAUSTED_TUPLE: {
+ _PyStackRef null_or_index;
_PyStackRef iter;
- iter = stack_pointer[-1];
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- _PyTupleIterObject *it = (_PyTupleIterObject *)iter_o;
- assert(Py_TYPE(iter_o) == &PyTupleIter_Type);
- #ifdef Py_GIL_DISABLED
- assert(_PyObject_IsUniquelyReferenced(iter_o));
- #endif
- PyTupleObject *seq = it->it_seq;
- if (seq == NULL) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
- }
- if (it->it_index >= PyTuple_GET_SIZE(seq)) {
+ null_or_index = stack_pointer[-1];
+ iter = stack_pointer[-2];
+ PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter);
+ assert(Py_TYPE(tuple_o) == &PyTuple_Type);
+ if ((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyTuple_GET_SIZE(tuple_o)) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
@@ -4401,19 +4405,18 @@
}
case _ITER_NEXT_TUPLE: {
+ _PyStackRef null_or_index;
_PyStackRef iter;
_PyStackRef next;
- iter = stack_pointer[-1];
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- _PyTupleIterObject *it = (_PyTupleIterObject *)iter_o;
- assert(Py_TYPE(iter_o) == &PyTupleIter_Type);
- PyTupleObject *seq = it->it_seq;
- #ifdef Py_GIL_DISABLED
- assert(_PyObject_IsUniquelyReferenced(iter_o));
- #endif
- assert(seq);
- assert(it->it_index < PyTuple_GET_SIZE(seq));
- next = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(seq, it->it_index++));
+ null_or_index = stack_pointer[-1];
+ iter = stack_pointer[-2];
+ PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter);
+ assert(Py_TYPE(tuple_o) == &PyTuple_Type);
+ uintptr_t i = PyStackRef_UntagInt(null_or_index);
+ assert((size_t)i < (size_t)PyTuple_GET_SIZE(tuple_o));
+ next = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(tuple_o, i));
+ null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
+ stack_pointer[-1] = null_or_index;
stack_pointer[0] = next;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@@ -4422,7 +4425,7 @@
case _ITER_CHECK_RANGE: {
_PyStackRef iter;
- iter = stack_pointer[-1];
+ iter = stack_pointer[-2];
_PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
if (Py_TYPE(r) != &PyRangeIter_Type) {
UOP_STAT_INC(uopcode, miss);
@@ -4441,7 +4444,7 @@
case _GUARD_NOT_EXHAUSTED_RANGE: {
_PyStackRef iter;
- iter = stack_pointer[-1];
+ iter = stack_pointer[-2];
_PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
assert(Py_TYPE(r) == &PyRangeIter_Type);
if (r->len <= 0) {
@@ -4454,7 +4457,7 @@
case _ITER_NEXT_RANGE: {
_PyStackRef iter;
_PyStackRef next;
- iter = stack_pointer[-1];
+ iter = stack_pointer[-2];
_PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
assert(Py_TYPE(r) == &PyRangeIter_Type);
#ifdef Py_GIL_DISABLED
@@ -4479,7 +4482,7 @@
_PyStackRef iter;
_PyInterpreterFrame *gen_frame;
oparg = CURRENT_OPARG();
- iter = stack_pointer[-1];
+ iter = stack_pointer[-2];
PyGenObject *gen = (PyGenObject *)PyStackRef_AsPyObjectBorrow(iter);
if (Py_TYPE(gen) != &PyGen_Type) {
UOP_STAT_INC(uopcode, miss);
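The tier-2 cases above stop materializing list/tuple iterator objects: GET_ITER now keeps the sequence itself on the stack next to a tagged integer index, _GUARD_NOT_EXHAUSTED_* compares that index against the live size, and _ITER_NEXT_* reads the item and then bumps the index. Stripped of stack refs and tagging, the control flow is plain index-based iteration, as in this standalone sketch (all names are illustrative):

#include <stdio.h>
#include <stddef.h>

int
main(void)
{
    int seq[] = {10, 20, 30};
    size_t size = sizeof(seq) / sizeof(seq[0]);

    /* GET_ITER fast path: keep the sequence and start the index at 0
       instead of allocating a separate iterator object. */
    size_t index = 0;

    for (;;) {
        /* _GUARD_NOT_EXHAUSTED_*: compare the index against the current size. */
        if (index >= size) {
            break;              /* FOR_ITER jumps past the loop body */
        }
        /* _ITER_NEXT_*: fetch the item, then advance the index. */
        int next = seq[index];
        index++;
        printf("%d\n", next);
    }
    return 0;
}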
diff --git a/Python/flowgraph.c b/Python/flowgraph.c
index 78ef02a911a..67ccf350b72 100644
--- a/Python/flowgraph.c
+++ b/Python/flowgraph.c
@@ -299,26 +299,34 @@ basicblock_returns(const basicblock *b) {
}
static void
-dump_basicblock(const basicblock *b)
+dump_basicblock(const basicblock *b, bool highlight)
{
const char *b_return = basicblock_returns(b) ? "return " : "";
+ if (highlight) {
+ fprintf(stderr, ">>> ");
+ }
fprintf(stderr, "%d: [EH=%d CLD=%d WRM=%d NO_FT=%d %p] used: %d, depth: %d, preds: %d %s\n",
b->b_label.id, b->b_except_handler, b->b_cold, b->b_warm, BB_NO_FALLTHROUGH(b), b, b->b_iused,
b->b_startdepth, b->b_predecessors, b_return);
+ int depth = b->b_startdepth;
if (b->b_instr) {
int i;
for (i = 0; i < b->b_iused; i++) {
- fprintf(stderr, " [%02d] ", i);
+ fprintf(stderr, " [%02d] depth: %d ", i, depth);
dump_instr(b->b_instr + i);
+
+ int popped = _PyOpcode_num_popped(b->b_instr[i].i_opcode, b->b_instr[i].i_oparg);
+ int pushed = _PyOpcode_num_pushed(b->b_instr[i].i_opcode, b->b_instr[i].i_oparg);
+ depth += (pushed - popped);
}
}
}
void
-_PyCfgBuilder_DumpGraph(const basicblock *entryblock)
+_PyCfgBuilder_DumpGraph(const basicblock *entryblock, const basicblock *mark)
{
for (const basicblock *b = entryblock; b != NULL; b = b->b_next) {
- dump_basicblock(b);
+ dump_basicblock(b, b == mark);
}
}
@@ -2863,6 +2871,7 @@ optimize_load_fast(cfg_builder *g)
// Opcodes that consume no inputs
case GET_ANEXT:
+ case GET_ITER:
case GET_LEN:
case IMPORT_FROM:
case MATCH_KEYS:
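dump_basicblock() now threads a running stack depth through the instruction listing, seeded from b_startdepth and updated with _PyOpcode_num_pushed() minus _PyOpcode_num_popped() after each instruction. The bookkeeping is the simple accumulation below; the opcode names and their stack effects are made up for the sketch.

#include <stdio.h>

int
main(void)
{
    /* (popped, pushed) effects for a made-up instruction sequence. */
    struct { const char *name; int popped; int pushed; } instrs[] = {
        {"LOAD_FAST",    0, 1},
        {"LOAD_CONST",   0, 1},
        {"BINARY_OP",    2, 1},
        {"RETURN_VALUE", 1, 0},
    };
    int depth = 0;   /* b->b_startdepth in the real dump */

    for (size_t i = 0; i < sizeof(instrs) / sizeof(instrs[0]); i++) {
        printf("  [%02zu] depth: %d %s\n", i, depth, instrs[i].name);
        depth += instrs[i].pushed - instrs[i].popped;
    }
    printf("final depth: %d\n", depth);
    return 0;
}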
diff --git a/Python/gc_free_threading.c b/Python/gc_free_threading.c
index d2ea5b5e06b..5aaa68c5b51 100644
--- a/Python/gc_free_threading.c
+++ b/Python/gc_free_threading.c
@@ -2062,7 +2062,7 @@ gc_should_collect_mem_usage(GCState *gcstate)
// 70,000 new container objects.
return true;
}
- Py_ssize_t last_mem = gcstate->last_mem;
+ Py_ssize_t last_mem = _Py_atomic_load_ssize_relaxed(&gcstate->last_mem);
Py_ssize_t mem_threshold = Py_MAX(last_mem / 10, 128);
if ((mem - last_mem) > mem_threshold) {
// The process memory usage has increased too much, do a collection.
@@ -2245,7 +2245,8 @@ gc_collect_internal(PyInterpreterState *interp, struct collection_state *state,
// Store the current memory usage, can be smaller now if breaking cycles
// freed some memory.
- state->gcstate->last_mem = get_process_mem_usage();
+ Py_ssize_t last_mem = get_process_mem_usage();
+ _Py_atomic_store_ssize_relaxed(&state->gcstate->last_mem, last_mem);
// Append objects with legacy finalizers to the "gc.garbage" list.
handle_legacy_finalizers(state);
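The two gc_free_threading.c hunks make last_mem a field that is read and written with relaxed atomics, while keeping the existing growth heuristic: trigger a collection once process memory has grown by more than max(last_mem / 10, 128). A standalone C11 sketch of both pieces, with arbitrary numbers standing in for real memory readings:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

static _Atomic long last_mem = 1000;   /* arbitrary starting value */

static bool
should_collect(long mem)
{
    long last = atomic_load_explicit(&last_mem, memory_order_relaxed);
    long threshold = last / 10 > 128 ? last / 10 : 128;
    return (mem - last) > threshold;
}

static void
record_collection(long mem_after)
{
    atomic_store_explicit(&last_mem, mem_after, memory_order_relaxed);
}

int
main(void)
{
    printf("%d\n", should_collect(1050));   /* 0: grew by 50, under the threshold */
    printf("%d\n", should_collect(1300));   /* 1: grew by 300, over the threshold */
    record_collection(1300);
    printf("%d\n", should_collect(1350));   /* 0 again after updating last_mem */
    return 0;
}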
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 1c98e1ce4fc..e3cd3b71a1d 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -5731,17 +5731,19 @@
_Py_CODEUNIT* const this_instr = next_instr - 2;
(void)this_instr;
_PyStackRef iter;
+ _PyStackRef null_or_index;
_PyStackRef next;
// _SPECIALIZE_FOR_ITER
{
- iter = stack_pointer[-1];
+ null_or_index = stack_pointer[-1];
+ iter = stack_pointer[-2];
uint16_t counter = read_u16(&this_instr[1].cache);
(void)counter;
#if ENABLE_SPECIALIZATION_FT
if (ADAPTIVE_COUNTER_TRIGGERS(counter)) {
next_instr = this_instr;
_PyFrame_SetStackPointer(frame, stack_pointer);
- _Py_Specialize_ForIter(iter, next_instr, oparg);
+ _Py_Specialize_ForIter(iter, null_or_index, next_instr, oparg);
stack_pointer = _PyFrame_GetStackPointer(frame);
DISPATCH_SAME_OPARG();
}
@@ -5752,29 +5754,42 @@
// _FOR_ITER
{
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- _PyFrame_SetStackPointer(frame, stack_pointer);
- PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (next_o == NULL) {
- if (_PyErr_Occurred(tstate)) {
- _PyFrame_SetStackPointer(frame, stack_pointer);
- int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (!matches) {
- JUMP_TO_LABEL(error);
+ if (PyStackRef_IsTaggedInt(null_or_index)) {
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ next = _PyForIter_NextWithIndex(iter_o, null_or_index);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ if (PyStackRef_IsNull(next)) {
+ JUMPBY(oparg + 1);
+ DISPATCH();
+ }
+ null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
+ }
+ else {
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ if (next_o == NULL) {
+ if (_PyErr_Occurred(tstate)) {
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ if (!matches) {
+ JUMP_TO_LABEL(error);
+ }
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ _PyEval_MonitorRaise(tstate, frame, this_instr);
+ _PyErr_Clear(tstate);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
}
- _PyFrame_SetStackPointer(frame, stack_pointer);
- _PyEval_MonitorRaise(tstate, frame, this_instr);
- _PyErr_Clear(tstate);
- stack_pointer = _PyFrame_GetStackPointer(frame);
+ assert(next_instr[oparg].op.code == END_FOR ||
+ next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
+ JUMPBY(oparg + 1);
+ DISPATCH();
}
- assert(next_instr[oparg].op.code == END_FOR ||
- next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
- JUMPBY(oparg + 1);
- DISPATCH();
+ next = PyStackRef_FromPyObjectSteal(next_o);
}
- next = PyStackRef_FromPyObjectSteal(next_o);
}
+ stack_pointer[-1] = null_or_index;
stack_pointer[0] = next;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@@ -5806,7 +5821,7 @@
}
// _FOR_ITER_GEN_FRAME
{
- iter = stack_pointer[-1];
+ iter = stack_pointer[-2];
PyGenObject *gen = (PyGenObject *)PyStackRef_AsPyObjectBorrow(iter);
if (Py_TYPE(gen) != &PyGen_Type) {
UPDATE_MISS_STATS(FOR_ITER);
@@ -5863,26 +5878,22 @@
INSTRUCTION_STATS(FOR_ITER_LIST);
static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size");
_PyStackRef iter;
+ _PyStackRef null_or_index;
_PyStackRef next;
/* Skip 1 cache entry */
// _ITER_CHECK_LIST
{
- iter = stack_pointer[-1];
+ null_or_index = stack_pointer[-1];
+ iter = stack_pointer[-2];
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- if (Py_TYPE(iter_o) != &PyListIter_Type) {
+ if (Py_TYPE(iter_o) != &PyList_Type) {
UPDATE_MISS_STATS(FOR_ITER);
assert(_PyOpcode_Deopt[opcode] == (FOR_ITER));
JUMP_TO_PREDICTED(FOR_ITER);
}
+ assert(PyStackRef_IsTaggedInt(null_or_index));
#ifdef Py_GIL_DISABLED
- if (!_PyObject_IsUniquelyReferenced(iter_o)) {
- UPDATE_MISS_STATS(FOR_ITER);
- assert(_PyOpcode_Deopt[opcode] == (FOR_ITER));
- JUMP_TO_PREDICTED(FOR_ITER);
- }
- _PyListIterObject *it = (_PyListIterObject *)iter_o;
- if (!_Py_IsOwnedByCurrentThread((PyObject *)it->it_seq) ||
- !_PyObject_GC_IS_SHARED(it->it_seq)) {
+ if (!_Py_IsOwnedByCurrentThread(iter_o) && !_PyObject_GC_IS_SHARED(iter_o)) {
UPDATE_MISS_STATS(FOR_ITER);
assert(_PyOpcode_Deopt[opcode] == (FOR_ITER));
JUMP_TO_PREDICTED(FOR_ITER);
@@ -5891,42 +5902,30 @@
}
// _ITER_JUMP_LIST
{
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- assert(Py_TYPE(iter_o) == &PyListIter_Type);
#ifdef Py_GIL_DISABLED
- assert(_PyObject_IsUniquelyReferenced(iter_o));
- (void)iter_o;
+
#else
- _PyListIterObject *it = (_PyListIterObject *)iter_o;
+ PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter);
+ assert(Py_TYPE(list_o) == &PyList_Type);
STAT_INC(FOR_ITER, hit);
- PyListObject *seq = it->it_seq;
- if (seq == NULL || (size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) {
- it->it_index = -1;
- if (seq != NULL) {
- it->it_seq = NULL;
- _PyFrame_SetStackPointer(frame, stack_pointer);
- Py_DECREF(seq);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- }
+ if ((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyList_GET_SIZE(list_o)) {
+ null_or_index = PyStackRef_TagInt(-1);
JUMPBY(oparg + 1);
+ stack_pointer[-1] = null_or_index;
DISPATCH();
}
#endif
}
// _ITER_NEXT_LIST
{
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- _PyListIterObject *it = (_PyListIterObject *)iter_o;
- assert(Py_TYPE(iter_o) == &PyListIter_Type);
- PyListObject *seq = it->it_seq;
- assert(seq);
+ PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter);
+ assert(PyList_CheckExact(list_o));
#ifdef Py_GIL_DISABLED
- assert(_PyObject_IsUniquelyReferenced(iter_o));
- assert(_Py_IsOwnedByCurrentThread((PyObject *)seq) ||
- _PyObject_GC_IS_SHARED(seq));
+ assert(_Py_IsOwnedByCurrentThread(list_o) ||
+ _PyObject_GC_IS_SHARED(list_o));
STAT_INC(FOR_ITER, hit);
_PyFrame_SetStackPointer(frame, stack_pointer);
- int result = _PyList_GetItemRefNoLock(seq, it->it_index, &next);
+ int result = _PyList_GetItemRefNoLock((PyListObject *)list_o, PyStackRef_UntagInt(null_or_index), &next);
stack_pointer = _PyFrame_GetStackPointer(frame);
if (result < 0) {
UPDATE_MISS_STATS(FOR_ITER);
@@ -5934,16 +5933,17 @@
JUMP_TO_PREDICTED(FOR_ITER);
}
if (result == 0) {
- it->it_index = -1;
+ null_or_index = PyStackRef_TagInt(-1);
JUMPBY(oparg + 1);
+ stack_pointer[-1] = null_or_index;
DISPATCH();
}
- it->it_index++;
#else
- assert(it->it_index < PyList_GET_SIZE(seq));
- next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(seq, it->it_index++));
+ next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(list_o, PyStackRef_UntagInt(null_or_index)));
#endif
+ null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
}
+ stack_pointer[-1] = null_or_index;
stack_pointer[0] = next;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@@ -5966,7 +5966,7 @@
/* Skip 1 cache entry */
// _ITER_CHECK_RANGE
{
- iter = stack_pointer[-1];
+ iter = stack_pointer[-2];
_PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter);
if (Py_TYPE(r) != &PyRangeIter_Type) {
UPDATE_MISS_STATS(FOR_ITER);
@@ -6029,63 +6029,44 @@
INSTRUCTION_STATS(FOR_ITER_TUPLE);
static_assert(INLINE_CACHE_ENTRIES_FOR_ITER == 1, "incorrect cache size");
_PyStackRef iter;
+ _PyStackRef null_or_index;
_PyStackRef next;
/* Skip 1 cache entry */
// _ITER_CHECK_TUPLE
{
- iter = stack_pointer[-1];
+ null_or_index = stack_pointer[-1];
+ iter = stack_pointer[-2];
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- if (Py_TYPE(iter_o) != &PyTupleIter_Type) {
+ if (Py_TYPE(iter_o) != &PyTuple_Type) {
UPDATE_MISS_STATS(FOR_ITER);
assert(_PyOpcode_Deopt[opcode] == (FOR_ITER));
JUMP_TO_PREDICTED(FOR_ITER);
}
- #ifdef Py_GIL_DISABLED
- if (!_PyObject_IsUniquelyReferenced(iter_o)) {
- UPDATE_MISS_STATS(FOR_ITER);
- assert(_PyOpcode_Deopt[opcode] == (FOR_ITER));
- JUMP_TO_PREDICTED(FOR_ITER);
- }
- #endif
+ assert(PyStackRef_IsTaggedInt(null_or_index));
}
// _ITER_JUMP_TUPLE
{
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- (void)iter_o;
- assert(Py_TYPE(iter_o) == &PyTupleIter_Type);
- #ifdef Py_GIL_DISABLED
- assert(_PyObject_IsUniquelyReferenced(iter_o));
- #endif
- _PyTupleIterObject *it = (_PyTupleIterObject *)iter_o;
+ PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter);
+ (void)tuple_o;
+ assert(Py_TYPE(tuple_o) == &PyTuple_Type);
STAT_INC(FOR_ITER, hit);
- PyTupleObject *seq = it->it_seq;
- if (seq == NULL || (size_t)it->it_index >= (size_t)PyTuple_GET_SIZE(seq)) {
- #ifndef Py_GIL_DISABLED
- if (seq != NULL) {
- it->it_seq = NULL;
- _PyFrame_SetStackPointer(frame, stack_pointer);
- Py_DECREF(seq);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- }
- #endif
-
+ if ((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyTuple_GET_SIZE(tuple_o)) {
+ null_or_index = PyStackRef_TagInt(-1);
JUMPBY(oparg + 1);
+ stack_pointer[-1] = null_or_index;
DISPATCH();
}
}
// _ITER_NEXT_TUPLE
{
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- _PyTupleIterObject *it = (_PyTupleIterObject *)iter_o;
- assert(Py_TYPE(iter_o) == &PyTupleIter_Type);
- PyTupleObject *seq = it->it_seq;
- #ifdef Py_GIL_DISABLED
- assert(_PyObject_IsUniquelyReferenced(iter_o));
- #endif
- assert(seq);
- assert(it->it_index < PyTuple_GET_SIZE(seq));
- next = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(seq, it->it_index++));
+ PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter);
+ assert(Py_TYPE(tuple_o) == &PyTuple_Type);
+ uintptr_t i = PyStackRef_UntagInt(null_or_index);
+ assert((size_t)i < (size_t)PyTuple_GET_SIZE(tuple_o));
+ next = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(tuple_o, i));
+ null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
}
+ stack_pointer[-1] = null_or_index;
stack_pointer[0] = next;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@@ -6216,25 +6197,37 @@
INSTRUCTION_STATS(GET_ITER);
_PyStackRef iterable;
_PyStackRef iter;
+ _PyStackRef index_or_null;
iterable = stack_pointer[-1];
#ifdef Py_STATS
_PyFrame_SetStackPointer(frame, stack_pointer);
_Py_GatherStats_GetIter(iterable);
stack_pointer = _PyFrame_GetStackPointer(frame);
#endif
- _PyFrame_SetStackPointer(frame, stack_pointer);
- PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable));
- stack_pointer = _PyFrame_GetStackPointer(frame);
- stack_pointer += -1;
- assert(WITHIN_STACK_BOUNDS());
- _PyFrame_SetStackPointer(frame, stack_pointer);
- PyStackRef_CLOSE(iterable);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (iter_o == NULL) {
- JUMP_TO_LABEL(error);
+
+ PyTypeObject *tp = PyStackRef_TYPE(iterable);
+ if (tp == &PyTuple_Type || tp == &PyList_Type) {
+ iter = iterable;
+ index_or_null = PyStackRef_TagInt(0);
}
- iter = PyStackRef_FromPyObjectSteal(iter_o);
- stack_pointer[0] = iter;
+ else {
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable));
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ stack_pointer += -1;
+ assert(WITHIN_STACK_BOUNDS());
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ PyStackRef_CLOSE(iterable);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ if (iter_o == NULL) {
+ JUMP_TO_LABEL(error);
+ }
+ iter = PyStackRef_FromPyObjectSteal(iter_o);
+ index_or_null = PyStackRef_NULL;
+ stack_pointer += 1;
+ }
+ stack_pointer[-1] = iter;
+ stack_pointer[0] = index_or_null;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
DISPATCH();
@@ -6999,7 +6992,7 @@
_PyStackRef receiver;
_PyStackRef value;
value = stack_pointer[-1];
- receiver = stack_pointer[-2];
+ receiver = stack_pointer[-3];
if (PyStackRef_GenCheck(receiver)) {
_PyFrame_SetStackPointer(frame, stack_pointer);
int err = monitor_stop_iteration(tstate, frame, this_instr, PyStackRef_AsPyObjectBorrow(value));
@@ -7061,35 +7054,51 @@
next_instr += 2;
INSTRUCTION_STATS(INSTRUMENTED_FOR_ITER);
_PyStackRef iter;
+ _PyStackRef null_or_index;
_PyStackRef next;
/* Skip 1 cache entry */
- iter = stack_pointer[-1];
+ null_or_index = stack_pointer[-1];
+ iter = stack_pointer[-2];
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- _PyFrame_SetStackPointer(frame, stack_pointer);
- PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (next_o != NULL) {
- next = PyStackRef_FromPyObjectSteal(next_o);
+ if (PyStackRef_IsTaggedInt(null_or_index)) {
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ next = _PyForIter_NextWithIndex(iter_o, null_or_index);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ if (PyStackRef_IsNull(next)) {
+ JUMPBY(oparg + 1);
+ DISPATCH();
+ }
+ null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT);
}
else {
- if (_PyErr_Occurred(tstate)) {
- _PyFrame_SetStackPointer(frame, stack_pointer);
- int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (!matches) {
- JUMP_TO_LABEL(error);
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ if (next_o != NULL) {
+ next = PyStackRef_FromPyObjectSteal(next_o);
+ INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT);
+ }
+ else {
+ if (_PyErr_Occurred(tstate)) {
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ if (!matches) {
+ JUMP_TO_LABEL(error);
+ }
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ _PyEval_MonitorRaise(tstate, frame, this_instr);
+ _PyErr_Clear(tstate);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
}
- _PyFrame_SetStackPointer(frame, stack_pointer);
- _PyEval_MonitorRaise(tstate, frame, this_instr);
- _PyErr_Clear(tstate);
- stack_pointer = _PyFrame_GetStackPointer(frame);
+ assert(next_instr[oparg].op.code == END_FOR ||
+ next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
+ JUMPBY(oparg + 1);
+ DISPATCH();
}
- assert(next_instr[oparg].op.code == END_FOR ||
- next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
- JUMPBY(oparg + 1);
- DISPATCH();
}
+ stack_pointer[-1] = null_or_index;
stack_pointer[0] = next;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
@@ -7356,9 +7365,12 @@
next_instr += 1;
INSTRUCTION_STATS(INSTRUMENTED_POP_ITER);
_PyStackRef iter;
- iter = stack_pointer[-1];
+ _PyStackRef index_or_null;
+ index_or_null = stack_pointer[-1];
+ iter = stack_pointer[-2];
+ (void)index_or_null;
INSTRUMENTED_JUMP(prev_instr, this_instr+1, PY_MONITORING_EVENT_BRANCH_RIGHT);
- stack_pointer += -1;
+ stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
PyStackRef_CLOSE(iter);
@@ -10122,12 +10134,15 @@
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(POP_ITER);
- _PyStackRef value;
- value = stack_pointer[-1];
- stack_pointer += -1;
+ _PyStackRef iter;
+ _PyStackRef index_or_null;
+ index_or_null = stack_pointer[-1];
+ iter = stack_pointer[-2];
+ (void)index_or_null;
+ stack_pointer += -2;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
- PyStackRef_CLOSE(value);
+ PyStackRef_CLOSE(iter);
stack_pointer = _PyFrame_GetStackPointer(frame);
DISPATCH();
}
@@ -10275,7 +10290,7 @@
stack_pointer += -1;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
- PyStackRef_CLOSE(value);
+ PyStackRef_XCLOSE(value);
stack_pointer = _PyFrame_GetStackPointer(frame);
DISPATCH();
}
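Throughout generated_cases.c.h the slot next to the iterator now holds either PyStackRef_NULL (a real iterator object is driving the loop) or a tagged small integer carrying the current list/tuple index. The sketch below shows one conventional way to pack such a tag into a single machine word; it is purely an illustration of the idea and not the actual encoding used by pycore_stackref.h.

#include <stdint.h>
#include <stdio.h>
#include <stdbool.h>

/* Illustrative tagging only: object pointers are at least 2-byte aligned,
   so the low bit can mark "this word is a small integer, not a pointer". */
typedef uintptr_t stackword;

static stackword tag_int(uintptr_t i)       { return (i << 1) | 1u; }
static uintptr_t untag_int(stackword w)     { return w >> 1; }
static bool      is_tagged_int(stackword w) { return (w & 1u) != 0; }

int
main(void)
{
    stackword slot = tag_int(0);            /* GET_ITER fast path: index 0 */
    for (int round = 0; round < 3; round++) {
        printf("index = %lu\n", (unsigned long)untag_int(slot));
        slot = tag_int(untag_int(slot) + 1);   /* "increment, no overflow" */
    }
    printf("tagged? %d\n", is_tagged_int(slot));
    return 0;
}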
diff --git a/Python/hamt.c b/Python/hamt.c
index f9bbf63961d..906149cc6cd 100644
--- a/Python/hamt.c
+++ b/Python/hamt.c
@@ -1176,7 +1176,7 @@ hamt_node_bitmap_dump(PyHamtNode_Bitmap *node,
}
if (key_or_null == NULL) {
- if (PyUnicodeWriter_WriteUTF8(writer, "NULL:\n", -1) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, "NULL:\n", 6) < 0) {
goto error;
}
@@ -1194,7 +1194,7 @@ hamt_node_bitmap_dump(PyHamtNode_Bitmap *node,
}
}
- if (PyUnicodeWriter_WriteUTF8(writer, "\n", 1) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, "\n", 1) < 0) {
goto error;
}
}
@@ -1915,7 +1915,7 @@ hamt_node_array_dump(PyHamtNode_Array *node,
goto error;
}
- if (PyUnicodeWriter_WriteUTF8(writer, "\n", 1) < 0) {
+ if (PyUnicodeWriter_WriteASCII(writer, "\n", 1) < 0) {
goto error;
}
}
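hamt.c's debug dump switches from PyUnicodeWriter_WriteUTF8() to PyUnicodeWriter_WriteASCII() for string literals that are known to be ASCII, which lets the writer skip UTF-8 decoding. A minimal embedding sketch of that writer usage follows; passing non-ASCII data to this call would be the caller's bug, so the literals here are deliberately plain ASCII.

#include <Python.h>

int
main(void)
{
    Py_Initialize();

    PyUnicodeWriter *writer = PyUnicodeWriter_Create(0);
    if (writer == NULL) {
        PyErr_Print();
        return Py_FinalizeEx();
    }
    /* Both literals are pure ASCII, so no UTF-8 decoding is required. */
    if (PyUnicodeWriter_WriteASCII(writer, "NULL:\n", 6) < 0
        || PyUnicodeWriter_WriteASCII(writer, "done\n", 5) < 0)
    {
        PyErr_Print();
        PyUnicodeWriter_Discard(writer);
        return Py_FinalizeEx();
    }
    PyObject *text = PyUnicodeWriter_Finish(writer);   /* consumes the writer */
    if (text != NULL) {
        const char *s = PyUnicode_AsUTF8(text);
        printf("%s", s != NULL ? s : "<error>");
        Py_DECREF(text);
    }
    else {
        PyErr_Print();
    }
    return Py_FinalizeEx();
}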
diff --git a/Python/import.c b/Python/import.c
index e7be1b90751..184dede335d 100644
--- a/Python/import.c
+++ b/Python/import.c
@@ -3369,11 +3369,11 @@ PyObject *
PyImport_GetImporter(PyObject *path)
{
PyThreadState *tstate = _PyThreadState_GET();
- PyObject *path_importer_cache = _PySys_GetRequiredAttrString("path_importer_cache");
+ PyObject *path_importer_cache = PySys_GetAttrString("path_importer_cache");
if (path_importer_cache == NULL) {
return NULL;
}
- PyObject *path_hooks = _PySys_GetRequiredAttrString("path_hooks");
+ PyObject *path_hooks = PySys_GetAttrString("path_hooks");
if (path_hooks == NULL) {
Py_DECREF(path_importer_cache);
return NULL;
@@ -3682,14 +3682,14 @@ import_find_and_load(PyThreadState *tstate, PyObject *abs_name)
PyTime_t t1 = 0, accumulated_copy = accumulated;
PyObject *sys_path, *sys_meta_path, *sys_path_hooks;
- if (_PySys_GetOptionalAttrString("path", &sys_path) < 0) {
+ if (PySys_GetOptionalAttrString("path", &sys_path) < 0) {
return NULL;
}
- if (_PySys_GetOptionalAttrString("meta_path", &sys_meta_path) < 0) {
+ if (PySys_GetOptionalAttrString("meta_path", &sys_meta_path) < 0) {
Py_XDECREF(sys_path);
return NULL;
}
- if (_PySys_GetOptionalAttrString("path_hooks", &sys_path_hooks) < 0) {
+ if (PySys_GetOptionalAttrString("path_hooks", &sys_path_hooks) < 0) {
Py_XDECREF(sys_meta_path);
Py_XDECREF(sys_path);
return NULL;
@@ -3964,8 +3964,10 @@ PyImport_Import(PyObject *module_name)
if (globals != NULL) {
Py_INCREF(globals);
builtins = PyObject_GetItem(globals, &_Py_ID(__builtins__));
- if (builtins == NULL)
+ if (builtins == NULL) {
+ // XXX Fall back to interp->builtins or sys.modules['builtins']?
goto err;
+ }
}
else {
/* No globals -- use standard builtins, and fake globals */
@@ -4127,7 +4129,7 @@ _PyImport_FiniCore(PyInterpreterState *interp)
static int
init_zipimport(PyThreadState *tstate, int verbose)
{
- PyObject *path_hooks = _PySys_GetRequiredAttrString("path_hooks");
+ PyObject *path_hooks = PySys_GetAttrString("path_hooks");
if (path_hooks == NULL) {
return -1;
}
diff --git a/Python/initconfig.c b/Python/initconfig.c
index 25e30aa648e..71d7cfed5c4 100644
--- a/Python/initconfig.c
+++ b/Python/initconfig.c
@@ -3647,7 +3647,7 @@ _Py_DumpPathConfig(PyThreadState *tstate)
#define DUMP_SYS(NAME) \
do { \
PySys_FormatStderr(" sys.%s = ", #NAME); \
- if (_PySys_GetOptionalAttrString(#NAME, &obj) < 0) { \
+ if (PySys_GetOptionalAttrString(#NAME, &obj) < 0) { \
PyErr_Clear(); \
} \
if (obj != NULL) { \
@@ -3671,7 +3671,7 @@ _Py_DumpPathConfig(PyThreadState *tstate)
#undef DUMP_SYS
PyObject *sys_path;
- (void) _PySys_GetOptionalAttrString("path", &sys_path);
+ (void) PySys_GetOptionalAttrString("path", &sys_path);
if (sys_path != NULL && PyList_Check(sys_path)) {
PySys_WriteStderr(" sys.path = [\n");
Py_ssize_t len = PyList_GET_SIZE(sys_path);
@@ -4294,7 +4294,7 @@ _PyConfig_CreateXOptionsDict(const PyConfig *config)
static int
config_get_sys_write_bytecode(const PyConfig *config, int *value)
{
- PyObject *attr = _PySys_GetRequiredAttrString("dont_write_bytecode");
+ PyObject *attr = PySys_GetAttrString("dont_write_bytecode");
if (attr == NULL) {
return -1;
}
@@ -4315,7 +4315,7 @@ config_get(const PyConfig *config, const PyConfigSpec *spec,
{
if (use_sys) {
if (spec->sys.attr != NULL) {
- return _PySys_GetRequiredAttrString(spec->sys.attr);
+ return PySys_GetAttrString(spec->sys.attr);
}
if (strcmp(spec->name, "write_bytecode") == 0) {
diff --git a/Python/intrinsics.c b/Python/intrinsics.c
index ff44ba0ee64..8ea920e690c 100644
--- a/Python/intrinsics.c
+++ b/Python/intrinsics.c
@@ -9,7 +9,6 @@
#include "pycore_intrinsics.h" // INTRINSIC_PRINT
#include "pycore_pyerrors.h" // _PyErr_SetString()
#include "pycore_runtime.h" // _Py_ID()
-#include "pycore_sysmodule.h" // _PySys_GetRequiredAttr()
#include "pycore_tuple.h" // _PyTuple_FromArray()
#include "pycore_typevarobject.h" // _Py_make_typevar()
#include "pycore_unicodeobject.h" // _PyUnicode_FromASCII()
@@ -27,7 +26,7 @@ no_intrinsic1(PyThreadState* tstate, PyObject *unused)
static PyObject *
print_expr(PyThreadState* Py_UNUSED(ignored), PyObject *value)
{
- PyObject *hook = _PySys_GetRequiredAttr(&_Py_ID(displayhook));
+ PyObject *hook = PySys_GetAttr(&_Py_ID(displayhook));
if (hook == NULL) {
return NULL;
}
diff --git a/Python/lock.c b/Python/lock.c
index 28a12ad1835..b125ad0c9e3 100644
--- a/Python/lock.c
+++ b/Python/lock.c
@@ -119,6 +119,9 @@ _PyMutex_LockTimed(PyMutex *m, PyTime_t timeout, _PyLockFlags flags)
return PY_LOCK_INTR;
}
}
+ else if (ret == Py_PARK_INTR && (flags & _PY_FAIL_IF_INTERRUPTED)) {
+ return PY_LOCK_INTR;
+ }
else if (ret == Py_PARK_TIMEOUT) {
assert(timeout >= 0);
return PY_LOCK_FAILURE;
diff --git a/Python/modsupport.c b/Python/modsupport.c
index 2caf595949d..437ad412027 100644
--- a/Python/modsupport.c
+++ b/Python/modsupport.c
@@ -669,5 +669,5 @@ Py_PACK_FULL_VERSION(int x, int y, int z, int level, int serial)
uint32_t
Py_PACK_VERSION(int x, int y)
{
- return Py_PACK_FULL_VERSION(x, y, 0, 0, 0);
+ return _Py_PACK_VERSION(x, y);
}
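Py_PACK_VERSION(x, y) now defers to the internal _Py_PACK_VERSION(x, y) helper instead of spelling out the five-argument call. The packed layout is the documented PY_VERSION_HEX one: one byte each for major, minor and micro, then a nibble for the release level and one for the serial. A quick arithmetic sketch; the helper below restates that layout for illustration and is not the real macro.

#include <stdio.h>
#include <stdint.h>

/* Illustrative restatement of the PY_VERSION_HEX layout:
   0xAABBCCDS = major AA, minor BB, micro CC, level D, serial S. */
static uint32_t
pack_full_version(int x, int y, int z, int level, int serial)
{
    return ((uint32_t)x << 24) | ((uint32_t)y << 16) | ((uint32_t)z << 8)
           | ((uint32_t)level << 4) | (uint32_t)serial;
}

int
main(void)
{
    /* 3.4.1a2 packs to 0x030401a2, the example used in the CPython docs. */
    printf("0x%08x\n", pack_full_version(3, 4, 1, 0xA, 2));
    /* Py_PACK_VERSION(x, y) is the same thing with micro/level/serial zeroed. */
    printf("0x%08x\n", pack_full_version(3, 15, 0, 0, 0));
    return 0;
}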
diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c
index 49c6bfb6c1b..e1209209660 100644
--- a/Python/optimizer_bytecodes.c
+++ b/Python/optimizer_bytecodes.c
@@ -840,7 +840,7 @@ dummy_func(void) {
value = sym_new_unknown(ctx);
}
- op(_FOR_ITER_GEN_FRAME, (unused -- unused, gen_frame: _Py_UOpsAbstractFrame*)) {
+ op(_FOR_ITER_GEN_FRAME, (unused, unused -- unused, unused, gen_frame: _Py_UOpsAbstractFrame*)) {
gen_frame = NULL;
/* We are about to hit the end of the trace */
ctx->done = true;
@@ -914,7 +914,14 @@ dummy_func(void) {
}
}
- op(_ITER_NEXT_RANGE, (iter -- iter, next)) {
+ op(_ITER_CHECK_TUPLE, (iter, null_or_index -- iter, null_or_index)) {
+ if (sym_matches_type(iter, &PyTuple_Type)) {
+ REPLACE_OP(this_instr, _NOP, 0, 0);
+ }
+ sym_set_type(iter, &PyTuple_Type);
+ }
+
+ op(_ITER_NEXT_RANGE, (iter, null_or_index -- iter, null_or_index, next)) {
next = sym_new_type(ctx, &PyLong_Type);
}
diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h
index bf7ac72d457..db86edcc785 100644
--- a/Python/optimizer_cases.c.h
+++ b/Python/optimizer_cases.c.h
@@ -126,6 +126,12 @@
break;
}
+ case _POP_ITER: {
+ stack_pointer += -2;
+ assert(WITHIN_STACK_BOUNDS());
+ break;
+ }
+
case _END_SEND: {
JitOptSymbol *val;
val = sym_new_not_null(ctx);
@@ -1557,8 +1563,13 @@
case _GET_ITER: {
JitOptSymbol *iter;
+ JitOptSymbol *index_or_null;
iter = sym_new_not_null(ctx);
+ index_or_null = sym_new_not_null(ctx);
stack_pointer[-1] = iter;
+ stack_pointer[0] = index_or_null;
+ stack_pointer += 1;
+ assert(WITHIN_STACK_BOUNDS());
break;
}
@@ -1604,6 +1615,12 @@
}
case _ITER_CHECK_TUPLE: {
+ JitOptSymbol *iter;
+ iter = stack_pointer[-2];
+ if (sym_matches_type(iter, &PyTuple_Type)) {
+ REPLACE_OP(this_instr, _NOP, 0, 0);
+ }
+ sym_set_type(iter, &PyTuple_Type);
break;
}
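The new _ITER_CHECK_TUPLE case in the JIT's abstract interpreter is a routine redundant-guard elimination: when the symbolic type of the value is already known to be a tuple, the runtime check is rewritten to a no-op, and in either case the type fact is recorded for later instructions. A tiny standalone sketch of that pattern with an invented enum and instruction struct:

#include <stdio.h>

enum opcode { OP_CHECK_TUPLE, OP_NOP, OP_OTHER };
enum known_type { TY_UNKNOWN, TY_TUPLE };

struct instr { enum opcode op; };

/* If the guarded fact is already established, the guard itself is dead. */
static void
optimize_guard(struct instr *instr, enum known_type *sym)
{
    if (instr->op != OP_CHECK_TUPLE) {
        return;
    }
    if (*sym == TY_TUPLE) {
        instr->op = OP_NOP;       /* REPLACE_OP(this_instr, _NOP, 0, 0) */
    }
    *sym = TY_TUPLE;              /* sym_set_type(iter, &PyTuple_Type) */
}

int
main(void)
{
    enum known_type ty = TY_UNKNOWN;
    struct instr a = {OP_CHECK_TUPLE}, b = {OP_CHECK_TUPLE};

    optimize_guard(&a, &ty);   /* type unknown: guard stays, fact recorded */
    optimize_guard(&b, &ty);   /* type now known: second guard becomes a NOP */
    printf("a=%d b=%d\n", a.op == OP_NOP, b.op == OP_NOP);   /* a=0 b=1 */
    return 0;
}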
diff --git a/Python/parking_lot.c b/Python/parking_lot.c
index 8edf4323594..e896dea0271 100644
--- a/Python/parking_lot.c
+++ b/Python/parking_lot.c
@@ -112,17 +112,27 @@ _PySemaphore_PlatformWait(_PySemaphore *sema, PyTime_t timeout)
}
}
- // NOTE: we wait on the sigint event even in non-main threads to match the
- // behavior of the other platforms. Non-main threads will ignore the
- // Py_PARK_INTR result.
- HANDLE sigint_event = _PyOS_SigintEvent();
- HANDLE handles[2] = { sema->platform_sem, sigint_event };
- DWORD count = sigint_event != NULL ? 2 : 1;
+ HANDLE handles[2] = { sema->platform_sem, NULL };
+ HANDLE sigint_event = NULL;
+ DWORD count = 1;
+ if (_Py_IsMainThread()) {
+ // gh-135099: Wait on the SIGINT event only in the main thread. Other
+ // threads would ignore the result anyways, and accessing
+ // `_PyOS_SigintEvent()` from non-main threads may race with
+ // interpreter shutdown, which closes the event handle. Note that
+ // non-main interpreters will ignore the result.
+ sigint_event = _PyOS_SigintEvent();
+ if (sigint_event != NULL) {
+ handles[1] = sigint_event;
+ count = 2;
+ }
+ }
wait = WaitForMultipleObjects(count, handles, FALSE, millis);
if (wait == WAIT_OBJECT_0) {
res = Py_PARK_OK;
}
else if (wait == WAIT_OBJECT_0 + 1) {
+ assert(sigint_event != NULL);
ResetEvent(sigint_event);
res = Py_PARK_INTR;
}
diff --git a/Python/perf_jit_trampoline.c b/Python/perf_jit_trampoline.c
index 5c7cb5b0a99..2ca18c23593 100644
--- a/Python/perf_jit_trampoline.c
+++ b/Python/perf_jit_trampoline.c
@@ -869,7 +869,11 @@ static void elf_init_ehframe(ELFObjectContext* ctx) {
*/
#ifdef __x86_64__
/* x86_64 calling convention unwinding rules */
+# if defined(__CET__) && (__CET__ & 1)
+ DWRF_U8(DWRF_CFA_advance_loc | 8); // Advance location by 8 bytes when CET protection is enabled
+# else
DWRF_U8(DWRF_CFA_advance_loc | 4); // Advance location by 4 bytes
+# endif
DWRF_U8(DWRF_CFA_def_cfa_offset); // Redefine CFA offset
DWRF_UV(16); // New offset: SP + 16
DWRF_U8(DWRF_CFA_advance_loc | 6); // Advance location by 6 bytes
diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c
index 8394245d373..724fda63511 100644
--- a/Python/pylifecycle.c
+++ b/Python/pylifecycle.c
@@ -1283,7 +1283,7 @@ init_interp_main(PyThreadState *tstate)
if (is_main_interp) {
/* Initialize warnings. */
PyObject *warnoptions;
- if (_PySys_GetOptionalAttrString("warnoptions", &warnoptions) < 0) {
+ if (PySys_GetOptionalAttrString("warnoptions", &warnoptions) < 0) {
return _PyStatus_ERR("can't initialize warnings");
}
if (warnoptions != NULL && PyList_Check(warnoptions) &&
@@ -1806,7 +1806,7 @@ flush_std_files(void)
PyObject *file;
int status = 0;
- if (_PySys_GetOptionalAttr(&_Py_ID(stdout), &file) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(stdout), &file) < 0) {
status = -1;
}
else if (file != NULL && file != Py_None && !file_is_closed(file)) {
@@ -1819,7 +1819,7 @@ flush_std_files(void)
}
Py_XDECREF(file);
- if (_PySys_GetOptionalAttr(&_Py_ID(stderr), &file) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(stderr), &file) < 0) {
PyErr_Clear();
status = -1;
}
@@ -3046,7 +3046,7 @@ _Py_FatalError_PrintExc(PyThreadState *tstate)
}
PyObject *ferr;
- if (_PySys_GetOptionalAttr(&_Py_ID(stderr), &ferr) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(stderr), &ferr) < 0) {
_PyErr_Clear(tstate);
}
if (ferr == NULL || ferr == Py_None) {
diff --git a/Python/pythonrun.c b/Python/pythonrun.c
index f67b72aa91f..8f1c78bf831 100644
--- a/Python/pythonrun.c
+++ b/Python/pythonrun.c
@@ -114,7 +114,7 @@ _PyRun_InteractiveLoopObject(FILE *fp, PyObject *filename, PyCompilerFlags *flag
}
PyObject *v;
- if (_PySys_GetOptionalAttr(&_Py_ID(ps1), &v) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(ps1), &v) < 0) {
PyErr_Print();
return -1;
}
@@ -128,7 +128,7 @@ _PyRun_InteractiveLoopObject(FILE *fp, PyObject *filename, PyCompilerFlags *flag
}
}
Py_XDECREF(v);
- if (_PySys_GetOptionalAttr(&_Py_ID(ps2), &v) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(ps2), &v) < 0) {
PyErr_Print();
return -1;
}
@@ -206,7 +206,7 @@ pyrun_one_parse_ast(FILE *fp, PyObject *filename,
PyObject *encoding_obj = NULL;
const char *encoding = NULL;
if (fp == stdin) {
- if (_PySys_GetOptionalAttr(&_Py_ID(stdin), &attr) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(stdin), &attr) < 0) {
PyErr_Clear();
}
else if (attr != NULL && attr != Py_None) {
@@ -226,7 +226,7 @@ pyrun_one_parse_ast(FILE *fp, PyObject *filename,
// Get sys.ps1 (as UTF-8)
PyObject *ps1_obj = NULL;
const char *ps1 = "";
- if (_PySys_GetOptionalAttr(&_Py_ID(ps1), &attr) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(ps1), &attr) < 0) {
PyErr_Clear();
}
else if (attr != NULL) {
@@ -247,7 +247,7 @@ pyrun_one_parse_ast(FILE *fp, PyObject *filename,
// Get sys.ps2 (as UTF-8)
PyObject *ps2_obj = NULL;
const char *ps2 = "";
- if (_PySys_GetOptionalAttr(&_Py_ID(ps2), &attr) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(ps2), &attr) < 0) {
PyErr_Clear();
}
else if (attr != NULL) {
@@ -658,7 +658,7 @@ _Py_HandleSystemExitAndKeyboardInterrupt(int *exitcode_p)
}
PyObject *sys_stderr;
- if (_PySys_GetOptionalAttr(&_Py_ID(stderr), &sys_stderr) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(stderr), &sys_stderr) < 0) {
PyErr_Clear();
}
else if (sys_stderr != NULL && sys_stderr != Py_None) {
@@ -722,7 +722,7 @@ _PyErr_PrintEx(PyThreadState *tstate, int set_sys_last_vars)
_PyErr_Clear(tstate);
}
}
- if (_PySys_GetOptionalAttr(&_Py_ID(excepthook), &hook) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(excepthook), &hook) < 0) {
PyErr_Clear();
}
if (_PySys_Audit(tstate, "sys.excepthook", "OOOO", hook ? hook : Py_None,
@@ -1197,7 +1197,7 @@ void
PyErr_Display(PyObject *unused, PyObject *value, PyObject *tb)
{
PyObject *file;
- if (_PySys_GetOptionalAttr(&_Py_ID(stderr), &file) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(stderr), &file) < 0) {
PyObject *exc = PyErr_GetRaisedException();
_PyObject_Dump(value);
fprintf(stderr, "lost sys.stderr\n");
@@ -1321,7 +1321,7 @@ static void
flush_io_stream(PyThreadState *tstate, PyObject *name)
{
PyObject *f;
- if (_PySys_GetOptionalAttr(name, &f) < 0) {
+ if (PySys_GetOptionalAttr(name, &f) < 0) {
PyErr_Clear();
}
if (f != NULL) {
diff --git a/Python/remote_debug.h b/Python/remote_debug.h
index dbc6bdd09a6..6cbf1c8deaa 100644
--- a/Python/remote_debug.h
+++ b/Python/remote_debug.h
@@ -35,7 +35,7 @@ extern "C" {
# include <sys/mman.h>
#endif
-#if defined(__APPLE__) && TARGET_OS_OSX
+#if defined(__APPLE__) && defined(TARGET_OS_OSX) && TARGET_OS_OSX
# include <libproc.h>
# include <mach-o/fat.h>
# include <mach-o/loader.h>
@@ -73,6 +73,18 @@ extern "C" {
# define HAVE_PROCESS_VM_READV 0
#endif
+#define _set_debug_exception_cause(exception, format, ...) \
+ do { \
+ if (!PyErr_ExceptionMatches(PyExc_PermissionError)) { \
+ PyThreadState *tstate = _PyThreadState_GET(); \
+ if (!_PyErr_Occurred(tstate)) { \
+ _PyErr_Format(tstate, exception, format, ##__VA_ARGS__); \
+ } else { \
+ _PyErr_FormatFromCause(exception, format, ##__VA_ARGS__); \
+ } \
+ } \
+ } while (0)
+
static inline size_t
get_page_size(void) {
size_t page_size = 0;
@@ -100,7 +112,7 @@ typedef struct page_cache_entry {
// Define a platform-independent process handle structure
typedef struct {
pid_t pid;
-#if defined(__APPLE__)
+#if defined(__APPLE__) && defined(TARGET_OS_OSX) && TARGET_OS_OSX
mach_port_t task;
#elif defined(MS_WINDOWS)
HANDLE hProcess;
@@ -127,7 +139,7 @@ _Py_RemoteDebug_ClearCache(proc_handle_t *handle)
}
}
-#if defined(__APPLE__) && TARGET_OS_OSX
+#if defined(__APPLE__) && defined(TARGET_OS_OSX) && TARGET_OS_OSX
static mach_port_t pid_to_task(pid_t pid);
#endif
@@ -135,14 +147,19 @@ static mach_port_t pid_to_task(pid_t pid);
static int
_Py_RemoteDebug_InitProcHandle(proc_handle_t *handle, pid_t pid) {
handle->pid = pid;
-#if defined(__APPLE__)
+#if defined(__APPLE__) && defined(TARGET_OS_OSX) && TARGET_OS_OSX
handle->task = pid_to_task(handle->pid);
+ if (handle->task == 0) {
+ _set_debug_exception_cause(PyExc_RuntimeError, "Failed to initialize macOS process handle");
+ return -1;
+ }
#elif defined(MS_WINDOWS)
handle->hProcess = OpenProcess(
PROCESS_VM_READ | PROCESS_VM_WRITE | PROCESS_VM_OPERATION | PROCESS_QUERY_INFORMATION,
FALSE, pid);
if (handle->hProcess == NULL) {
PyErr_SetFromWindowsErr(0);
+ _set_debug_exception_cause(PyExc_RuntimeError, "Failed to initialize Windows process handle");
return -1;
}
#endif
@@ -167,7 +184,7 @@ _Py_RemoteDebug_CleanupProcHandle(proc_handle_t *handle) {
_Py_RemoteDebug_FreePageCache(handle);
}
-#if defined(__APPLE__) && TARGET_OS_OSX
+#if defined(__APPLE__) && defined(TARGET_OS_OSX) && TARGET_OS_OSX
static uintptr_t
return_section_address64(
@@ -206,8 +223,10 @@ return_section_address64(
&object_name
);
if (ret != KERN_SUCCESS) {
- PyErr_SetString(
- PyExc_RuntimeError, "Cannot get any more VM maps.\n");
+ PyErr_Format(PyExc_RuntimeError,
+ "mach_vm_region failed while parsing 64-bit Mach-O binary "
+ "at base address 0x%lx (kern_return_t: %d)",
+ base, ret);
return 0;
}
}
@@ -227,9 +246,6 @@ return_section_address64(
cmd = (struct segment_command_64*)((void*)cmd + cmd->cmdsize);
}
- // We should not be here, but if we are there, we should say about this
- PyErr_SetString(
- PyExc_RuntimeError, "Cannot find section address.\n");
return 0;
}
@@ -270,8 +286,10 @@ return_section_address32(
&object_name
);
if (ret != KERN_SUCCESS) {
- PyErr_SetString(
- PyExc_RuntimeError, "Cannot get any more VM maps.\n");
+ PyErr_Format(PyExc_RuntimeError,
+ "mach_vm_region failed while parsing 32-bit Mach-O binary "
+ "at base address 0x%lx (kern_return_t: %d)",
+ base, ret);
return 0;
}
}
@@ -291,9 +309,6 @@ return_section_address32(
cmd = (struct segment_command*)((void*)cmd + cmd->cmdsize);
}
- // We should not be here, but if we are there, we should say about this
- PyErr_SetString(
- PyExc_RuntimeError, "Cannot find section address.\n");
return 0;
}
@@ -311,8 +326,20 @@ return_section_address_fat(
int is_abi64;
size_t cpu_size = sizeof(cpu), abi64_size = sizeof(is_abi64);
- sysctlbyname("hw.cputype", &cpu, &cpu_size, NULL, 0);
- sysctlbyname("hw.cpu64bit_capable", &is_abi64, &abi64_size, NULL, 0);
+ if (sysctlbyname("hw.cputype", &cpu, &cpu_size, NULL, 0) != 0) {
+ PyErr_Format(PyExc_OSError,
+ "Failed to determine CPU type via sysctlbyname "
+ "for fat binary analysis at 0x%lx: %s",
+ base, strerror(errno));
+ return 0;
+ }
+ if (sysctlbyname("hw.cpu64bit_capable", &is_abi64, &abi64_size, NULL, 0) != 0) {
+ PyErr_Format(PyExc_OSError,
+ "Failed to determine CPU ABI capability via sysctlbyname "
+ "for fat binary analysis at 0x%lx: %s",
+ base, strerror(errno));
+ return 0;
+ }
cpu |= is_abi64 * CPU_ARCH_ABI64;
@@ -343,13 +370,18 @@ return_section_address_fat(
return return_section_address64(section, proc_ref, base, (void*)hdr);
default:
- PyErr_SetString(PyExc_RuntimeError, "Unknown Mach-O magic in fat binary.\n");
+ PyErr_Format(PyExc_RuntimeError,
+ "Unknown Mach-O magic number 0x%x in fat binary architecture %u at base 0x%lx",
+ hdr->magic, i, base);
return 0;
}
}
}
- PyErr_SetString(PyExc_RuntimeError, "No matching architecture found in fat binary.\n");
+ PyErr_Format(PyExc_RuntimeError,
+ "No matching architecture found for CPU type 0x%x "
+ "in fat binary at base 0x%lx (%u architectures examined)",
+ cpu, base, nfat_arch);
return 0;
}
@@ -358,20 +390,26 @@ search_section_in_file(const char* secname, char* path, uintptr_t base, mach_vm_
{
int fd = open(path, O_RDONLY);
if (fd == -1) {
- PyErr_Format(PyExc_RuntimeError, "Cannot open binary %s\n", path);
+ PyErr_Format(PyExc_OSError,
+ "Cannot open binary file '%s' for section '%s' search: %s",
+ path, secname, strerror(errno));
return 0;
}
struct stat fs;
if (fstat(fd, &fs) == -1) {
- PyErr_Format(PyExc_RuntimeError, "Cannot get size of binary %s\n", path);
+ PyErr_Format(PyExc_OSError,
+ "Cannot get file size for binary '%s' during section '%s' search: %s",
+ path, secname, strerror(errno));
close(fd);
return 0;
}
void* map = mmap(0, fs.st_size, PROT_READ, MAP_SHARED, fd, 0);
if (map == MAP_FAILED) {
- PyErr_Format(PyExc_RuntimeError, "Cannot map binary %s\n", path);
+ PyErr_Format(PyExc_OSError,
+ "Cannot memory map binary file '%s' (size: %lld bytes) for section '%s' search: %s",
+ path, (long long)fs.st_size, secname, strerror(errno));
close(fd);
return 0;
}
@@ -393,13 +431,22 @@ search_section_in_file(const char* secname, char* path, uintptr_t base, mach_vm_
result = return_section_address_fat(secname, proc_ref, base, map);
break;
default:
- PyErr_SetString(PyExc_RuntimeError, "Unknown Mach-O magic");
+ PyErr_Format(PyExc_RuntimeError,
+ "Unrecognized Mach-O magic number 0x%x in binary file '%s' for section '%s' search",
+ magic, path, secname);
break;
}
- munmap(map, fs.st_size);
+ if (munmap(map, fs.st_size) != 0) {
+ PyErr_Format(PyExc_OSError,
+ "Failed to unmap binary file '%s' (size: %lld bytes): %s",
+ path, (long long)fs.st_size, strerror(errno));
+ result = 0;
+ }
if (close(fd) != 0) {
- PyErr_SetFromErrno(PyExc_OSError);
+ PyErr_Format(PyExc_OSError,
+ "Failed to close binary file '%s': %s",
+ path, strerror(errno));
result = 0;
}
return result;
@@ -414,7 +461,10 @@ pid_to_task(pid_t pid)
result = task_for_pid(mach_task_self(), pid, &task);
if (result != KERN_SUCCESS) {
- PyErr_Format(PyExc_PermissionError, "Cannot get task for PID %d", pid);
+ PyErr_Format(PyExc_PermissionError,
+ "Cannot get task port for PID %d (kern_return_t: %d). "
+ "This typically requires running as root or having the 'com.apple.system-task-ports' entitlement.",
+ pid, result);
return 0;
}
return task;
@@ -431,13 +481,15 @@ search_map_for_section(proc_handle_t *handle, const char* secname, const char* s
mach_port_t proc_ref = pid_to_task(handle->pid);
if (proc_ref == 0) {
if (!PyErr_Occurred()) {
- PyErr_SetString(PyExc_PermissionError, "Cannot get task for PID");
+ PyErr_Format(PyExc_PermissionError,
+ "Cannot get task port for PID %d during section search",
+ handle->pid);
}
return 0;
}
- int match_found = 0;
char map_filename[MAXPATHLEN + 1];
+
while (mach_vm_region(
proc_ref,
&address,
@@ -447,6 +499,7 @@ search_map_for_section(proc_handle_t *handle, const char* secname, const char* s
&count,
&object_name) == KERN_SUCCESS)
{
+
if ((region_info.protection & VM_PROT_READ) == 0
|| (region_info.protection & VM_PROT_EXECUTE) == 0) {
address += size;
@@ -467,21 +520,21 @@ search_map_for_section(proc_handle_t *handle, const char* secname, const char* s
filename = map_filename; // No path, use the whole string
}
- if (!match_found && strncmp(filename, substr, strlen(substr)) == 0) {
- match_found = 1;
- return search_section_in_file(
+ if (strncmp(filename, substr, strlen(substr)) == 0) {
+ uintptr_t result = search_section_in_file(
secname, map_filename, address, size, proc_ref);
+ if (result != 0) {
+ return result;
+ }
}
address += size;
}
- PyErr_SetString(PyExc_RuntimeError,
- "mach_vm_region failed to find the section");
return 0;
}
-#endif // (__APPLE__ && TARGET_OS_OSX)
+#endif // (__APPLE__ && defined(TARGET_OS_OSX) && TARGET_OS_OSX)
#if defined(__linux__) && HAVE_PROCESS_VM_READV
static uintptr_t
@@ -500,24 +553,38 @@ search_elf_file_for_section(
int fd = open(elf_file, O_RDONLY);
if (fd < 0) {
- PyErr_SetFromErrno(PyExc_OSError);
+ PyErr_Format(PyExc_OSError,
+ "Cannot open ELF file '%s' for section '%s' search: %s",
+ elf_file, secname, strerror(errno));
goto exit;
}
struct stat file_stats;
if (fstat(fd, &file_stats) != 0) {
- PyErr_SetFromErrno(PyExc_OSError);
+ PyErr_Format(PyExc_OSError,
+ "Cannot get file size for ELF file '%s' during section '%s' search: %s",
+ elf_file, secname, strerror(errno));
goto exit;
}
file_memory = mmap(NULL, file_stats.st_size, PROT_READ, MAP_PRIVATE, fd, 0);
if (file_memory == MAP_FAILED) {
- PyErr_SetFromErrno(PyExc_OSError);
+ PyErr_Format(PyExc_OSError,
+ "Cannot memory map ELF file '%s' (size: %lld bytes) for section '%s' search: %s",
+ elf_file, (long long)file_stats.st_size, secname, strerror(errno));
goto exit;
}
Elf_Ehdr* elf_header = (Elf_Ehdr*)file_memory;
+ // Validate ELF header
+ if (elf_header->e_shstrndx >= elf_header->e_shnum) {
+ PyErr_Format(PyExc_RuntimeError,
+ "Invalid ELF file '%s': string table index %u >= section count %u",
+ elf_file, elf_header->e_shstrndx, elf_header->e_shnum);
+ goto exit;
+ }
+
Elf_Shdr* section_header_table = (Elf_Shdr*)(file_memory + elf_header->e_shoff);
Elf_Shdr* shstrtab_section = &section_header_table[elf_header->e_shstrndx];
@@ -534,6 +601,10 @@ search_elf_file_for_section(
}
}
+ if (section == NULL) {
+ goto exit;
+ }
+
Elf_Phdr* program_header_table = (Elf_Phdr*)(file_memory + elf_header->e_phoff);
// Find the first PT_LOAD segment
Elf_Phdr* first_load_segment = NULL;
@@ -544,18 +615,25 @@ search_elf_file_for_section(
}
}
- if (section != NULL && first_load_segment != NULL) {
- uintptr_t elf_load_addr = first_load_segment->p_vaddr
- - (first_load_segment->p_vaddr % first_load_segment->p_align);
- result = start_address + (uintptr_t)section->sh_addr - elf_load_addr;
+ if (first_load_segment == NULL) {
+ PyErr_Format(PyExc_RuntimeError,
+ "No PT_LOAD segment found in ELF file '%s' (%u program headers examined)",
+ elf_file, elf_header->e_phnum);
+ goto exit;
}
+ uintptr_t elf_load_addr = first_load_segment->p_vaddr
+ - (first_load_segment->p_vaddr % first_load_segment->p_align);
+ result = start_address + (uintptr_t)section->sh_addr - elf_load_addr;
+
exit:
if (file_memory != NULL) {
munmap(file_memory, file_stats.st_size);
}
if (fd >= 0 && close(fd) != 0) {
- PyErr_SetFromErrno(PyExc_OSError);
+ PyErr_Format(PyExc_OSError,
+ "Failed to close ELF file '%s': %s",
+ elf_file, strerror(errno));
result = 0;
}
return result;
@@ -569,7 +647,9 @@ search_linux_map_for_section(proc_handle_t *handle, const char* secname, const c
FILE* maps_file = fopen(maps_file_path, "r");
if (maps_file == NULL) {
- PyErr_SetFromErrno(PyExc_OSError);
+ PyErr_Format(PyExc_OSError,
+ "Cannot open process memory map file '%s' for PID %d section search: %s",
+ maps_file_path, handle->pid, strerror(errno));
return 0;
}
@@ -578,11 +658,16 @@ search_linux_map_for_section(proc_handle_t *handle, const char* secname, const c
char *line = PyMem_Malloc(linesz);
if (!line) {
fclose(maps_file);
- PyErr_NoMemory();
+ _set_debug_exception_cause(PyExc_MemoryError,
+ "Cannot allocate memory for reading process map file '%s'",
+ maps_file_path);
return 0;
}
uintptr_t retval = 0;
+ int lines_processed = 0;
+ int matches_found = 0;
+
while (fgets(line + linelen, linesz - linelen, maps_file) != NULL) {
linelen = strlen(line);
if (line[linelen - 1] != '\n') {
@@ -593,7 +678,9 @@ search_linux_map_for_section(proc_handle_t *handle, const char* secname, const c
if (!biggerline) {
PyMem_Free(line);
fclose(maps_file);
- PyErr_NoMemory();
+ _set_debug_exception_cause(PyExc_MemoryError,
+ "Cannot reallocate memory while reading process map file '%s' (attempted size: %zu)",
+ maps_file_path, linesz);
return 0;
}
line = biggerline;
@@ -604,6 +691,7 @@ search_linux_map_for_section(proc_handle_t *handle, const char* secname, const c
line[linelen - 1] = '\0';
// and prepare to read the next line into the start of the buffer.
linelen = 0;
+ lines_processed++;
unsigned long start = 0;
unsigned long path_pos = 0;
@@ -624,6 +712,7 @@ search_linux_map_for_section(proc_handle_t *handle, const char* secname, const c
}
if (strstr(filename, substr)) {
+ matches_found++;
retval = search_elf_file_for_section(handle, secname, start, path);
if (retval) {
break;
@@ -633,7 +722,9 @@ search_linux_map_for_section(proc_handle_t *handle, const char* secname, const c
PyMem_Free(line);
if (fclose(maps_file) != 0) {
- PyErr_SetFromErrno(PyExc_OSError);
+ PyErr_Format(PyExc_OSError,
+ "Failed to close process map file '%s': %s",
+ maps_file_path, strerror(errno));
retval = 0;
}
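The Linux branch walks /proc/<pid>/maps, pulling the start address and pathname out of each line before descending into the matching ELF file; the hunks above mostly change how failures along that path are reported. A simplified standalone sketch of the same scan, Linux-only, using getline() for brevity and /proc/self/maps plus a "libc" substring as stand-ins for the real inputs:

#define _GNU_SOURCE
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int
main(void)
{
    FILE *maps = fopen("/proc/self/maps", "r");
    if (maps == NULL) {
        perror("fopen");
        return 1;
    }

    char *line = NULL;
    size_t cap = 0;
    while (getline(&line, &cap, maps) != -1) {
        unsigned long start = 0;
        char path[4096] = "";
        /* Each line reads "start-end perms offset dev inode  path". */
        if (sscanf(line, "%lx-%*lx %*s %*s %*s %*s %4095s", &start, path) >= 1
            && strstr(path, "libc") != NULL)
        {
            printf("0x%lx %s\n", start, path);
        }
    }
    free(line);
    fclose(maps);
    return 0;
}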
@@ -649,11 +740,20 @@ static void* analyze_pe(const wchar_t* mod_path, BYTE* remote_base, const char*
HANDLE hFile = CreateFileW(mod_path, GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL);
if (hFile == INVALID_HANDLE_VALUE) {
PyErr_SetFromWindowsErr(0);
+ DWORD error = GetLastError();
+ PyErr_Format(PyExc_OSError,
+ "Cannot open PE file for section '%s' analysis (error %lu)",
+ secname, error);
return NULL;
}
+
HANDLE hMap = CreateFileMapping(hFile, NULL, PAGE_READONLY, 0, 0, 0);
if (!hMap) {
PyErr_SetFromWindowsErr(0);
+ DWORD error = GetLastError();
+ PyErr_Format(PyExc_OSError,
+ "Cannot create file mapping for PE file section '%s' analysis (error %lu)",
+ secname, error);
CloseHandle(hFile);
return NULL;
}
@@ -661,6 +761,10 @@ static void* analyze_pe(const wchar_t* mod_path, BYTE* remote_base, const char*
BYTE* mapView = (BYTE*)MapViewOfFile(hMap, FILE_MAP_READ, 0, 0, 0);
if (!mapView) {
PyErr_SetFromWindowsErr(0);
+ DWORD error = GetLastError();
+ PyErr_Format(PyExc_OSError,
+ "Cannot map view of PE file for section '%s' analysis (error %lu)",
+ secname, error);
CloseHandle(hMap);
CloseHandle(hFile);
return NULL;
@@ -668,7 +772,9 @@ static void* analyze_pe(const wchar_t* mod_path, BYTE* remote_base, const char*
IMAGE_DOS_HEADER* pDOSHeader = (IMAGE_DOS_HEADER*)mapView;
if (pDOSHeader->e_magic != IMAGE_DOS_SIGNATURE) {
- PyErr_SetString(PyExc_RuntimeError, "Invalid DOS signature.");
+ PyErr_Format(PyExc_RuntimeError,
+ "Invalid DOS signature (0x%x) in PE file for section '%s' analysis (expected 0x%x)",
+ pDOSHeader->e_magic, secname, IMAGE_DOS_SIGNATURE);
UnmapViewOfFile(mapView);
CloseHandle(hMap);
CloseHandle(hFile);
@@ -677,7 +783,9 @@ static void* analyze_pe(const wchar_t* mod_path, BYTE* remote_base, const char*
IMAGE_NT_HEADERS* pNTHeaders = (IMAGE_NT_HEADERS*)(mapView + pDOSHeader->e_lfanew);
if (pNTHeaders->Signature != IMAGE_NT_SIGNATURE) {
- PyErr_SetString(PyExc_RuntimeError, "Invalid NT signature.");
+ PyErr_Format(PyExc_RuntimeError,
+ "Invalid NT signature (0x%lx) in PE file for section '%s' analysis (expected 0x%lx)",
+ pNTHeaders->Signature, secname, IMAGE_NT_SIGNATURE);
UnmapViewOfFile(mapView);
CloseHandle(hMap);
CloseHandle(hFile);
@@ -711,17 +819,27 @@ search_windows_map_for_section(proc_handle_t* handle, const char* secname, const
} while (hProcSnap == INVALID_HANDLE_VALUE && GetLastError() == ERROR_BAD_LENGTH);
if (hProcSnap == INVALID_HANDLE_VALUE) {
- PyErr_SetString(PyExc_PermissionError, "Unable to create module snapshot. Check permissions or PID.");
+ PyErr_SetFromWindowsErr(0);
+ DWORD error = GetLastError();
+ PyErr_Format(PyExc_PermissionError,
+ "Unable to create module snapshot for PID %d section '%s' "
+ "search (error %lu). Check permissions or PID validity",
+ handle->pid, secname, error);
return 0;
}
MODULEENTRY32W moduleEntry;
moduleEntry.dwSize = sizeof(moduleEntry);
void* runtime_addr = NULL;
+ int modules_examined = 0;
+ int matches_found = 0;
for (BOOL hasModule = Module32FirstW(hProcSnap, &moduleEntry); hasModule; hasModule = Module32NextW(hProcSnap, &moduleEntry)) {
+ modules_examined++;
+
// Look for either python executable or DLL
if (wcsstr(moduleEntry.szModule, substr)) {
+ matches_found++;
runtime_addr = analyze_pe(moduleEntry.szExePath, moduleEntry.modBaseAddr, secname);
if (runtime_addr != NULL) {
break;
@@ -730,6 +848,7 @@ search_windows_map_for_section(proc_handle_t* handle, const char* secname, const
}
CloseHandle(hProcSnap);
+
return (uintptr_t)runtime_addr;
}
@@ -747,7 +866,9 @@ _Py_RemoteDebug_GetPyRuntimeAddress(proc_handle_t* handle)
if (address == 0) {
// Error out: 'python' substring covers both executable and DLL
PyObject *exc = PyErr_GetRaisedException();
- PyErr_SetString(PyExc_RuntimeError, "Failed to find the PyRuntime section in the process.");
+ PyErr_Format(PyExc_RuntimeError,
+ "Failed to find the PyRuntime section in process %d on Windows platform",
+ handle->pid);
_PyErr_ChainExceptions1(exc);
}
#elif defined(__linux__)
@@ -756,16 +877,28 @@ _Py_RemoteDebug_GetPyRuntimeAddress(proc_handle_t* handle)
if (address == 0) {
// Error out: 'python' substring covers both executable and DLL
PyObject *exc = PyErr_GetRaisedException();
- PyErr_SetString(PyExc_RuntimeError, "Failed to find the PyRuntime section in the process.");
+ PyErr_Format(PyExc_RuntimeError,
+ "Failed to find the PyRuntime section in process %d on Linux platform",
+ handle->pid);
_PyErr_ChainExceptions1(exc);
}
-#elif defined(__APPLE__) && TARGET_OS_OSX
+#elif defined(__APPLE__) && defined(TARGET_OS_OSX) && TARGET_OS_OSX
+ // On macOS, try libpython first, then fall back to python/Python
- address = search_map_for_section(handle, "PyRuntime", "libpython");
- if (address == 0) {
- // TODO: Differentiate between not found and error
+ const char* candidates[] = {"libpython", "python", "Python", NULL};
+ for (const char** candidate = candidates; *candidate; candidate++) {
PyErr_Clear();
- address = search_map_for_section(handle, "PyRuntime", "python");
+ address = search_map_for_section(handle, "PyRuntime", *candidate);
+ if (address != 0) {
+ break;
+ }
+ }
+ if (address == 0) {
+ PyObject *exc = PyErr_GetRaisedException();
+ PyErr_Format(PyExc_RuntimeError,
+ "Failed to find the PyRuntime section in process %d "
+ "on macOS platform (tried both libpython and python)",
+ handle->pid);
+ _PyErr_ChainExceptions1(exc);
}
#else
Py_UNREACHABLE();
@@ -784,6 +917,11 @@ _Py_RemoteDebug_ReadRemoteMemory(proc_handle_t *handle, uintptr_t remote_address
do {
if (!ReadProcessMemory(handle->hProcess, (LPCVOID)(remote_address + result), (char*)dst + result, len - result, &read_bytes)) {
PyErr_SetFromWindowsErr(0);
+ DWORD error = GetLastError();
+ _set_debug_exception_cause(PyExc_OSError,
+ "ReadProcessMemory failed for PID %d at address 0x%lx "
+ "(size %zu, partial read %zu bytes): Windows error %lu",
+ handle->pid, remote_address + result, len - result, result, error);
return -1;
}
result += read_bytes;
@@ -804,13 +942,17 @@ _Py_RemoteDebug_ReadRemoteMemory(proc_handle_t *handle, uintptr_t remote_address
read_bytes = process_vm_readv(handle->pid, local, 1, remote, 1, 0);
if (read_bytes < 0) {
PyErr_SetFromErrno(PyExc_OSError);
+ _set_debug_exception_cause(PyExc_OSError,
+ "process_vm_readv failed for PID %d at address 0x%lx "
+ "(size %zu, partial read %zd bytes): %s",
+ handle->pid, remote_address + result, len - result, result, strerror(errno));
return -1;
}
result += read_bytes;
} while ((size_t)read_bytes != local[0].iov_len);
return 0;
-#elif defined(__APPLE__) && TARGET_OS_OSX
+#elif defined(__APPLE__) && defined(TARGET_OS_OSX) && TARGET_OS_OSX
Py_ssize_t result = -1;
kern_return_t kr = mach_vm_read_overwrite(
handle->task,
@@ -822,13 +964,22 @@ _Py_RemoteDebug_ReadRemoteMemory(proc_handle_t *handle, uintptr_t remote_address
if (kr != KERN_SUCCESS) {
switch (kr) {
case KERN_PROTECTION_FAILURE:
- PyErr_SetString(PyExc_PermissionError, "Not enough permissions to read memory");
+ PyErr_Format(PyExc_PermissionError,
+ "Memory protection failure reading from PID %d at address "
+ "0x%lx (size %zu): insufficient permissions",
+ handle->pid, remote_address, len);
break;
case KERN_INVALID_ARGUMENT:
- PyErr_SetString(PyExc_PermissionError, "Invalid argument to mach_vm_read_overwrite");
+ PyErr_Format(PyExc_ValueError,
+ "Invalid argument to mach_vm_read_overwrite for PID %d at "
+ "address 0x%lx (size %zu)",
+ handle->pid, remote_address, len);
break;
default:
- PyErr_SetString(PyExc_RuntimeError, "Unknown error reading memory");
+ PyErr_Format(PyExc_RuntimeError,
+ "mach_vm_read_overwrite failed for PID %d at address 0x%lx "
+ "(size %zu): kern_return_t %d",
+ handle->pid, remote_address, len, kr);
}
return -1;
}
@@ -868,7 +1019,10 @@ _Py_RemoteDebug_PagedReadRemoteMemory(proc_handle_t *handle,
if (entry->data == NULL) {
entry->data = PyMem_RawMalloc(page_size);
if (entry->data == NULL) {
- PyErr_NoMemory();
+ _set_debug_exception_cause(PyExc_MemoryError,
+ "Cannot allocate %zu bytes for page cache entry "
+ "during read from PID %d at address 0x%lx",
+ page_size, handle->pid, addr);
return -1;
}
}
@@ -900,13 +1054,16 @@ _Py_RemoteDebug_ReadDebugOffsets(
*runtime_start_address = _Py_RemoteDebug_GetPyRuntimeAddress(handle);
if (!*runtime_start_address) {
if (!PyErr_Occurred()) {
- PyErr_SetString(
- PyExc_RuntimeError, "Failed to get PyRuntime address");
+ PyErr_Format(PyExc_RuntimeError,
+ "Failed to locate PyRuntime address for PID %d",
+ handle->pid);
}
+ _set_debug_exception_cause(PyExc_RuntimeError, "PyRuntime address lookup failed during debug offsets initialization");
return -1;
}
size_t size = sizeof(struct _Py_DebugOffsets);
if (0 != _Py_RemoteDebug_ReadRemoteMemory(handle, *runtime_start_address, size, debug_offsets)) {
+ _set_debug_exception_cause(PyExc_RuntimeError, "Failed to read debug offsets structure from remote process");
return -1;
}
return 0;
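
Note on the error-reporting hunks above: low-level failures (fopen, fgets, ReadProcessMemory, process_vm_readv, mach_vm_read_overwrite) are now routed through _set_debug_exception_cause() and _PyErr_ChainExceptions1(), so the RuntimeError that finally surfaces keeps the underlying OS error attached instead of discarding it. A minimal Python-level sketch of the same chaining pattern (pure standard exception chaining; whether the C helpers populate __cause__ or __context__ is an implementation detail not shown here):

    def read_debug_offsets(pid):
        try:
            raise OSError("process_vm_readv failed")  # stand-in for the low-level failure
        except OSError as exc:
            # Python-level analogue of PyErr_Format(...) + _PyErr_ChainExceptions1(exc)
            raise RuntimeError(f"Failed to read debug offsets for PID {pid}") from exc

    try:
        read_debug_offsets(1234)
    except RuntimeError as err:
        print(err)            # high-level message
        print(err.__cause__)  # original OS-level error is preserved
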
diff --git a/Python/specialize.c b/Python/specialize.c
index 06995d46d8b..92f79d39d55 100644
--- a/Python/specialize.c
+++ b/Python/specialize.c
@@ -2904,53 +2904,57 @@ int
#endif // Py_STATS
Py_NO_INLINE void
-_Py_Specialize_ForIter(_PyStackRef iter, _Py_CODEUNIT *instr, int oparg)
+_Py_Specialize_ForIter(_PyStackRef iter, _PyStackRef null_or_index, _Py_CODEUNIT *instr, int oparg)
{
assert(ENABLE_SPECIALIZATION_FT);
assert(_PyOpcode_Caches[FOR_ITER] == INLINE_CACHE_ENTRIES_FOR_ITER);
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
PyTypeObject *tp = Py_TYPE(iter_o);
+
+ if (PyStackRef_IsNull(null_or_index)) {
#ifdef Py_GIL_DISABLED
- // Only specialize for uniquely referenced iterators, so that we know
- // they're only referenced by this one thread. This is more limiting
- // than we need (even `it = iter(mylist); for item in it:` won't get
- // specialized) but we don't have a way to check whether we're the only
- // _thread_ who has access to the object.
- if (!_PyObject_IsUniquelyReferenced(iter_o))
- goto failure;
-#endif
- if (tp == &PyListIter_Type) {
-#ifdef Py_GIL_DISABLED
- _PyListIterObject *it = (_PyListIterObject *)iter_o;
- if (!_Py_IsOwnedByCurrentThread((PyObject *)it->it_seq) &&
- !_PyObject_GC_IS_SHARED(it->it_seq)) {
- // Maybe this should just set GC_IS_SHARED in a critical
- // section, instead of leaving it to the first iteration?
+ // Only specialize for uniquely referenced iterators, so that we know
+ // they're only referenced by this one thread. This is more limiting
+ // than we need (even `it = iter(mylist); for item in it:` won't get
+ // specialized) but we don't have a way to check whether we're the only
+ // _thread_ who has access to the object.
+ if (!_PyObject_IsUniquelyReferenced(iter_o)) {
goto failure;
}
#endif
- specialize(instr, FOR_ITER_LIST);
- return;
- }
- else if (tp == &PyTupleIter_Type) {
- specialize(instr, FOR_ITER_TUPLE);
- return;
- }
- else if (tp == &PyRangeIter_Type) {
- specialize(instr, FOR_ITER_RANGE);
- return;
+ if (tp == &PyRangeIter_Type) {
+ specialize(instr, FOR_ITER_RANGE);
+ return;
+ }
+ else if (tp == &PyGen_Type && oparg <= SHRT_MAX) {
+ // Generators are very much not thread-safe, so don't worry about
+ // the specialization not being thread-safe.
+ assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR ||
+ instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == INSTRUMENTED_END_FOR
+ );
+ /* Don't specialize if PEP 523 is active */
+ if (_PyInterpreterState_GET()->eval_frame) {
+ goto failure;
+ }
+ specialize(instr, FOR_ITER_GEN);
+ return;
+ }
}
- else if (tp == &PyGen_Type && oparg <= SHRT_MAX) {
- // Generators are very much not thread-safe, so don't worry about
- // the specialization not being thread-safe.
- assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR ||
- instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == INSTRUMENTED_END_FOR
- );
- /* Don't specialize if PEP 523 is active */
- if (_PyInterpreterState_GET()->eval_frame)
- goto failure;
- specialize(instr, FOR_ITER_GEN);
- return;
+ else {
+ if (tp == &PyList_Type) {
+#ifdef Py_GIL_DISABLED
+ // Only specialize for lists owned by this thread or shared
+ if (!_Py_IsOwnedByCurrentThread(iter_o) && !_PyObject_GC_IS_SHARED(iter_o)) {
+ goto failure;
+ }
+#endif
+ specialize(instr, FOR_ITER_LIST);
+ return;
+ }
+ else if (tp == &PyTuple_Type) {
+ specialize(instr, FOR_ITER_TUPLE);
+ return;
+ }
}
failure:
SPECIALIZATION_FAIL(FOR_ITER,
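
Note on the specialize.c hunk: _Py_Specialize_ForIter() now branches on null_or_index. When the second stack item is a tagged index, the loop is iterating a list or tuple directly (FOR_ITER_LIST / FOR_ITER_TUPLE); when it is NULL, the loop holds a real iterator object (range iterator or generator). Which form a given loop ends up specialized to is an interpreter detail, but the shapes the specializer distinguishes look roughly like this (illustrative only):

    data = [1, 2, 3]

    for x in data:            # list iterated by index -> FOR_ITER_LIST candidate
        pass

    for x in (1, 2, 3):       # tuple iterated by index -> FOR_ITER_TUPLE candidate
        pass

    for x in range(3):        # range iterator object -> FOR_ITER_RANGE candidate
        pass

    def gen():
        yield 1

    for x in gen():           # generator object -> FOR_ITER_GEN candidate
        pass
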
diff --git a/Python/stackrefs.c b/Python/stackrefs.c
index 69d4e8b9431..b2a1369031a 100644
--- a/Python/stackrefs.c
+++ b/Python/stackrefs.c
@@ -216,4 +216,12 @@ PyStackRef_IsNullOrInt(_PyStackRef ref)
return PyStackRef_IsNull(ref) || PyStackRef_IsTaggedInt(ref);
}
+_PyStackRef
+PyStackRef_IncrementTaggedIntNoOverflow(_PyStackRef ref)
+{
+ assert(ref.index <= INT_MAX - 2); // No overflow
+ return (_PyStackRef){ .index = ref.index + 2 };
+}
+
+
#endif
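
Note on the stackrefs.c hunk: PyStackRef_IncrementTaggedIntNoOverflow() bumps .index by 2, which suggests tagged ints keep their payload above a low tag bit, so adding 2 advances the logical value by 1. That reading is an assumption drawn from this hunk alone; a toy model in Python:

    TAG_INT = 0b01                     # assumed low tag bit marking "tagged int"

    def tag(value):
        return (value << 1) | TAG_INT  # payload kept above the tag bit

    def untag(ref):
        return ref >> 1

    def increment_no_overflow(ref):
        return ref + 2                 # same step as the C helper: +2 == payload + 1

    ref = tag(41)
    assert untag(increment_no_overflow(ref)) == 42
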
diff --git a/Python/sysmodule.c b/Python/sysmodule.c
index 4ed045e3297..e5ae841d195 100644
--- a/Python/sysmodule.c
+++ b/Python/sysmodule.c
@@ -76,12 +76,12 @@ module sys
PyObject *
-_PySys_GetRequiredAttr(PyObject *name)
+PySys_GetAttr(PyObject *name)
{
if (!PyUnicode_Check(name)) {
PyErr_Format(PyExc_TypeError,
- "attribute name must be string, not '%.200s'",
- Py_TYPE(name)->tp_name);
+ "attribute name must be string, not '%T'",
+ name);
return NULL;
}
PyThreadState *tstate = _PyThreadState_GET();
@@ -98,7 +98,7 @@ _PySys_GetRequiredAttr(PyObject *name)
}
PyObject *
-_PySys_GetRequiredAttrString(const char *name)
+PySys_GetAttrString(const char *name)
{
PyThreadState *tstate = _PyThreadState_GET();
PyObject *sysdict = tstate->interp->sysdict;
@@ -114,12 +114,12 @@ _PySys_GetRequiredAttrString(const char *name)
}
int
-_PySys_GetOptionalAttr(PyObject *name, PyObject **value)
+PySys_GetOptionalAttr(PyObject *name, PyObject **value)
{
if (!PyUnicode_Check(name)) {
PyErr_Format(PyExc_TypeError,
- "attribute name must be string, not '%.200s'",
- Py_TYPE(name)->tp_name);
+ "attribute name must be string, not '%T'",
+ name);
*value = NULL;
return -1;
}
@@ -133,7 +133,7 @@ _PySys_GetOptionalAttr(PyObject *name, PyObject **value)
}
int
-_PySys_GetOptionalAttrString(const char *name, PyObject **value)
+PySys_GetOptionalAttrString(const char *name, PyObject **value)
{
PyThreadState *tstate = _PyThreadState_GET();
PyObject *sysdict = tstate->interp->sysdict;
@@ -773,7 +773,7 @@ sys_displayhook(PyObject *module, PyObject *o)
}
if (PyObject_SetAttr(builtins, _Py_LATIN1_CHR('_'), Py_None) != 0)
return NULL;
- outf = _PySys_GetRequiredAttr(&_Py_ID(stdout));
+ outf = PySys_GetAttr(&_Py_ID(stdout));
if (outf == NULL) {
return NULL;
}
@@ -3005,7 +3005,7 @@ static PyObject *
get_warnoptions(PyThreadState *tstate)
{
PyObject *warnoptions;
- if (_PySys_GetOptionalAttr(&_Py_ID(warnoptions), &warnoptions) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(warnoptions), &warnoptions) < 0) {
return NULL;
}
if (warnoptions == NULL || !PyList_Check(warnoptions)) {
@@ -3042,7 +3042,7 @@ PySys_ResetWarnOptions(void)
}
PyObject *warnoptions;
- if (_PySys_GetOptionalAttr(&_Py_ID(warnoptions), &warnoptions) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(warnoptions), &warnoptions) < 0) {
PyErr_Clear();
return;
}
@@ -3106,7 +3106,7 @@ PyAPI_FUNC(int)
PySys_HasWarnOptions(void)
{
PyObject *warnoptions;
- if (_PySys_GetOptionalAttr(&_Py_ID(warnoptions), &warnoptions) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(warnoptions), &warnoptions) < 0) {
PyErr_Clear();
return 0;
}
@@ -3120,7 +3120,7 @@ static PyObject *
get_xoptions(PyThreadState *tstate)
{
PyObject *xoptions;
- if (_PySys_GetOptionalAttr(&_Py_ID(_xoptions), &xoptions) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(_xoptions), &xoptions) < 0) {
return NULL;
}
if (xoptions == NULL || !PyDict_Check(xoptions)) {
@@ -3373,7 +3373,7 @@ sys_set_flag(PyObject *flags, Py_ssize_t pos, PyObject *value)
int
_PySys_SetFlagObj(Py_ssize_t pos, PyObject *value)
{
- PyObject *flags = _PySys_GetRequiredAttrString("flags");
+ PyObject *flags = PySys_GetAttrString("flags");
if (flags == NULL) {
return -1;
}
@@ -3935,7 +3935,7 @@ _PySys_UpdateConfig(PyThreadState *tstate)
#undef COPY_WSTR
// sys.flags
- PyObject *flags = _PySys_GetRequiredAttrString("flags");
+ PyObject *flags = PySys_GetAttrString("flags");
if (flags == NULL) {
return -1;
}
@@ -4251,7 +4251,7 @@ PySys_SetArgvEx(int argc, wchar_t **argv, int updatepath)
}
PyObject *sys_path;
- if (_PySys_GetOptionalAttr(&_Py_ID(path), &sys_path) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(path), &sys_path) < 0) {
Py_FatalError("can't get sys.path");
}
else if (sys_path != NULL) {
@@ -4347,7 +4347,7 @@ sys_write(PyObject *key, FILE *fp, const char *format, va_list va)
PyObject *exc = _PyErr_GetRaisedException(tstate);
written = PyOS_vsnprintf(buffer, sizeof(buffer), format, va);
- file = _PySys_GetRequiredAttr(key);
+ file = PySys_GetAttr(key);
if (sys_pyfile_write(buffer, file) != 0) {
_PyErr_Clear(tstate);
fputs(buffer, fp);
@@ -4391,7 +4391,7 @@ sys_format(PyObject *key, FILE *fp, const char *format, va_list va)
PyObject *exc = _PyErr_GetRaisedException(tstate);
message = PyUnicode_FromFormatV(format, va);
if (message != NULL) {
- file = _PySys_GetRequiredAttr(key);
+ file = PySys_GetAttr(key);
if (sys_pyfile_write_unicode(message, file) != 0) {
_PyErr_Clear(tstate);
utf8 = PyUnicode_AsUTF8(message);
diff --git a/Python/thread.c b/Python/thread.c
index 4ff5f11a348..18c4af7f634 100644
--- a/Python/thread.c
+++ b/Python/thread.c
@@ -39,7 +39,8 @@
const long long PY_TIMEOUT_MAX = PY_TIMEOUT_MAX_VALUE;
-static void PyThread__init_thread(void); /* Forward */
+/* Forward declaration */
+static void PyThread__init_thread(void);
#define initialized _PyRuntime.threads.initialized
@@ -71,6 +72,79 @@ PyThread_init_thread(void)
#endif
+/*
+ * Lock support.
+ */
+
+PyThread_type_lock
+PyThread_allocate_lock(void)
+{
+ if (!initialized) {
+ PyThread_init_thread();
+ }
+
+ PyMutex *lock = (PyMutex *)PyMem_RawMalloc(sizeof(PyMutex));
+ if (lock) {
+ *lock = (PyMutex){0};
+ }
+
+ return (PyThread_type_lock)lock;
+}
+
+void
+PyThread_free_lock(PyThread_type_lock lock)
+{
+ PyMem_RawFree(lock);
+}
+
+PyLockStatus
+PyThread_acquire_lock_timed(PyThread_type_lock lock, PY_TIMEOUT_T microseconds,
+ int intr_flag)
+{
+ PyTime_t timeout; // relative timeout
+ if (microseconds >= 0) {
+ // bpo-41710: PyThread_acquire_lock_timed() cannot report timeout
+ // overflow to the caller, so clamp the timeout to
+ // [PyTime_MIN, PyTime_MAX].
+ //
+ // PyTime_MAX nanoseconds is around 292.3 years.
+ //
+ // _thread.Lock.acquire() and _thread.RLock.acquire() raise an
+ // OverflowError if microseconds is greater than PY_TIMEOUT_MAX.
+ timeout = _PyTime_FromMicrosecondsClamp(microseconds);
+ }
+ else {
+ timeout = -1;
+ }
+
+ _PyLockFlags flags = _Py_LOCK_DONT_DETACH;
+ if (intr_flag) {
+ flags |= _PY_FAIL_IF_INTERRUPTED;
+ }
+
+ return _PyMutex_LockTimed((PyMutex *)lock, timeout, flags);
+}
+
+void
+PyThread_release_lock(PyThread_type_lock lock)
+{
+ PyMutex_Unlock((PyMutex *)lock);
+}
+
+int
+_PyThread_at_fork_reinit(PyThread_type_lock *lock)
+{
+ _PyMutex_at_fork_reinit((PyMutex *)lock);
+ return 0;
+}
+
+int
+PyThread_acquire_lock(PyThread_type_lock lock, int waitflag)
+{
+ return PyThread_acquire_lock_timed(lock, waitflag ? -1 : 0, /*intr_flag=*/0);
+}
+
+
/* return the current thread stack size */
size_t
PyThread_get_stacksize(void)
@@ -261,11 +335,7 @@ PyThread_GetInfo(void)
#ifdef HAVE_PTHREAD_STUBS
value = Py_NewRef(Py_None);
#elif defined(_POSIX_THREADS)
-#ifdef USE_SEMAPHORES
- value = PyUnicode_FromString("semaphore");
-#else
- value = PyUnicode_FromString("mutex+cond");
-#endif
+ value = PyUnicode_FromString("pymutex");
if (value == NULL) {
Py_DECREF(threadinfo);
return NULL;
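
Note on the thread.c hunk: with the semaphore and mutex+cond lock backends removed, PyThread locks are implemented on top of PyMutex, and sys.thread_info.lock reports the single implementation as "pymutex" on POSIX builds. A quick check from Python (the exact string depends on how the interpreter was built):

    import sys

    info = sys.thread_info
    print(info.name)   # e.g. "pthread" or "nt"
    print(info.lock)   # "pymutex" after this change on _POSIX_THREADS builds
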
diff --git a/Python/thread_nt.h b/Python/thread_nt.h
index e078b98be3c..9a29d14ef67 100644
--- a/Python/thread_nt.h
+++ b/Python/thread_nt.h
@@ -300,98 +300,6 @@ PyThread_hang_thread(void)
}
}
-/*
- * Lock support. It has to be implemented as semaphores.
- * I [Dag] tried to implement it with mutex but I could find a way to
- * tell whether a thread already own the lock or not.
- */
-PyThread_type_lock
-PyThread_allocate_lock(void)
-{
- PNRMUTEX mutex;
-
- if (!initialized)
- PyThread_init_thread();
-
- mutex = AllocNonRecursiveMutex() ;
-
- PyThread_type_lock aLock = (PyThread_type_lock) mutex;
- assert(aLock);
-
- return aLock;
-}
-
-void
-PyThread_free_lock(PyThread_type_lock aLock)
-{
- FreeNonRecursiveMutex(aLock) ;
-}
-
-// WaitForSingleObject() accepts timeout in milliseconds in the range
-// [0; 0xFFFFFFFE] (DWORD type). INFINITE value (0xFFFFFFFF) means no
-// timeout. 0xFFFFFFFE milliseconds is around 49.7 days.
-const DWORD TIMEOUT_MS_MAX = 0xFFFFFFFE;
-
-/*
- * Return 1 on success if the lock was acquired
- *
- * and 0 if the lock was not acquired. This means a 0 is returned
- * if the lock has already been acquired by this thread!
- */
-PyLockStatus
-PyThread_acquire_lock_timed(PyThread_type_lock aLock,
- PY_TIMEOUT_T microseconds, int intr_flag)
-{
- assert(aLock);
-
- /* Fow now, intr_flag does nothing on Windows, and lock acquires are
- * uninterruptible. */
- PyLockStatus success;
- PY_TIMEOUT_T milliseconds;
-
- if (microseconds >= 0) {
- milliseconds = microseconds / 1000;
- // Round milliseconds away from zero
- if (microseconds % 1000 > 0) {
- milliseconds++;
- }
- if (milliseconds > (PY_TIMEOUT_T)TIMEOUT_MS_MAX) {
- // bpo-41710: PyThread_acquire_lock_timed() cannot report timeout
- // overflow to the caller, so clamp the timeout to
- // [0, TIMEOUT_MS_MAX] milliseconds.
- //
- // _thread.Lock.acquire() and _thread.RLock.acquire() raise an
- // OverflowError if microseconds is greater than PY_TIMEOUT_MAX.
- milliseconds = TIMEOUT_MS_MAX;
- }
- assert(milliseconds != INFINITE);
- }
- else {
- milliseconds = INFINITE;
- }
-
- if (EnterNonRecursiveMutex((PNRMUTEX)aLock,
- (DWORD)milliseconds) == WAIT_OBJECT_0) {
- success = PY_LOCK_ACQUIRED;
- }
- else {
- success = PY_LOCK_FAILURE;
- }
-
- return success;
-}
-int
-PyThread_acquire_lock(PyThread_type_lock aLock, int waitflag)
-{
- return PyThread_acquire_lock_timed(aLock, waitflag ? -1 : 0, 0);
-}
-
-void
-PyThread_release_lock(PyThread_type_lock aLock)
-{
- assert(aLock);
- (void)LeaveNonRecursiveMutex((PNRMUTEX) aLock);
-}
/* minimum/maximum thread stack sizes supported */
#define THREAD_MIN_STACKSIZE 0x8000 /* 32 KiB */
diff --git a/Python/thread_pthread.h b/Python/thread_pthread.h
index da405824244..13992f95723 100644
--- a/Python/thread_pthread.h
+++ b/Python/thread_pthread.h
@@ -99,16 +99,6 @@
#undef HAVE_SEM_CLOCKWAIT
#endif
-/* Whether or not to use semaphores directly rather than emulating them with
- * mutexes and condition variables:
- */
-#if (defined(_POSIX_SEMAPHORES) && !defined(HAVE_BROKEN_POSIX_SEMAPHORES) && \
- (defined(HAVE_SEM_TIMEDWAIT) || defined(HAVE_SEM_CLOCKWAIT)))
-# define USE_SEMAPHORES
-#else
-# undef USE_SEMAPHORES
-#endif
-
/* On platforms that don't use standard POSIX threads pthread_sigmask()
* isn't present. DEC threads uses sigprocmask() instead as do most
@@ -442,388 +432,6 @@ PyThread_hang_thread(void)
}
}
-#ifdef USE_SEMAPHORES
-
-/*
- * Lock support.
- */
-
-PyThread_type_lock
-PyThread_allocate_lock(void)
-{
- sem_t *lock;
- int status, error = 0;
-
- if (!initialized)
- PyThread_init_thread();
-
- lock = (sem_t *)PyMem_RawMalloc(sizeof(sem_t));
-
- if (lock) {
- status = sem_init(lock,0,1);
- CHECK_STATUS("sem_init");
-
- if (error) {
- PyMem_RawFree((void *)lock);
- lock = NULL;
- }
- }
-
- return (PyThread_type_lock)lock;
-}
-
-void
-PyThread_free_lock(PyThread_type_lock lock)
-{
- sem_t *thelock = (sem_t *)lock;
- int status, error = 0;
-
- (void) error; /* silence unused-but-set-variable warning */
-
- if (!thelock)
- return;
-
- status = sem_destroy(thelock);
- CHECK_STATUS("sem_destroy");
-
- PyMem_RawFree((void *)thelock);
-}
-
-/*
- * As of February 2002, Cygwin thread implementations mistakenly report error
- * codes in the return value of the sem_ calls (like the pthread_ functions).
- * Correct implementations return -1 and put the code in errno. This supports
- * either.
- */
-static int
-fix_status(int status)
-{
- return (status == -1) ? errno : status;
-}
-
-PyLockStatus
-PyThread_acquire_lock_timed(PyThread_type_lock lock, PY_TIMEOUT_T microseconds,
- int intr_flag)
-{
- PyLockStatus success;
- sem_t *thelock = (sem_t *)lock;
- int status, error = 0;
-
- (void) error; /* silence unused-but-set-variable warning */
-
- PyTime_t timeout; // relative timeout
- if (microseconds >= 0) {
- // bpo-41710: PyThread_acquire_lock_timed() cannot report timeout
- // overflow to the caller, so clamp the timeout to
- // [PyTime_MIN, PyTime_MAX].
- //
- // PyTime_MAX nanoseconds is around 292.3 years.
- //
- // _thread.Lock.acquire() and _thread.RLock.acquire() raise an
- // OverflowError if microseconds is greater than PY_TIMEOUT_MAX.
- timeout = _PyTime_FromMicrosecondsClamp(microseconds);
- }
- else {
- timeout = -1;
- }
-
-#ifdef HAVE_SEM_CLOCKWAIT
- struct timespec abs_timeout;
- // Local scope for deadline
- {
- PyTime_t now;
- // silently ignore error: cannot report error to the caller
- (void)PyTime_MonotonicRaw(&now);
- PyTime_t deadline = _PyTime_Add(now, timeout);
- _PyTime_AsTimespec_clamp(deadline, &abs_timeout);
- }
-#else
- PyTime_t deadline = 0;
- if (timeout > 0 && !intr_flag) {
- deadline = _PyDeadline_Init(timeout);
- }
-#endif
-
- while (1) {
- if (timeout > 0) {
-#ifdef HAVE_SEM_CLOCKWAIT
- status = fix_status(sem_clockwait(thelock, CLOCK_MONOTONIC,
- &abs_timeout));
-#else
- PyTime_t now;
- // silently ignore error: cannot report error to the caller
- (void)PyTime_TimeRaw(&now);
- PyTime_t abs_time = _PyTime_Add(now, timeout);
-
- struct timespec ts;
- _PyTime_AsTimespec_clamp(abs_time, &ts);
- status = fix_status(sem_timedwait(thelock, &ts));
-#endif
- }
- else if (timeout == 0) {
- status = fix_status(sem_trywait(thelock));
- }
- else {
- status = fix_status(sem_wait(thelock));
- }
-
- /* Retry if interrupted by a signal, unless the caller wants to be
- notified. */
- if (intr_flag || status != EINTR) {
- break;
- }
-
- // sem_clockwait() uses an absolute timeout, there is no need
- // to recompute the relative timeout.
-#ifndef HAVE_SEM_CLOCKWAIT
- if (timeout > 0) {
- /* wait interrupted by a signal (EINTR): recompute the timeout */
- timeout = _PyDeadline_Get(deadline);
- if (timeout < 0) {
- status = ETIMEDOUT;
- break;
- }
- }
-#endif
- }
-
- /* Don't check the status if we're stopping because of an interrupt. */
- if (!(intr_flag && status == EINTR)) {
- if (timeout > 0) {
- if (status != ETIMEDOUT) {
-#ifdef HAVE_SEM_CLOCKWAIT
- CHECK_STATUS("sem_clockwait");
-#else
- CHECK_STATUS("sem_timedwait");
-#endif
- }
- }
- else if (timeout == 0) {
- if (status != EAGAIN) {
- CHECK_STATUS("sem_trywait");
- }
- }
- else {
- CHECK_STATUS("sem_wait");
- }
- }
-
- if (status == 0) {
- success = PY_LOCK_ACQUIRED;
- } else if (intr_flag && status == EINTR) {
- success = PY_LOCK_INTR;
- } else {
- success = PY_LOCK_FAILURE;
- }
-
- return success;
-}
-
-void
-PyThread_release_lock(PyThread_type_lock lock)
-{
- sem_t *thelock = (sem_t *)lock;
- int status, error = 0;
-
- (void) error; /* silence unused-but-set-variable warning */
-
- status = sem_post(thelock);
- CHECK_STATUS("sem_post");
-}
-
-#else /* USE_SEMAPHORES */
-
-/*
- * Lock support.
- */
-PyThread_type_lock
-PyThread_allocate_lock(void)
-{
- pthread_lock *lock;
- int status, error = 0;
-
- if (!initialized)
- PyThread_init_thread();
-
- lock = (pthread_lock *) PyMem_RawCalloc(1, sizeof(pthread_lock));
- if (lock) {
- lock->locked = 0;
-
- status = pthread_mutex_init(&lock->mut, NULL);
- CHECK_STATUS_PTHREAD("pthread_mutex_init");
- /* Mark the pthread mutex underlying a Python mutex as
- pure happens-before. We can't simply mark the
- Python-level mutex as a mutex because it can be
- acquired and released in different threads, which
- will cause errors. */
- _Py_ANNOTATE_PURE_HAPPENS_BEFORE_MUTEX(&lock->mut);
-
- status = _PyThread_cond_init(&lock->lock_released);
- CHECK_STATUS_PTHREAD("pthread_cond_init");
-
- if (error) {
- PyMem_RawFree((void *)lock);
- lock = 0;
- }
- }
-
- return (PyThread_type_lock) lock;
-}
-
-void
-PyThread_free_lock(PyThread_type_lock lock)
-{
- pthread_lock *thelock = (pthread_lock *)lock;
- int status, error = 0;
-
- (void) error; /* silence unused-but-set-variable warning */
-
- /* some pthread-like implementations tie the mutex to the cond
- * and must have the cond destroyed first.
- */
- status = pthread_cond_destroy( &thelock->lock_released );
- CHECK_STATUS_PTHREAD("pthread_cond_destroy");
-
- status = pthread_mutex_destroy( &thelock->mut );
- CHECK_STATUS_PTHREAD("pthread_mutex_destroy");
-
- PyMem_RawFree((void *)thelock);
-}
-
-PyLockStatus
-PyThread_acquire_lock_timed(PyThread_type_lock lock, PY_TIMEOUT_T microseconds,
- int intr_flag)
-{
- PyLockStatus success = PY_LOCK_FAILURE;
- pthread_lock *thelock = (pthread_lock *)lock;
- int status, error = 0;
-
- if (microseconds == 0) {
- status = pthread_mutex_trylock( &thelock->mut );
- if (status != EBUSY) {
- CHECK_STATUS_PTHREAD("pthread_mutex_trylock[1]");
- }
- }
- else {
- status = pthread_mutex_lock( &thelock->mut );
- CHECK_STATUS_PTHREAD("pthread_mutex_lock[1]");
- }
- if (status != 0) {
- goto done;
- }
-
- if (thelock->locked == 0) {
- success = PY_LOCK_ACQUIRED;
- goto unlock;
- }
- if (microseconds == 0) {
- goto unlock;
- }
-
- struct timespec abs_timeout;
- if (microseconds > 0) {
- _PyThread_cond_after(microseconds, &abs_timeout);
- }
- // Continue trying until we get the lock
-
- // mut must be locked by me -- part of the condition protocol
- while (1) {
- if (microseconds > 0) {
- status = pthread_cond_timedwait(&thelock->lock_released,
- &thelock->mut, &abs_timeout);
- if (status == 1) {
- break;
- }
- if (status == ETIMEDOUT) {
- break;
- }
- CHECK_STATUS_PTHREAD("pthread_cond_timedwait");
- }
- else {
- status = pthread_cond_wait(
- &thelock->lock_released,
- &thelock->mut);
- CHECK_STATUS_PTHREAD("pthread_cond_wait");
- }
-
- if (intr_flag && status == 0 && thelock->locked) {
- // We were woken up, but didn't get the lock. We probably received
- // a signal. Return PY_LOCK_INTR to allow the caller to handle
- // it and retry.
- success = PY_LOCK_INTR;
- break;
- }
-
- if (status == 0 && !thelock->locked) {
- success = PY_LOCK_ACQUIRED;
- break;
- }
-
- // Wait got interrupted by a signal: retry
- }
-
-unlock:
- if (success == PY_LOCK_ACQUIRED) {
- thelock->locked = 1;
- }
- status = pthread_mutex_unlock( &thelock->mut );
- CHECK_STATUS_PTHREAD("pthread_mutex_unlock[1]");
-
-done:
- if (error) {
- success = PY_LOCK_FAILURE;
- }
- return success;
-}
-
-void
-PyThread_release_lock(PyThread_type_lock lock)
-{
- pthread_lock *thelock = (pthread_lock *)lock;
- int status, error = 0;
-
- (void) error; /* silence unused-but-set-variable warning */
-
- status = pthread_mutex_lock( &thelock->mut );
- CHECK_STATUS_PTHREAD("pthread_mutex_lock[3]");
-
- thelock->locked = 0;
-
- /* wake up someone (anyone, if any) waiting on the lock */
- status = pthread_cond_signal( &thelock->lock_released );
- CHECK_STATUS_PTHREAD("pthread_cond_signal");
-
- status = pthread_mutex_unlock( &thelock->mut );
- CHECK_STATUS_PTHREAD("pthread_mutex_unlock[3]");
-}
-
-#endif /* USE_SEMAPHORES */
-
-int
-_PyThread_at_fork_reinit(PyThread_type_lock *lock)
-{
- PyThread_type_lock new_lock = PyThread_allocate_lock();
- if (new_lock == NULL) {
- return -1;
- }
-
- /* bpo-6721, bpo-40089: The old lock can be in an inconsistent state.
- fork() can be called in the middle of an operation on the lock done by
- another thread. So don't call PyThread_free_lock(*lock).
-
- Leak memory on purpose. Don't release the memory either since the
- address of a mutex is relevant. Putting two mutexes at the same address
- can lead to problems. */
-
- *lock = new_lock;
- return 0;
-}
-
-int
-PyThread_acquire_lock(PyThread_type_lock lock, int waitflag)
-{
- return PyThread_acquire_lock_timed(lock, waitflag ? -1 : 0, /*intr_flag=*/0);
-}
/* set the thread stack size.
* Return 0 if size is valid, -1 if size is invalid,
diff --git a/Python/traceback.c b/Python/traceback.c
index c06cb1a5908..4f674eaf557 100644
--- a/Python/traceback.c
+++ b/Python/traceback.c
@@ -9,7 +9,6 @@
#include "pycore_interpframe.h" // _PyFrame_GetCode()
#include "pycore_pyerrors.h" // _PyErr_GetRaisedException()
#include "pycore_pystate.h" // _PyThreadState_GET()
-#include "pycore_sysmodule.h" // _PySys_GetOptionalAttr()
#include "pycore_traceback.h" // EXCEPTION_TB_HEADER
#include "frameobject.h" // PyFrame_New()
@@ -399,7 +398,7 @@ _Py_FindSourceFile(PyObject *filename, char* namebuf, size_t namelen, PyObject *
taillen = strlen(tail);
PyThreadState *tstate = _PyThreadState_GET();
- if (_PySys_GetOptionalAttr(&_Py_ID(path), &syspath) < 0) {
+ if (PySys_GetOptionalAttr(&_Py_ID(path), &syspath) < 0) {
PyErr_Clear();
goto error;
}
@@ -777,7 +776,7 @@ _PyTraceBack_Print(PyObject *v, const char *header, PyObject *f)
PyErr_BadInternalCall();
return -1;
}
- if (_PySys_GetOptionalAttrString("tracebacklimit", &limitv) < 0) {
+ if (PySys_GetOptionalAttrString("tracebacklimit", &limitv) < 0) {
return -1;
}
else if (limitv != NULL && PyLong_Check(limitv)) {
diff --git a/README.rst b/README.rst
index 5bad7ea1c36..baea5e0978d 100644
--- a/README.rst
+++ b/README.rst
@@ -135,8 +135,8 @@ libraries for additional performance gains.
What's New
----------
-We have a comprehensive overview of the changes in the `What's New in Python
-3.14 <https://docs.python.org/3.14/whatsnew/3.14.html>`_ document. For a more
+We have a comprehensive overview of the changes in the `What's new in Python
+3.15 <https://docs.python.org/3.15/whatsnew/3.15.html>`_ document. For a more
detailed change log, read `Misc/NEWS
<https://github.com/python/cpython/tree/main/Misc/NEWS.d>`_, but a full
accounting of changes can only be gleaned from the `commit history
@@ -149,7 +149,7 @@ entitled "Installing multiple versions".
Documentation
-------------
-`Documentation for Python 3.14 <https://docs.python.org/3.14/>`_ is online,
+`Documentation for Python 3.15 <https://docs.python.org/3.15/>`_ is online,
updated daily.
It can also be downloaded in many formats for faster access. The documentation
@@ -200,15 +200,15 @@ intend to install multiple versions using the same prefix you must decide which
version (if any) is your "primary" version. Install that version using
``make install``. Install all other versions using ``make altinstall``.
-For example, if you want to install Python 2.7, 3.6, and 3.14 with 3.14 being the
-primary version, you would execute ``make install`` in your 3.14 build directory
+For example, if you want to install Python 2.7, 3.6, and 3.15 with 3.15 being the
+primary version, you would execute ``make install`` in your 3.15 build directory
and ``make altinstall`` in the others.
Release Schedule
----------------
-See `PEP 745 <https://peps.python.org/pep-0745/>`__ for Python 3.14 release details.
+See `PEP 790 <https://peps.python.org/pep-0790/>`__ for Python 3.15 release details.
Copyright and License Information
diff --git a/Tools/build/generate_sbom.py b/Tools/build/generate_sbom.py
index 5845f2d85c7..df52f8de762 100644
--- a/Tools/build/generate_sbom.py
+++ b/Tools/build/generate_sbom.py
@@ -4,6 +4,7 @@ import glob
import hashlib
import json
import os
+import random
import re
import subprocess
import sys
@@ -164,16 +165,18 @@ def get_externals() -> list[str]:
def download_with_retries(download_location: str,
- max_retries: int = 5,
- base_delay: float = 2.0) -> typing.Any:
+ max_retries: int = 7,
+ base_delay: float = 2.25,
+ max_jitter: float = 1.0) -> typing.Any:
"""Download a file with exponential backoff retry."""
- for attempt in range(max_retries):
+ for attempt in range(max_retries + 1):
try:
resp = urllib.request.urlopen(download_location)
except urllib.error.URLError as ex:
if attempt == max_retries:
- raise ex
- time.sleep(base_delay**attempt)
+ msg = f"Download from {download_location} failed."
+ raise OSError(msg) from ex
+ time.sleep(base_delay**attempt + random.uniform(0, max_jitter))
else:
return resp
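
Note on the generate_sbom.py hunk: the retry loop now runs max_retries + 1 attempts, adds random jitter to the exponential delay, and wraps the final failure in an OSError chained to the original URLError. The delay schedule can be previewed in isolation (numbers use the new defaults; the jitter term is random per attempt):

    import random

    base_delay = 2.25
    max_jitter = 1.0

    # sleeps applied after each failed attempt (the 8th and final attempt raises instead)
    delays = [base_delay**attempt + random.uniform(0, max_jitter) for attempt in range(7)]
    print([round(d, 1) for d in delays])
    # roughly [1.0, 2.3, 5.1, 11.4, 25.6, 57.7, 129.7] plus up to 1s of jitter each
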
diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py
index 96d64a27e0a..3070559db8a 100644
--- a/Tools/cases_generator/analyzer.py
+++ b/Tools/cases_generator/analyzer.py
@@ -679,8 +679,11 @@ NON_ESCAPING_FUNCTIONS = (
"PyStackRef_IsTaggedInt",
"PyStackRef_TagInt",
"PyStackRef_UntagInt",
+ "PyStackRef_IncrementTaggedIntNoOverflow",
+ "PyStackRef_IsNullOrInt",
)
+
def check_escaping_calls(instr: parser.CodeDef, escapes: dict[SimpleStmt, EscapingCall]) -> None:
error: lexer.Token | None = None
calls = {e.call for e in escapes.values()}
diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py
index 9e60d219a71..02f9a952754 100644
--- a/Tools/cases_generator/generators_common.py
+++ b/Tools/cases_generator/generators_common.py
@@ -140,6 +140,7 @@ class Emitter:
) -> bool:
if storage.spilled:
raise analysis_error("stack_pointer needs reloading before dispatch", tkn)
+ storage.stack.flush(self.out)
self.emit(tkn)
return False
diff --git a/Tools/ftscalingbench/ftscalingbench.py b/Tools/ftscalingbench/ftscalingbench.py
index 926bc66b944..1a59e25189d 100644
--- a/Tools/ftscalingbench/ftscalingbench.py
+++ b/Tools/ftscalingbench/ftscalingbench.py
@@ -27,6 +27,7 @@ import queue
import sys
import threading
import time
+from operator import methodcaller
# The iterations in individual benchmarks are scaled by this factor.
WORK_SCALE = 100
@@ -188,6 +189,18 @@ def thread_local_read():
_ = tmp.x
_ = tmp.x
+class MyClass:
+ __slots__ = ()
+
+ def func(self):
+ pass
+
+@register_benchmark
+def method_caller():
+ mc = methodcaller("func")
+ obj = MyClass()
+ for i in range(1000 * WORK_SCALE):
+ mc(obj)
def bench_one_thread(func):
t0 = time.perf_counter_ns()
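
Note on the ftscalingbench hunk: the new method_caller benchmark stresses operator.methodcaller, which resolves and calls a named method on whatever object it is given; under free threading this exercises that lookup path from many threads at once. The primitive on its own behaves like this:

    from operator import methodcaller

    upper = methodcaller("upper")
    print(upper("free threading"))      # 'FREE THREADING'

    strip_dots = methodcaller("strip", ".")
    print(strip_dots("...pymutex..."))  # 'pymutex'
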
diff --git a/Tools/inspection/benchmark_external_inspection.py b/Tools/inspection/benchmark_external_inspection.py
new file mode 100644
index 00000000000..62182194c1a
--- /dev/null
+++ b/Tools/inspection/benchmark_external_inspection.py
@@ -0,0 +1,459 @@
+import _remote_debugging
+import time
+import subprocess
+import sys
+import contextlib
+import tempfile
+import os
+import argparse
+from _colorize import get_colors, can_colorize
+
+CODE = '''\
+import time
+import os
+import sys
+import math
+
+def slow_fibonacci(n):
+ """Intentionally slow recursive fibonacci - should show up prominently in profiler"""
+ if n <= 1:
+ return n
+ return slow_fibonacci(n-1) + slow_fibonacci(n-2)
+
+def medium_computation():
+ """Medium complexity function"""
+ result = 0
+ for i in range(1000):
+ result += math.sqrt(i) * math.sin(i)
+ return result
+
+def fast_loop():
+ """Fast simple loop"""
+ total = 0
+ for i in range(100):
+ total += i
+ return total
+
+def string_operations():
+ """String manipulation that should be visible in profiler"""
+ text = "hello world " * 100
+ words = text.split()
+ return " ".join(reversed(words))
+
+def nested_calls():
+ """Nested function calls to test call stack depth"""
+ def level1():
+ def level2():
+ def level3():
+ return medium_computation()
+ return level3()
+ return level2()
+ return level1()
+
+def main_loop():
+ """Main computation loop with different execution paths"""
+ iteration = 0
+
+ while True:
+ iteration += 1
+
+ # Different execution paths with different frequencies
+ if iteration % 50 == 0:
+ # Expensive operation - should show high per-call time
+ result = slow_fibonacci(20)
+
+ elif iteration % 10 == 0:
+ # Medium operation
+ result = nested_calls()
+
+ elif iteration % 5 == 0:
+ # String operations
+ result = string_operations()
+
+ else:
+ # Fast operation - most common
+ result = fast_loop()
+
+ # Small delay to make sampling more interesting
+ time.sleep(0.001)
+
+if __name__ == "__main__":
+ main_loop()
+'''
+
+DEEP_STATIC_CODE = """\
+import time
+def factorial(n):
+ if n <= 1:
+ time.sleep(10000)
+ return 1
+ return n * factorial(n-1)
+
+factorial(900)
+"""
+
+CODE_WITH_TONS_OF_THREADS = '''\
+import time
+import threading
+import random
+import math
+
+def cpu_intensive_work():
+ """Do some CPU intensive calculations"""
+ result = 0
+ for _ in range(10000):
+ result += math.sin(random.random()) * math.cos(random.random())
+ return result
+
+def io_intensive_work():
+ """Simulate IO intensive work with sleeps"""
+ time.sleep(0.1)
+
+def mixed_workload():
+ """Mix of CPU and IO work"""
+ while True:
+ if random.random() < 0.3:
+ cpu_intensive_work()
+ else:
+ io_intensive_work()
+
+def create_threads(n):
+ """Create n threads doing mixed workloads"""
+ threads = []
+ for _ in range(n):
+ t = threading.Thread(target=mixed_workload, daemon=True)
+ t.start()
+ threads.append(t)
+ return threads
+
+# Start with 5 threads
+active_threads = create_threads(5)
+thread_count = 5
+
+# Main thread manages threads and does work
+while True:
+ # Randomly add or remove threads
+ if random.random() < 0.1: # 10% chance each iteration
+ if random.random() < 0.5 and thread_count < 100:
+ # Add 1-5 new threads
+ new_count = random.randint(1, 5)
+ new_threads = create_threads(new_count)
+ active_threads.extend(new_threads)
+ thread_count += new_count
+ elif thread_count > 10:
+ # Remove 1-3 threads
+ remove_count = random.randint(1, 5)
+ # The threads will terminate naturally since they're daemons
+ active_threads = active_threads[remove_count:]
+ thread_count -= remove_count
+
+ cpu_intensive_work()
+ time.sleep(0.05)
+'''
+
+CODE_EXAMPLES = {
+ "basic": {
+ "code": CODE,
+ "description": "Mixed workload with fibonacci, computations, and string operations",
+ },
+ "deep_static": {
+ "code": DEEP_STATIC_CODE,
+ "description": "Deep recursive call stack with 900+ frames (factorial)",
+ },
+ "threads": {
+ "code": CODE_WITH_TONS_OF_THREADS,
+ "description": "Tons of threads doing mixed CPU/IO work",
+ },
+}
+
+
+def benchmark(unwinder, duration_seconds=10):
+ """Benchmark mode - measure raw sampling speed for specified duration"""
+ sample_count = 0
+ fail_count = 0
+ total_work_time = 0.0
+ start_time = time.perf_counter()
+ end_time = start_time + duration_seconds
+
+ colors = get_colors(can_colorize())
+
+ print(
+ f"{colors.BOLD_BLUE}Benchmarking sampling speed for {duration_seconds} seconds...{colors.RESET}"
+ )
+
+ try:
+ while time.perf_counter() < end_time:
+ work_start = time.perf_counter()
+ try:
+ stack_trace = unwinder.get_stack_trace()
+ if stack_trace:
+ sample_count += 1
+ except (OSError, RuntimeError, UnicodeDecodeError) as e:
+ fail_count += 1
+
+ work_end = time.perf_counter()
+ total_work_time += work_end - work_start
+
+ total_attempts = sample_count + fail_count
+ if total_attempts % 10000 == 0:
+ avg_work_time_us = (total_work_time / total_attempts) * 1e6
+ work_rate = (
+ total_attempts / total_work_time if total_work_time > 0 else 0
+ )
+ success_rate = (sample_count / total_attempts) * 100
+
+ # Color code the success rate
+ if success_rate >= 95:
+ success_color = colors.GREEN
+ elif success_rate >= 80:
+ success_color = colors.YELLOW
+ else:
+ success_color = colors.RED
+
+ print(
+ f"{colors.CYAN}Attempts:{colors.RESET} {total_attempts} | "
+ f"{colors.CYAN}Success:{colors.RESET} {success_color}{success_rate:.1f}%{colors.RESET} | "
+ f"{colors.CYAN}Rate:{colors.RESET} {colors.MAGENTA}{work_rate:.1f}Hz{colors.RESET} | "
+ f"{colors.CYAN}Avg:{colors.RESET} {colors.YELLOW}{avg_work_time_us:.2f}µs{colors.RESET}"
+ )
+ except KeyboardInterrupt:
+ print(f"\n{colors.YELLOW}Benchmark interrupted by user{colors.RESET}")
+
+ actual_end_time = time.perf_counter()
+ wall_time = actual_end_time - start_time
+ total_attempts = sample_count + fail_count
+
+ # Return final statistics
+ return {
+ "wall_time": wall_time,
+ "total_attempts": total_attempts,
+ "sample_count": sample_count,
+ "fail_count": fail_count,
+ "success_rate": (
+ (sample_count / total_attempts) * 100 if total_attempts > 0 else 0
+ ),
+ "total_work_time": total_work_time,
+ "avg_work_time_us": (
+ (total_work_time / total_attempts) * 1e6 if total_attempts > 0 else 0
+ ),
+ "work_rate_hz": total_attempts / total_work_time if total_work_time > 0 else 0,
+ "samples_per_sec": sample_count / wall_time if wall_time > 0 else 0,
+ }
+
+
+def print_benchmark_results(results):
+ """Print comprehensive benchmark results"""
+ colors = get_colors(can_colorize())
+
+ print(f"\n{colors.BOLD_GREEN}{'='*60}{colors.RESET}")
+ print(f"{colors.BOLD_GREEN}get_stack_trace() Benchmark Results{colors.RESET}")
+ print(f"{colors.BOLD_GREEN}{'='*60}{colors.RESET}")
+
+ # Basic statistics
+ print(f"\n{colors.BOLD_CYAN}Basic Statistics:{colors.RESET}")
+ print(
+ f" {colors.CYAN}Wall time:{colors.RESET} {colors.YELLOW}{results['wall_time']:.3f}{colors.RESET} seconds"
+ )
+ print(
+ f" {colors.CYAN}Total attempts:{colors.RESET} {colors.MAGENTA}{results['total_attempts']:,}{colors.RESET}"
+ )
+ print(
+ f" {colors.CYAN}Successful samples:{colors.RESET} {colors.GREEN}{results['sample_count']:,}{colors.RESET}"
+ )
+ print(
+ f" {colors.CYAN}Failed samples:{colors.RESET} {colors.RED}{results['fail_count']:,}{colors.RESET}"
+ )
+
+ # Color code the success rate
+ success_rate = results["success_rate"]
+ if success_rate >= 95:
+ success_color = colors.BOLD_GREEN
+ elif success_rate >= 80:
+ success_color = colors.BOLD_YELLOW
+ else:
+ success_color = colors.BOLD_RED
+
+ print(
+ f" {colors.CYAN}Success rate:{colors.RESET} {success_color}{success_rate:.2f}%{colors.RESET}"
+ )
+
+ # Performance metrics
+ print(f"\n{colors.BOLD_CYAN}Performance Metrics:{colors.RESET}")
+ print(
+ f" {colors.CYAN}Average call time:{colors.RESET} {colors.YELLOW}{results['avg_work_time_us']:.2f}{colors.RESET} µs"
+ )
+ print(
+ f" {colors.CYAN}Work rate:{colors.RESET} {colors.MAGENTA}{results['work_rate_hz']:.1f}{colors.RESET} calls/sec"
+ )
+ print(
+ f" {colors.CYAN}Sample rate:{colors.RESET} {colors.MAGENTA}{results['samples_per_sec']:.1f}{colors.RESET} samples/sec"
+ )
+ print(
+ f" {colors.CYAN}Total work time:{colors.RESET} {colors.YELLOW}{results['total_work_time']:.3f}{colors.RESET} seconds"
+ )
+
+ # Color code work efficiency
+ efficiency = (results["total_work_time"] / results["wall_time"]) * 100
+ if efficiency >= 80:
+ efficiency_color = colors.GREEN
+ elif efficiency >= 50:
+ efficiency_color = colors.YELLOW
+ else:
+ efficiency_color = colors.RED
+
+ print(
+ f" {colors.CYAN}Work efficiency:{colors.RESET} {efficiency_color}{efficiency:.1f}%{colors.RESET}"
+ )
+
+
+def parse_arguments():
+ """Parse command line arguments"""
+ # Build the code examples description
+ examples_desc = "\n".join(
+ [f" {name}: {info['description']}" for name, info in CODE_EXAMPLES.items()]
+ )
+
+ parser = argparse.ArgumentParser(
+ description="Benchmark get_stack_trace() performance",
+ formatter_class=argparse.RawDescriptionHelpFormatter,
+ epilog=f"""
+Examples:
+ %(prog)s # Run basic benchmark for 10 seconds (default)
+ %(prog)s --duration 30 # Run basic benchmark for 30 seconds
+ %(prog)s -d 60 # Run basic benchmark for 60 seconds
+ %(prog)s --code deep_static # Run deep static call stack benchmark
+ %(prog)s --code deep_static -d 30 # Run deep static benchmark for 30 seconds
+
+Available code examples:
+{examples_desc}
+ """,
+ color=True,
+ )
+
+ parser.add_argument(
+ "--duration",
+ "-d",
+ type=int,
+ default=10,
+ help="Benchmark duration in seconds (default: 10)",
+ )
+
+ parser.add_argument(
+ "--code",
+ "-c",
+ choices=list(CODE_EXAMPLES.keys()),
+ default="basic",
+ help="Code example to benchmark (default: basic)",
+ )
+
+ return parser.parse_args()
+
+
+def create_target_process(temp_file, code_example="basic"):
+ """Create and start the target process for benchmarking"""
+ example_info = CODE_EXAMPLES.get(code_example, {"code": CODE})
+ selected_code = example_info["code"]
+ temp_file.write(selected_code)
+ temp_file.flush()
+
+ process = subprocess.Popen(
+ [sys.executable, temp_file.name], stdout=subprocess.PIPE, stderr=subprocess.PIPE
+ )
+
+ # Give it time to start
+ time.sleep(1.0)
+
+ # Check if it's still running
+ if process.poll() is not None:
+ stdout, stderr = process.communicate()
+ raise RuntimeError(
+ f"Target process exited unexpectedly:\nSTDOUT: {stdout.decode()}\nSTDERR: {stderr.decode()}"
+ )
+
+ return process, temp_file.name
+
+
+def cleanup_process(process, temp_file_path):
+ """Clean up the target process and temporary file"""
+ with contextlib.suppress(Exception):
+ if process.poll() is None:
+ process.terminate()
+ try:
+ process.wait(timeout=5.0)
+ except subprocess.TimeoutExpired:
+ process.kill()
+ process.wait()
+
+
+def main():
+ """Main benchmark function"""
+ colors = get_colors(can_colorize())
+ args = parse_arguments()
+
+ print(f"{colors.BOLD_MAGENTA}External Inspection Benchmark Tool{colors.RESET}")
+ print(f"{colors.BOLD_MAGENTA}{'=' * 34}{colors.RESET}")
+
+ example_info = CODE_EXAMPLES.get(args.code, {"description": "Unknown"})
+ print(
+ f"\n{colors.CYAN}Code Example:{colors.RESET} {colors.GREEN}{args.code}{colors.RESET}"
+ )
+ print(f"{colors.CYAN}Description:{colors.RESET} {example_info['description']}")
+ print(
+ f"{colors.CYAN}Benchmark Duration:{colors.RESET} {colors.YELLOW}{args.duration}{colors.RESET} seconds"
+ )
+
+ process = None
+ temp_file_path = None
+
+ try:
+ # Create target process
+ print(f"\n{colors.BLUE}Creating and starting target process...{colors.RESET}")
+ with tempfile.NamedTemporaryFile(mode="w", suffix=".py") as temp_file:
+ process, temp_file_path = create_target_process(temp_file, args.code)
+ print(
+ f"{colors.GREEN}Target process started with PID: {colors.BOLD_WHITE}{process.pid}{colors.RESET}"
+ )
+
+ # Run benchmark with specified duration
+ with process:
+ # Create unwinder and run benchmark
+ print(f"{colors.BLUE}Initializing unwinder...{colors.RESET}")
+ try:
+ unwinder = _remote_debugging.RemoteUnwinder(
+ process.pid, all_threads=True
+ )
+ results = benchmark(unwinder, duration_seconds=args.duration)
+ finally:
+ cleanup_process(process, temp_file_path)
+
+ # Print results
+ print_benchmark_results(results)
+
+ except PermissionError as e:
+ print(
+ f"{colors.BOLD_RED}Error: Insufficient permissions to read stack trace: {e}{colors.RESET}"
+ )
+ print(
+ f"{colors.YELLOW}Try running with appropriate privileges (e.g., sudo){colors.RESET}"
+ )
+ return 1
+ except Exception as e:
+ print(f"{colors.BOLD_RED}Error during benchmarking: {e}{colors.RESET}")
+ if process:
+ with contextlib.suppress(Exception):
+ stdout, stderr = process.communicate(timeout=1)
+ if stdout:
+ print(
+ f"{colors.CYAN}Process STDOUT:{colors.RESET} {stdout.decode()}"
+ )
+ if stderr:
+ print(
+ f"{colors.RED}Process STDERR:{colors.RESET} {stderr.decode()}"
+ )
+ raise
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
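
Note on the new benchmark tool: it drives the private _remote_debugging.RemoteUnwinder API that also backs the error-reporting changes earlier in this diff. Outside the tool, the minimal sampling step looks like the sketch below (private API, CPython-version specific, and attaching to another process typically needs elevated privileges):

    import _remote_debugging

    def sample_once(pid):
        unwinder = _remote_debugging.RemoteUnwinder(pid, all_threads=True)
        return unwinder.get_stack_trace()  # per-thread frame lists, as used in benchmark()

    # print(sample_once(<pid of a running CPython process>))
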
diff --git a/Tools/msi/freethreaded/freethreaded_files.wxs b/Tools/msi/freethreaded/freethreaded_files.wxs
index 86d9a8b83f6..0707e77b5e9 100644
--- a/Tools/msi/freethreaded/freethreaded_files.wxs
+++ b/Tools/msi/freethreaded/freethreaded_files.wxs
@@ -103,7 +103,7 @@
</ComponentGroup>
</Fragment>
- <?define exts=pyexpat;select;unicodedata;winsound;_bz2;_elementtree;_socket;_ssl;_ctypes;_hashlib;_multiprocessing;_lzma;_decimal;_overlapped;_sqlite3;_asyncio;_queue;_uuid;_wmi;_zoneinfo;_zstd;_testcapi;_ctypes_test;_testbuffer;_testimportmultiple;_testmultiphase;_testsinglephase;_testconsole;_testinternalcapi;_testclinic;_testclinic_limited;_tkinter ?>
+ <?define exts=pyexpat;select;unicodedata;winsound;_bz2;_elementtree;_socket;_ssl;_ctypes;_hashlib;_multiprocessing;_lzma;_decimal;_overlapped;_sqlite3;_asyncio;_queue;_remote_debugging;_uuid;_wmi;_zoneinfo;_zstd;_testcapi;_ctypes_test;_testbuffer;_testimportmultiple;_testmultiphase;_testsinglephase;_testconsole;_testinternalcapi;_testclinic;_testclinic_limited;_tkinter ?>
<Fragment>
<DirectoryRef Id="Lib_venv_scripts_nt__freethreaded" />
diff --git a/Tools/msi/lib/lib_files.wxs b/Tools/msi/lib/lib_files.wxs
index 8439518bcbd..4d44299f783 100644
--- a/Tools/msi/lib/lib_files.wxs
+++ b/Tools/msi/lib/lib_files.wxs
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
- <?define exts=pyexpat;select;unicodedata;winsound;_bz2;_elementtree;_socket;_ssl;_ctypes;_hashlib;_multiprocessing;_lzma;_decimal;_overlapped;_sqlite3;_asyncio;_queue;_uuid;_wmi;_zoneinfo;_zstd ?>
+ <?define exts=pyexpat;select;unicodedata;winsound;_bz2;_elementtree;_socket;_ssl;_ctypes;_hashlib;_multiprocessing;_lzma;_decimal;_overlapped;_sqlite3;_asyncio;_queue;_remote_debugging;_uuid;_wmi;_zoneinfo;_zstd ?>
<Fragment>
<DirectoryRef Id="Lib_venv_scripts_nt" />
diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py
index 2be85a163b4..09c5651f24a 100644
--- a/Tools/peg_generator/pegen/c_generator.py
+++ b/Tools/peg_generator/pegen/c_generator.py
@@ -44,7 +44,7 @@ EXTENSION_PREFIX = """\
# define MAXSTACK 4000
# endif
#else
-# define MAXSTACK 4000
+# define MAXSTACK 6000
#endif
"""
diff --git a/Tools/tsan/suppressions_free_threading.txt b/Tools/tsan/suppressions_free_threading.txt
index 3230f969436..93421b623b9 100644
--- a/Tools/tsan/suppressions_free_threading.txt
+++ b/Tools/tsan/suppressions_free_threading.txt
@@ -12,15 +12,12 @@
# These warnings trigger directly in a CPython function.
-race_top:assign_version_tag
-race_top:_Py_slot_tp_getattr_hook
race_top:dump_traceback
race_top:fatal_error
race_top:_PyFrame_GetCode
race_top:_PyFrame_Initialize
race_top:_PyObject_TryGetInstanceAttribute
race_top:PyUnstable_InterpreterFrame_GetLine
-race_top:type_modified_unlocked
race_top:write_thread_id
# gh-129068: race on shared range iterators (test_free_threading.test_zip.ZipThreading.test_threading)
@@ -29,9 +26,6 @@ race_top:rangeiter_next
# gh-129748: test.test_free_threading.test_slots.TestSlots.test_object
race_top:mi_block_set_nextx
-# gh-127266: type slot updates are not thread-safe (test_opcache.test_load_attr_method_lazy_dict)
-race_top:update_one_slot
-
# https://gist.github.com/mpage/6962e8870606cfc960e159b407a0cb40
thread:pthread_create
@@ -49,7 +43,6 @@ race:list_inplace_repeat_lock_held
race:PyObject_Realloc
# gh-133467. Some of these could be hard to trigger.
-race_top:update_one_slot
race_top:_Py_slot_tp_getattr_hook
race_top:slot_tp_descr_get
race_top:type_set_name
diff --git a/configure b/configure
index abdd28fcabf..029bf527da4 100755
--- a/configure
+++ b/configure
@@ -14052,6 +14052,7 @@ fi
+
have_uuid=missing
for ac_header in uuid.h
@@ -14061,6 +14062,7 @@ if test "x$ac_cv_header_uuid_h" = xyes
then :
printf "%s\n" "#define HAVE_UUID_H 1" >>confdefs.h
+
for ac_func in uuid_create uuid_enc_be
do :
as_ac_var=`printf "%s\n" "ac_cv_func_$ac_func" | sed "$as_sed_sh"`
@@ -14070,7 +14072,9 @@ then :
cat >>confdefs.h <<_ACEOF
#define `printf "%s\n" "HAVE_$ac_func" | sed "$as_sed_cpp"` 1
_ACEOF
- have_uuid=yes
+
+ have_uuid=yes
+ ac_cv_have_uuid_h=yes
LIBUUID_CFLAGS=${LIBUUID_CFLAGS-""}
LIBUUID_LIBS=${LIBUUID_LIBS-""}
@@ -14160,6 +14164,7 @@ if test "x$ac_cv_header_uuid_uuid_h" = xyes
then :
printf "%s\n" "#define HAVE_UUID_UUID_H 1" >>confdefs.h
+ ac_cv_have_uuid_uuid_h=yes
py_check_lib_save_LIBS=$LIBS
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for uuid_generate_time in -luuid" >&5
printf %s "checking for uuid_generate_time in -luuid... " >&6; }
@@ -14257,8 +14262,9 @@ fi
printf "%s\n" "$ac_cv_lib_uuid_uuid_generate_time_safe" >&6; }
if test "x$ac_cv_lib_uuid_uuid_generate_time_safe" = xyes
then :
- have_uuid=yes
- printf "%s\n" "#define HAVE_UUID_GENERATE_TIME_SAFE 1" >>confdefs.h
+
+ have_uuid=yes
+ ac_cv_have_uuid_generate_time_safe=yes
fi
@@ -14302,6 +14308,7 @@ if test "x$ac_cv_header_uuid_uuid_h" = xyes
then :
printf "%s\n" "#define HAVE_UUID_UUID_H 1" >>confdefs.h
+ ac_cv_have_uuid_uuid_h=yes
py_check_lib_save_LIBS=$LIBS
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for uuid_generate_time in -luuid" >&5
printf %s "checking for uuid_generate_time in -luuid... " >&6; }
@@ -14399,8 +14406,9 @@ fi
printf "%s\n" "$ac_cv_lib_uuid_uuid_generate_time_safe" >&6; }
if test "x$ac_cv_lib_uuid_uuid_generate_time_safe" = xyes
then :
- have_uuid=yes
- printf "%s\n" "#define HAVE_UUID_GENERATE_TIME_SAFE 1" >>confdefs.h
+
+ have_uuid=yes
+ ac_cv_have_uuid_generate_time_safe=yes
fi
@@ -14431,10 +14439,16 @@ else
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5
printf "%s\n" "yes" >&6; }
have_uuid=yes
- printf "%s\n" "#define HAVE_UUID_H 1" >>confdefs.h
-
- printf "%s\n" "#define HAVE_UUID_GENERATE_TIME_SAFE 1" >>confdefs.h
-
+ ac_cv_have_uuid_generate_time_safe=yes
+ # The uuid.h file to include may be <uuid.h> *or* <uuid/uuid.h>.
+ # Since pkg-config --cflags uuid may return -I/usr/include/uuid,
+ # it's possible to write '#include <uuid.h>' in _uuidmodule.c,
+ # assuming that the compiler flags are properly updated.
+ #
+ # Ideally, we should have defined HAVE_UUID_H if and only if
+ # #include <uuid.h> can be written, *without* assuming extra
+ # include path.
+ ac_cv_have_uuid_h=yes
fi
@@ -14455,6 +14469,7 @@ if test "x$ac_cv_func_uuid_generate_time" = xyes
then :
have_uuid=yes
+ ac_cv_have_uuid_uuid_h=yes
LIBUUID_CFLAGS=${LIBUUID_CFLAGS-""}
LIBUUID_LIBS=${LIBUUID_LIBS-""}
@@ -14467,6 +14482,24 @@ done
fi
+if test "x$ac_cv_have_uuid_h" = xyes
+then :
+ printf "%s\n" "#define HAVE_UUID_H 1" >>confdefs.h
+
+fi
+if test "x$ac_cv_have_uuid_uuid_h" = xyes
+then :
+ printf "%s\n" "#define HAVE_UUID_UUID_H 1" >>confdefs.h
+
+fi
+if test "x$ac_cv_have_uuid_generate_time_safe" = xyes
+then :
+
+ printf "%s\n" "#define HAVE_UUID_GENERATE_TIME_SAFE 1" >>confdefs.h
+
+
+fi
+
# gh-124228: While the libuuid library is available on NetBSD, it supports only UUID version 4.
# This restriction inhibits the proper generation of time-based UUIDs.
if test "$ac_sys_system" = "NetBSD"; then
@@ -14480,6 +14513,164 @@ then :
have_uuid=no
fi
+# gh-132710: The UUID node is fetched by using libuuid when possible
+# and cached. While the node is constant within the same process,
+# different interpreters may have different values as libuuid may
+# randomize the node value if the latter cannot be deduced.
+#
+# Consumers may define HAVE_UUID_GENERATE_TIME_SAFE_STABLE_MAC
+# to indicate that libuuid is unstable and should not be relied
+# upon to deduce the MAC address.
+
+
+if test "$have_uuid" = "yes" -a "$HAVE_UUID_GENERATE_TIME_SAFE" = "1"
+then
+ { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking if uuid_generate_time_safe() node value is stable" >&5
+printf %s "checking if uuid_generate_time_safe() node value is stable... " >&6; }
+ save_CFLAGS=$CFLAGS
+save_CPPFLAGS=$CPPFLAGS
+save_LDFLAGS=$LDFLAGS
+save_LIBS=$LIBS
+
+
+ # Be sure to add the extra include path if we used pkg-config
+ # as HAVE_UUID_H may be set even though <uuid.h> is only reachable
+ # by adding extra -I flags.
+ #
+ # If the following script does not compile, we simply assume that
+ # libuuid is missing.
+ CFLAGS="$CFLAGS $LIBUUID_CFLAGS"
+ LIBS="$LIBS $LIBUUID_LIBS"
+ if test "$cross_compiling" = yes
+then :
+
+
+else case e in #(
+ e) cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+ #include <inttypes.h> // PRIu64
+ #include <stdint.h> // uint64_t
+ #include <stdio.h> // fopen(), fclose()
+
+ #ifdef HAVE_UUID_H
+ #include <uuid.h>
+ #else
+ #include <uuid/uuid.h>
+ #endif
+
+ #define ERR 1
+ int main(void) {
+ uuid_t uuid; // unsigned char[16]
+ (void)uuid_generate_time_safe(uuid);
+ uint64_t node = 0;
+ for (size_t i = 0; i < 6; i++) {
+ node |= (uint64_t)uuid[15 - i] << (8 * i);
+ }
+ FILE *fp = fopen("conftest.out", "w");
+ if (fp == NULL) {
+ return ERR;
+ }
+ int rc = fprintf(fp, "%" PRIu64 "\n", node) >= 0;
+ rc |= fclose(fp);
+ return rc == 0 ? 0 : ERR;
+ }
+_ACEOF
+if ac_fn_c_try_run "$LINENO"
+then :
+
+ py_cv_uuid_node1=`cat conftest.out`
+
+fi
+rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \
+ conftest.$ac_objext conftest.beam conftest.$ac_ext ;;
+esac
+fi
+
+CFLAGS=$save_CFLAGS
+CPPFLAGS=$save_CPPFLAGS
+LDFLAGS=$save_LDFLAGS
+LIBS=$save_LIBS
+
+
+ save_CFLAGS=$CFLAGS
+save_CPPFLAGS=$CPPFLAGS
+save_LDFLAGS=$LDFLAGS
+save_LIBS=$LIBS
+
+
+ # Be sure to add the extra include path if we used pkg-config
+ # as HAVE_UUID_H may be set even though <uuid.h> is only reachable
+ # by adding extra -I flags.
+ #
+ # If the following script does not compile, we simply assume that
+ # libuuid is missing.
+ CFLAGS="$CFLAGS $LIBUUID_CFLAGS"
+ LIBS="$LIBS $LIBUUID_LIBS"
+ if test "$cross_compiling" = yes
+then :
+
+
+else case e in #(
+ e) cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+/* end confdefs.h. */
+
+ #include <inttypes.h> // PRIu64
+ #include <stdint.h> // uint64_t
+ #include <stdio.h> // fopen(), fclose()
+
+ #ifdef HAVE_UUID_H
+ #include <uuid.h>
+ #else
+ #include <uuid/uuid.h>
+ #endif
+
+ #define ERR 1
+ int main(void) {
+ uuid_t uuid; // unsigned char[16]
+ (void)uuid_generate_time_safe(uuid);
+ uint64_t node = 0;
+ for (size_t i = 0; i < 6; i++) {
+ node |= (uint64_t)uuid[15 - i] << (8 * i);
+ }
+ FILE *fp = fopen("conftest.out", "w");
+ if (fp == NULL) {
+ return ERR;
+ }
+ int rc = fprintf(fp, "%" PRIu64 "\n", node) >= 0;
+ rc |= fclose(fp);
+ return rc == 0 ? 0 : ERR;
+ }
+_ACEOF
+if ac_fn_c_try_run "$LINENO"
+then :
+
+ py_cv_uuid_node2=`cat conftest.out`
+
+fi
+rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \
+ conftest.$ac_objext conftest.beam conftest.$ac_ext ;;
+esac
+fi
+
+CFLAGS=$save_CFLAGS
+CPPFLAGS=$save_CPPFLAGS
+LDFLAGS=$save_LDFLAGS
+LIBS=$save_LIBS
+
+
+ if test -n "$py_cv_uuid_node1" -a "$py_cv_uuid_node1" = "$py_cv_uuid_node2"
+ then
+ printf "%s\n" "#define HAVE_UUID_GENERATE_TIME_SAFE_STABLE_MAC 1" >>confdefs.h
+
+ { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: stable" >&5
+printf "%s\n" "stable" >&6; }
+ else
+ { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: unstable" >&5
+printf "%s\n" "unstable" >&6; }
+ fi
+fi
+
# 'Real Time' functions on Solaris
# posix4 on Solaris 2.6
# pthread (first!) on Linux
@@ -19268,6 +19459,12 @@ then :
printf "%s\n" "#define HAVE_GETLOGIN 1" >>confdefs.h
fi
+ac_fn_c_check_func "$LINENO" "getlogin_r" "ac_cv_func_getlogin_r"
+if test "x$ac_cv_func_getlogin_r" = xyes
+then :
+ printf "%s\n" "#define HAVE_GETLOGIN_R 1" >>confdefs.h
+
+fi
ac_fn_c_check_func "$LINENO" "getpeername" "ac_cv_func_getpeername"
if test "x$ac_cv_func_getpeername" = xyes
then :
@@ -23652,6 +23849,33 @@ fi
+ac_fn_check_decl "$LINENO" "MAXLOGNAME" "ac_cv_have_decl_MAXLOGNAME" "#include <sys/params.h>
+" "$ac_c_undeclared_builtin_options" "CFLAGS"
+if test "x$ac_cv_have_decl_MAXLOGNAME" = xyes
+then :
+
+printf "%s\n" "#define HAVE_MAXLOGNAME 1" >>confdefs.h
+
+fi
+
+ac_fn_check_decl "$LINENO" "UT_NAMESIZE" "ac_cv_have_decl_UT_NAMESIZE" "#include <utmp.h>
+" "$ac_c_undeclared_builtin_options" "CFLAGS"
+if test "x$ac_cv_have_decl_UT_NAMESIZE" = xyes
+then :
+ ac_have_decl=1
+else case e in #(
+ e) ac_have_decl=0 ;;
+esac
+fi
+printf "%s\n" "#define HAVE_DECL_UT_NAMESIZE $ac_have_decl" >>confdefs.h
+if test $ac_have_decl = 1
+then :
+
+printf "%s\n" "#define HAVE_UT_NAMESIZE 1" >>confdefs.h
+
+fi
+
+
# check for openpty, login_tty, and forkpty
@@ -32366,6 +32590,14 @@ LIBHACL_CFLAGS="${LIBHACL_FLAG_I} ${LIBHACL_FLAG_D} \$(PY_STDMODULE_CFLAGS) \$(C
LIBHACL_LDFLAGS= # for now, no specific linker flags are needed
+if test "$UNIVERSAL_ARCHS" = "universal2" -o \
+ \( "$build_cpu" = "aarch64" -a "$build_vendor" = "apple" \)
+then
+ use_hacl_universal2_impl=yes
+else
+ use_hacl_universal2_impl=no
+fi
+
# The SIMD files use aligned_alloc, which is not available on older versions of
# Android.
# The *mmintrin.h headers are x86-family-specific, so can't be used on WASI.
@@ -32419,7 +32651,7 @@ printf "%s\n" "#define HACL_CAN_COMPILE_SIMD128 1" >>confdefs.h
# isn't great, so it's disabled on ARM64.
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for HACL* SIMD128 implementation" >&5
printf %s "checking for HACL* SIMD128 implementation... " >&6; }
- if test "$UNIVERSAL_ARCHS" == "universal2"; then
+ if test "$use_hacl_universal2_impl" = "yes"; then
LIBHACL_BLAKE2_SIMD128_OBJS="Modules/_hacl/Hacl_Hash_Blake2s_Simd128_universal2.o"
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: universal2" >&5
printf "%s\n" "universal2" >&6; }
@@ -32496,7 +32728,7 @@ printf "%s\n" "#define HACL_CAN_COMPILE_SIMD256 1" >>confdefs.h
# wrapped implementation if we're building for universal2.
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for HACL* SIMD256 implementation" >&5
printf %s "checking for HACL* SIMD256 implementation... " >&6; }
- if test "$UNIVERSAL_ARCHS" == "universal2"; then
+ if test "$use_hacl_universal2_impl" = "yes"; then
LIBHACL_BLAKE2_SIMD256_OBJS="Modules/_hacl/Hacl_Hash_Blake2b_Simd256_universal2.o"
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: universal2" >&5
printf "%s\n" "universal2" >&6; }
diff --git a/configure.ac b/configure.ac
index 8d939f07505..371b2e8ed73 100644
--- a/configure.ac
+++ b/configure.ac
@@ -3740,15 +3740,17 @@ dnl check for uuid dependencies
AH_TEMPLATE([HAVE_UUID_H], [Define to 1 if you have the <uuid.h> header file.])
AH_TEMPLATE([HAVE_UUID_UUID_H], [Define to 1 if you have the <uuid/uuid.h> header file.])
AH_TEMPLATE([HAVE_UUID_GENERATE_TIME_SAFE], [Define if uuid_generate_time_safe() exists.])
+AH_TEMPLATE([HAVE_UUID_GENERATE_TIME_SAFE_STABLE_MAC], [Define if uuid_generate_time_safe() is able to deduce a MAC address.])
have_uuid=missing
dnl AIX provides support for RFC4122 (uuid) in libc.a starting with AIX 6.1
dnl (anno 2007). FreeBSD and OpenBSD provides support in libc as well.
dnl Little-endian FreeBSD, OpenBSD and NetBSD needs encoding into an octet
dnl stream in big-endian byte-order
-AC_CHECK_HEADERS([uuid.h],
- [AC_CHECK_FUNCS([uuid_create uuid_enc_be],
- [have_uuid=yes
+AC_CHECK_HEADERS([uuid.h], [
+ AC_CHECK_FUNCS([uuid_create uuid_enc_be], [
+ have_uuid=yes
+ ac_cv_have_uuid_h=yes
LIBUUID_CFLAGS=${LIBUUID_CFLAGS-""}
LIBUUID_LIBS=${LIBUUID_LIBS-""}
])
@@ -3758,19 +3760,29 @@ AS_VAR_IF([have_uuid], [missing], [
PKG_CHECK_MODULES(
[LIBUUID], [uuid >= 2.20],
[dnl linux-util's libuuid has uuid_generate_time_safe() since v2.20 (2011)
- dnl and provides <uuid.h>.
+    dnl and provides <uuid.h>, assuming specific include paths are given.
have_uuid=yes
- AC_DEFINE([HAVE_UUID_H], [1])
- AC_DEFINE([HAVE_UUID_GENERATE_TIME_SAFE], [1])
+ ac_cv_have_uuid_generate_time_safe=yes
+ # The uuid.h file to include may be <uuid.h> *or* <uuid/uuid.h>.
+ # Since pkg-config --cflags uuid may return -I/usr/include/uuid,
+ # it's possible to write '#include <uuid.h>' in _uuidmodule.c,
+ # assuming that the compiler flags are properly updated.
+ #
+ # Ideally, we should have defined HAVE_UUID_H if and only if
+  # #include <uuid.h> can be written, *without* assuming an extra
+  # include path.
+ ac_cv_have_uuid_h=yes
], [
WITH_SAVE_ENV([
CPPFLAGS="$CPPFLAGS $LIBUUID_CFLAGS"
LIBS="$LIBS $LIBUUID_LIBS"
AC_CHECK_HEADERS([uuid/uuid.h], [
+ ac_cv_have_uuid_uuid_h=yes
PY_CHECK_LIB([uuid], [uuid_generate_time], [have_uuid=yes])
- PY_CHECK_LIB([uuid], [uuid_generate_time_safe],
- [have_uuid=yes
- AC_DEFINE([HAVE_UUID_GENERATE_TIME_SAFE], [1]) ]) ])
+ PY_CHECK_LIB([uuid], [uuid_generate_time_safe], [
+ have_uuid=yes
+ ac_cv_have_uuid_generate_time_safe=yes
+ ])])
AS_VAR_IF([have_uuid], [yes], [
LIBUUID_CFLAGS=${LIBUUID_CFLAGS-""}
LIBUUID_LIBS=${LIBUUID_LIBS-"-luuid"}
@@ -3785,12 +3797,19 @@ AS_VAR_IF([have_uuid], [missing], [
AC_CHECK_HEADERS([uuid/uuid.h], [
AC_CHECK_FUNC([uuid_generate_time], [
have_uuid=yes
+ ac_cv_have_uuid_uuid_h=yes
LIBUUID_CFLAGS=${LIBUUID_CFLAGS-""}
LIBUUID_LIBS=${LIBUUID_LIBS-""}
])
])
])
+AS_VAR_IF([ac_cv_have_uuid_h], [yes], [AC_DEFINE([HAVE_UUID_H], [1])])
+AS_VAR_IF([ac_cv_have_uuid_uuid_h], [yes], [AC_DEFINE([HAVE_UUID_UUID_H], [1])])
+AS_VAR_IF([ac_cv_have_uuid_generate_time_safe], [yes], [
+ AC_DEFINE([HAVE_UUID_GENERATE_TIME_SAFE], [1])
+])
+
# gh-124228: While the libuuid library is available on NetBSD, it supports only UUID version 4.
# This restriction inhibits the proper generation of time-based UUIDs.
if test "$ac_sys_system" = "NetBSD"; then
@@ -3800,6 +3819,68 @@ fi
AS_VAR_IF([have_uuid], [missing], [have_uuid=no])
+# gh-132710: The UUID node is fetched by using libuuid when possible
+# and cached. While the node is constant within the same process,
+# different interpreters may have different values as libuuid may
+# randomize the node value if the latter cannot be deduced.
+#
+# HAVE_UUID_GENERATE_TIME_SAFE_STABLE_MAC is defined below when the
+# node value reported by libuuid is observed to be stable; consumers
+# should treat a missing definition as a sign that libuuid cannot be
+# relied upon to deduce the MAC address.
+AC_DEFUN([PY_EXTRACT_UUID_GENERATE_TIME_SAFE_MAC], [WITH_SAVE_ENV([
+ # Be sure to add the extra include path if we used pkg-config
+ # as HAVE_UUID_H may be set even though <uuid.h> is only reachable
+ # by adding extra -I flags.
+ #
+ # If the following script does not compile, we simply assume that
+ # libuuid is missing.
+ CFLAGS="$CFLAGS $LIBUUID_CFLAGS"
+ LIBS="$LIBS $LIBUUID_LIBS"
+ AC_RUN_IFELSE([AC_LANG_SOURCE([[
+ #include <inttypes.h> // PRIu64
+ #include <stdint.h> // uint64_t
+ #include <stdio.h> // fopen(), fclose()
+
+ #ifdef HAVE_UUID_H
+ #include <uuid.h>
+ #else
+ #include <uuid/uuid.h>
+ #endif
+
+ #define ERR 1
+ int main(void) {
+ uuid_t uuid; // unsigned char[16]
+ (void)uuid_generate_time_safe(uuid);
+ uint64_t node = 0;
+ for (size_t i = 0; i < 6; i++) {
+ node |= (uint64_t)uuid[15 - i] << (8 * i);
+ }
+ FILE *fp = fopen("conftest.out", "w");
+ if (fp == NULL) {
+ return ERR;
+ }
+ int rc = fprintf(fp, "%" PRIu64 "\n", node) >= 0;
+ rc |= fclose(fp);
+ return rc == 0 ? 0 : ERR;
+ }]])], [
+ AS_VAR_SET([$1], [`cat conftest.out`])
+ ], [], []
+ )])])
+
+if test "$have_uuid" = "yes" -a "$HAVE_UUID_GENERATE_TIME_SAFE" = "1"
+then
+ AC_MSG_CHECKING([if uuid_generate_time_safe() node value is stable])
+ PY_EXTRACT_UUID_GENERATE_TIME_SAFE_MAC([py_cv_uuid_node1])
+ PY_EXTRACT_UUID_GENERATE_TIME_SAFE_MAC([py_cv_uuid_node2])
+ if test -n "$py_cv_uuid_node1" -a "$py_cv_uuid_node1" = "$py_cv_uuid_node2"
+ then
+ AC_DEFINE([HAVE_UUID_GENERATE_TIME_SAFE_STABLE_MAC], [1])
+ AC_MSG_RESULT([stable])
+ else
+ AC_MSG_RESULT([unstable])
+ fi
+fi
+
# 'Real Time' functions on Solaris
# posix4 on Solaris 2.6
# pthread (first!) on Linux
@@ -5138,7 +5219,7 @@ AC_CHECK_FUNCS([ \
faccessat fchmod fchmodat fchown fchownat fdopendir fdwalk fexecve \
fork fork1 fpathconf fstatat ftime ftruncate futimens futimes futimesat \
gai_strerror getegid geteuid getgid getgrent getgrgid getgrgid_r \
- getgrnam_r getgrouplist gethostname getitimer getloadavg getlogin \
+ getgrnam_r getgrouplist gethostname getitimer getloadavg getlogin getlogin_r \
getpeername getpgid getpid getppid getpriority _getpty \
getpwent getpwnam_r getpwuid getpwuid_r getresgid getresuid getrusage getsid getspent \
getspnam getuid getwd grantpt if_nameindex initgroups kill killpg lchown linkat \
@@ -5457,6 +5538,18 @@ PY_CHECK_FUNC([setgroups], [
#endif
])
+AC_CHECK_DECL([MAXLOGNAME],
+ [AC_DEFINE([HAVE_MAXLOGNAME], [1],
+ [Define if you have the 'MAXLOGNAME' constant.])],
+ [],
+  [@%:@include <sys/param.h>])
+
+AC_CHECK_DECLS([UT_NAMESIZE],
+ [AC_DEFINE([HAVE_UT_NAMESIZE], [1],
+    [Define if you have the 'UT_NAMESIZE' constant.])],
+ [],
+ [@%:@include <utmp.h>])
+
# check for openpty, login_tty, and forkpty
AC_CHECK_FUNCS([openpty], [],
@@ -7920,6 +8013,15 @@ AC_SUBST([LIBHACL_CFLAGS])
LIBHACL_LDFLAGS= # for now, no specific linker flags are needed
AC_SUBST([LIBHACL_LDFLAGS])
+dnl Check if universal2 HACL* implementation should be used.
+if test "$UNIVERSAL_ARCHS" = "universal2" -o \
+ \( "$build_cpu" = "aarch64" -a "$build_vendor" = "apple" \)
+then
+ use_hacl_universal2_impl=yes
+else
+ use_hacl_universal2_impl=no
+fi
+
# The SIMD files use aligned_alloc, which is not available on older versions of
# Android.
# The *mmintrin.h headers are x86-family-specific, so can't be used on WASI.
@@ -7936,7 +8038,7 @@ then
# available on x86_64. However, performance of the HACL SIMD128 implementation
# isn't great, so it's disabled on ARM64.
AC_MSG_CHECKING([for HACL* SIMD128 implementation])
- if test "$UNIVERSAL_ARCHS" == "universal2"; then
+ if test "$use_hacl_universal2_impl" = "yes"; then
[LIBHACL_BLAKE2_SIMD128_OBJS="Modules/_hacl/Hacl_Hash_Blake2s_Simd128_universal2.o"]
AC_MSG_RESULT([universal2])
else
@@ -7968,7 +8070,7 @@ then
# implementation requires symbols that aren't available on ARM64. Use a
# wrapped implementation if we're building for universal2.
AC_MSG_CHECKING([for HACL* SIMD256 implementation])
- if test "$UNIVERSAL_ARCHS" == "universal2"; then
+ if test "$use_hacl_universal2_impl" = "yes"; then
[LIBHACL_BLAKE2_SIMD256_OBJS="Modules/_hacl/Hacl_Hash_Blake2b_Simd256_universal2.o"]
AC_MSG_RESULT([universal2])
else
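
Besides the uuid changes, configure.ac now detects getlogin_r() and the MAXLOGNAME / UT_NAMESIZE constants. The hypothetical C sketch below shows one way these new macros could be consumed; it is not taken from this change. HAVE_UTMP_H appears in the pyconfig.h.in context further down, while HAVE_SYS_PARAM_H is assumed to be provided by the same template; the buffer sizing and fallback value are illustrative.

    /* Hypothetical consumer sketch: size the login-name buffer with
     * MAXLOGNAME or UT_NAMESIZE when available, and prefer the reentrant
     * getlogin_r() over getlogin() when HAVE_GETLOGIN_R is defined. */
    #include <stdio.h>      // printf(), snprintf()
    #include <unistd.h>     // getlogin(), getlogin_r()
    #ifdef HAVE_SYS_PARAM_H
    #  include <sys/param.h>    // MAXLOGNAME (BSD, macOS)
    #endif
    #ifdef HAVE_UTMP_H
    #  include <utmp.h>         // UT_NAMESIZE (glibc)
    #endif

    int main(void) {
    #if defined(HAVE_MAXLOGNAME)
        char name[MAXLOGNAME + 1];
    #elif defined(HAVE_UT_NAMESIZE)
        char name[UT_NAMESIZE + 1];
    #else
        char name[256];                 // arbitrary fallback size
    #endif
    #ifdef HAVE_GETLOGIN_R
        if (getlogin_r(name, sizeof(name)) != 0) {
            return 1;                   // no login name available
        }
    #else
        const char *login = getlogin();
        if (login == NULL) {
            return 1;
        }
        snprintf(name, sizeof(name), "%s", login);
    #endif
        printf("%s\n", name);
        return 0;
    }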
diff --git a/iOS/testbed/__main__.py b/iOS/testbed/__main__.py
index c05497ede3a..1146bf3b988 100644
--- a/iOS/testbed/__main__.py
+++ b/iOS/testbed/__main__.py
@@ -127,7 +127,7 @@ async def async_check_output(*args, **kwargs):
async def select_simulator_device():
# List the testing simulators, in JSON format
raw_json = await async_check_output(
- "xcrun", "simctl", "--set", "testing", "list", "-j"
+ "xcrun", "simctl", "list", "-j"
)
json_data = json.loads(raw_json)
diff --git a/pyconfig.h.in b/pyconfig.h.in
index c91facbedf9..65a2c55217c 100644
--- a/pyconfig.h.in
+++ b/pyconfig.h.in
@@ -267,6 +267,10 @@
*/
#undef HAVE_DECL_TZNAME
+/* Define to 1 if you have the declaration of 'UT_NAMESIZE', and to 0 if you
+ don't. */
+#undef HAVE_DECL_UT_NAMESIZE
+
/* Define to 1 if you have the device macros. */
#undef HAVE_DEVICE_MACROS
@@ -539,6 +543,9 @@
/* Define to 1 if you have the 'getlogin' function. */
#undef HAVE_GETLOGIN
+/* Define to 1 if you have the 'getlogin_r' function. */
+#undef HAVE_GETLOGIN_R
+
/* Define to 1 if you have the 'getnameinfo' function. */
#undef HAVE_GETNAMEINFO
@@ -807,6 +814,9 @@
/* Define this if you have the makedev macro. */
#undef HAVE_MAKEDEV
+/* Define if you have the 'MAXLOGNAME' constant. */
+#undef HAVE_MAXLOGNAME
+
/* Define to 1 if you have the 'mbrtowc' function. */
#undef HAVE_MBRTOWC
@@ -1575,6 +1585,9 @@
/* Define to 1 if you have the <utmp.h> header file. */
#undef HAVE_UTMP_H
+/* Define if you have the 'UT_NAMESIZE' constant. */
+#undef HAVE_UT_NAMESIZE
+
/* Define to 1 if you have the 'uuid_create' function. */
#undef HAVE_UUID_CREATE
@@ -1584,6 +1597,9 @@
/* Define if uuid_generate_time_safe() exists. */
#undef HAVE_UUID_GENERATE_TIME_SAFE
+/* Define if uuid_generate_time_safe() is able to deduce a MAC address. */
+#undef HAVE_UUID_GENERATE_TIME_SAFE_STABLE_MAC
+
/* Define to 1 if you have the <uuid.h> header file. */
#undef HAVE_UUID_H
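
The pyconfig.h.in template only declares the new macros; how they are consumed is not part of this diff. A minimal, assumed consumer sketch in C is given below: it selects whichever uuid header is reachable without relying on pkg-config having added -I/usr/include/uuid, and guards the libuuid call on HAVE_UUID_GENERATE_TIME_SAFE. Everything other than the macro names themselves is illustrative.

    /* Sketch of the assumed header-selection and feature-guard pattern. */
    #include <stdio.h>

    #if defined(HAVE_UUID_H)
    #  include <uuid.h>         /* util-linux layout, or BSD libc uuid.h */
    #elif defined(HAVE_UUID_UUID_H)
    #  include <uuid/uuid.h>    /* e.g. /usr/include/uuid/uuid.h */
    #endif

    int main(void) {
    #if defined(HAVE_UUID_GENERATE_TIME_SAFE)
        uuid_t out;                                   /* unsigned char[16] */
        int safe = uuid_generate_time_safe(out);      /* 0 => generated safely */
        printf("generated time-based UUID (safe=%d)\n", safe);
    #else
        printf("uuid_generate_time_safe() not available\n");
    #endif
        return 0;
    }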