-rw-r--r--  .devcontainer/devcontainer.json | 2
-rw-r--r--  .gitignore | 5
-rw-r--r--  Android/README.md | 4
-rw-r--r--  Android/android-env.sh | 6
-rwxr-xr-x  Android/android.py | 231
-rw-r--r--  Android/testbed/app/build.gradle.kts | 20
-rw-r--r--  Android/testbed/app/src/androidTest/java/org/python/testbed/PythonSuite.kt | 8
-rw-r--r--  Android/testbed/app/src/main/java/org/python/testbed/MainActivity.kt | 27
-rw-r--r--  Android/testbed/app/src/main/python/android_testbed_main.py (renamed from Android/testbed/app/src/main/python/main.py) | 20
-rw-r--r--  Android/testbed/build.gradle.kts | 2
-rw-r--r--  Android/testbed/gradle/wrapper/gradle-wrapper.properties | 2
-rw-r--r--  Doc/c-api/lifecycle.rst | 6
-rw-r--r--  Doc/c-api/module.rst | 42
-rw-r--r--  Doc/c-api/stable.rst | 1
-rw-r--r--  Doc/c-api/unicode.rst | 4
-rw-r--r--  Doc/deprecations/pending-removal-in-3.19.rst | 16
-rw-r--r--  Doc/extending/windows.rst | 2
-rw-r--r--  Doc/howto/isolating-extensions.rst | 4
-rw-r--r--  Doc/howto/urllib2.rst | 86
-rw-r--r--  Doc/library/argparse.rst | 15
-rw-r--r--  Doc/library/ast.rst | 303
-rw-r--r--  Doc/library/compileall.rst | 13
-rw-r--r--  Doc/library/compression.zstd.rst | 51
-rw-r--r--  Doc/library/csv.rst | 10
-rw-r--r--  Doc/library/dbm.rst | 43
-rw-r--r--  Doc/library/doctest.rst | 133
-rw-r--r--  Doc/library/hashlib.rst | 7
-rw-r--r--  Doc/library/math.rst | 20
-rw-r--r--  Doc/library/os.path.rst | 32
-rw-r--r--  Doc/library/shelve.rst | 16
-rw-r--r--  Doc/library/socket.rst | 6
-rw-r--r--  Doc/library/string.rst | 2
-rw-r--r--  Doc/library/tarfile.rst | 20
-rw-r--r--  Doc/using/android.rst | 9
-rw-r--r--  Doc/whatsnew/3.14.rst | 15
-rw-r--r--  Doc/whatsnew/3.15.rst | 76
-rw-r--r--  Include/abstract.h | 14
-rw-r--r--  Include/boolobject.h | 13
-rw-r--r--  Include/internal/mimalloc/mimalloc/internal.h | 4
-rw-r--r--  Include/internal/mimalloc/mimalloc/types.h | 29
-rw-r--r--  Include/internal/pycore_ceval.h | 3
-rw-r--r--  Include/internal/pycore_crossinterp.h | 35
-rw-r--r--  Include/internal/pycore_lock.h | 3
-rw-r--r--  Include/internal/pycore_magic_number.h | 3
-rw-r--r--  Include/internal/pycore_stackref.h | 42
-rw-r--r--  Include/object.h | 9
-rw-r--r--  InternalDocs/exception_handling.md | 10
-rw-r--r--  Lib/_pydecimal.py | 4
-rw-r--r--  Lib/argparse.py | 2
-rw-r--r--  Lib/ast.py | 28
-rw-r--r--  Lib/code.py | 2
-rw-r--r--  Lib/dbm/dumb.py | 32
-rw-r--r--  Lib/dbm/sqlite3.py | 4
-rw-r--r--  Lib/doctest.py | 113
-rw-r--r--  Lib/email/_header_value_parser.py | 4
-rw-r--r--  Lib/fractions.py | 4
-rw-r--r--  Lib/genericpath.py | 11
-rw-r--r--  Lib/hashlib.py | 12
-rw-r--r--  Lib/idlelib/News3.txt | 7
-rw-r--r--  Lib/json/encoder.py | 5
-rw-r--r--  Lib/ntpath.py | 38
-rw-r--r--  Lib/posixpath.py | 57
-rw-r--r--  Lib/shelve.py | 5
-rw-r--r--  Lib/tarfile.py | 163
-rw-r--r--  Lib/test/_code_definitions.py | 20
-rw-r--r--  Lib/test/libregrtest/main.py | 7
-rw-r--r--  Lib/test/libregrtest/setup.py | 9
-rw-r--r--  Lib/test/subprocessdata/fd_status.py | 4
-rw-r--r--  Lib/test/support/__init__.py | 4
-rw-r--r--  Lib/test/support/interpreters/__init__.py | 31
-rw-r--r--  Lib/test/test_ast/test_ast.py | 95
-rw-r--r--  Lib/test/test_asyncgen.py | 9
-rw-r--r--  Lib/test/test_capi/test_opt.py | 11
-rw-r--r--  Lib/test/test_class.py | 1
-rw-r--r--  Lib/test/test_code.py | 48
-rw-r--r--  Lib/test/test_csv.py | 48
-rw-r--r--  Lib/test/test_dbm.py | 61
-rw-r--r--  Lib/test/test_dbm_gnu.py | 27
-rw-r--r--  Lib/test/test_decimal.py | 2
-rw-r--r--  Lib/test/test_dis.py | 2
-rw-r--r--  Lib/test/test_doctest/test_doctest.py | 444
-rw-r--r--  Lib/test/test_email/test__header_value_parser.py | 45
-rw-r--r--  Lib/test/test_external_inspection.py | 76
-rw-r--r--  Lib/test/test_fractions.py | 8
-rw-r--r--  Lib/test/test_free_threading/test_itertools.py (renamed from Lib/test/test_free_threading/test_itertools_batched.py) | 32
-rw-r--r--  Lib/test/test_grammar.py | 14
-rw-r--r--  Lib/test/test_hashlib.py | 95
-rw-r--r--  Lib/test/test_interpreters/test_api.py | 541
-rw-r--r--  Lib/test/test_io.py | 31
-rw-r--r--  Lib/test/test_json/test_dump.py | 8
-rw-r--r--  Lib/test/test_math.py | 22
-rw-r--r--  Lib/test/test_monitoring.py | 15
-rw-r--r--  Lib/test/test_ntpath.py | 214
-rw-r--r--  Lib/test/test_peepholer.py | 92
-rw-r--r--  Lib/test/test_posixpath.py | 252
-rw-r--r--  Lib/test/test_pyrepl/test_pyrepl.py | 11
-rw-r--r--  Lib/test/test_queue.py | 20
-rw-r--r--  Lib/test/test_random.py | 28
-rw-r--r--  Lib/test/test_regrtest.py | 49
-rw-r--r--  Lib/test/test_ssl.py | 43
-rw-r--r--  Lib/test/test_sys.py | 26
-rw-r--r--  Lib/test/test_tarfile.py | 310
-rw-r--r--  Lib/test/test_traceback.py | 29
-rw-r--r--  Lib/test/test_zstd.py | 207
-rw-r--r--  Lib/traceback.py | 13
-rw-r--r--  Misc/ACKS | 1
-rw-r--r--  Misc/NEWS.d/next/Build/2025-05-30-11-02-30.gh-issue-134923.gBkRg4.rst | 3
-rw-r--r--  Misc/NEWS.d/next/C_API/2025-05-30-11-33-17.gh-issue-134745.GN-zk2.rst | 3
-rw-r--r--  Misc/NEWS.d/next/C_API/2025-06-02-13-19-22.gh-issue-134989.sDDyBN.rst | 2
-rw-r--r--  Misc/NEWS.d/next/C_API/2025-06-05-11-06-07.gh-issue-134989.74p4ud.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-05-28-23-58-50.gh-issue-117852.BO9g7z.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-05-30-15-56-19.gh-issue-134908.3a7PxM.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Core_and_Builtins/2025-05-30-18-09-54.gh-issue-134889.Ic9UM-.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-03-09-03-13-41.gh-issue-130999.tBRBVB.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-03-13-20-48-58.gh-issue-123471.cM4w4f.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Library/2025-04-07-06-41-54.gh-issue-131884.ym9BJN.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Library/2025-04-21-01-03-15.gh-issue-127081.WXRliX.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-04-25-16-06-53.gh-issue-132908.wV5rja.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-01-10-56-44.gh-issue-132813.rKurvp.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-08-13-43-19.gh-issue-133489.9eGS1Z.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-15-00-27-09.gh-issue-134004.e8k4-R.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-18-23-46-21.gh-issue-134152.30HwbX.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-26-14-04-39.gh-issue-134696.P04xUa.rst | 5
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-28-20-49-29.gh-issue-134857.dVYXVO.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-29-06-53-40.gh-issue-134885.-_L22o.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-29-17-39-13.gh-issue-108885.MegCRA.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-30-13-07-29.gh-issue-134718.9Qvhxn.rst | 2
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-30-18-13-48.gh-issue-134718.5FEspx.rst | 1
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-31-12-08-12.gh-issue-134970.lgSaxq.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-05-31-15-49-46.gh-issue-134978.mXXuvW.rst | 7
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-01-15-13-07.gh-issue-66234.Jw7OdC.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-02-14-28-30.gh-issue-130662.EIgIR8.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Library/2025-06-02-14-36-28.gh-issue-130662.Gpr2GB.rst | 3
-rw-r--r--  Misc/NEWS.d/next/Security/2025-06-02-11-32-23.gh-issue-135034.RLGjbp.rst | 6
-rw-r--r--  Misc/NEWS.d/next/Windows/2025-06-03-18-26-54.gh-issue-135099.Q9usKm.rst | 2
-rw-r--r--  Modules/_csv.c | 73
-rw-r--r--  Modules/_gdbmmodule.c | 8
-rw-r--r--  Modules/_hashopenssl.c | 153
-rw-r--r--  Modules/_interpchannelsmodule.c | 8
-rw-r--r--  Modules/_interpqueuesmodule.c | 8
-rw-r--r--  Modules/_interpretersmodule.c | 414
-rw-r--r--  Modules/_io/textio.c | 15
-rw-r--r--  Modules/_json.c | 33
-rw-r--r--  Modules/_randommodule.c | 20
-rw-r--r--  Modules/_remote_debugging_module.c | 8
-rw-r--r--  Modules/_testinternalcapi.c | 3
-rw-r--r--  Modules/_zstd/_zstdmodule.c | 89
-rw-r--r--  Modules/_zstd/_zstdmodule.h | 11
-rw-r--r--  Modules/_zstd/clinic/compressor.c.h | 41
-rw-r--r--  Modules/_zstd/compressor.c | 169
-rw-r--r--  Modules/_zstd/decompressor.c | 67
-rw-r--r--  Modules/_zstd/zstddict.c | 1
-rw-r--r--  Modules/blake2module.c | 30
-rw-r--r--  Modules/clinic/_hashopenssl.c.h | 441
-rw-r--r--  Modules/clinic/_randommodule.c.h | 10
-rw-r--r--  Modules/clinic/blake2module.c.h | 98
-rw-r--r--  Modules/clinic/mathmodule.c.h | 70
-rw-r--r--  Modules/clinic/md5module.c.h | 34
-rw-r--r--  Modules/clinic/sha1module.c.h | 34
-rw-r--r--  Modules/clinic/sha2module.c.h | 130
-rw-r--r--  Modules/clinic/sha3module.c.h | 128
-rw-r--r--  Modules/hashlib.h | 38
-rw-r--r--  Modules/itertoolsmodule.c | 27
-rw-r--r--  Modules/mathmodule.c | 40
-rw-r--r--  Modules/md5module.c | 13
-rw-r--r--  Modules/posixmodule.c | 24
-rw-r--r--  Modules/sha1module.c | 12
-rw-r--r--  Modules/sha2module.c | 49
-rw-r--r--  Modules/sha3module.c | 21
-rw-r--r--  Objects/genobject.c | 49
-rw-r--r--  Objects/longobject.c | 13
-rw-r--r--  PCbuild/_testclinic_limited.vcxproj | 1
-rw-r--r--  PCbuild/pyproject.props | 11
-rw-r--r--  Parser/parser.c | 2
-rw-r--r--  Python/asm_trampoline.S | 22
-rw-r--r--  Python/bytecodes.c | 97
-rw-r--r--  Python/ceval.c | 30
-rw-r--r--  Python/crossinterp.c | 449
-rw-r--r--  Python/executor_cases.c.h | 25
-rw-r--r--  Python/flowgraph.c | 20
-rw-r--r--  Python/generated_cases.c.h | 93
-rw-r--r--  Python/import.c | 4
-rw-r--r--  Python/lock.c | 3
-rw-r--r--  Python/optimizer_bytecodes.c | 11
-rw-r--r--  Python/optimizer_cases.c.h | 12
-rw-r--r--  Python/parking_lot.c | 22
-rw-r--r--  Python/perf_jit_trampoline.c | 4
-rw-r--r--  Python/stackrefs.c | 4
-rw-r--r--  Python/thread.c | 82
-rw-r--r--  Python/thread_nt.h | 92
-rw-r--r--  Python/thread_pthread.h | 392
-rw-r--r--  Tools/cases_generator/analyzer.py | 2
-rw-r--r--  Tools/cases_generator/stack.py | 2
-rw-r--r--  Tools/msi/freethreaded/freethreaded_files.wxs | 2
-rw-r--r--  Tools/msi/lib/lib_files.wxs | 2
-rw-r--r--  Tools/peg_generator/pegen/c_generator.py | 2
-rwxr-xr-x  configure | 45
-rw-r--r--  configure.ac | 27
-rw-r--r--  iOS/testbed/__main__.py | 2
-rw-r--r--  pyconfig.h.in | 13
200 files changed, 6033 insertions, 2815 deletions
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index fd1d7151518..8e09808f08b 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -1,5 +1,5 @@
{
- "image": "ghcr.io/python/devcontainer:2025.05.25.15232270922",
+ "image": "ghcr.io/python/devcontainer:2025.05.29.15334414373",
"onCreateCommand": [
// Install common tooling.
"dnf",
diff --git a/.gitignore b/.gitignore
index 2a6f249275c..cdb0352e0a8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -171,5 +171,10 @@ Python/frozen_modules/MANIFEST
/python
!/Python/
+# People's custom https://docs.anthropic.com/en/docs/claude-code/memory configs.
+/.claude/
+CLAUDE.local.md
+
+#### main branch only stuff below this line, things to backport go above. ####
# main branch only: ABI files are not checked/maintained.
Doc/data/python*.abi
diff --git a/Android/README.md b/Android/README.md
index 6cabd6ba5d6..c42eb627006 100644
--- a/Android/README.md
+++ b/Android/README.md
@@ -156,6 +156,10 @@ repository's `Lib` directory will be picked up immediately. Changes in C files,
and architecture-specific files such as sysconfigdata, will not take effect
until you re-run `android.py make-host` or `build`.
+The testbed app can also be used to test third-party packages. For more details,
+run `android.py test --help`, paying attention to the options `--site-packages`,
+`--cwd`, `-c` and `-m`.
+
## Using in your own app
diff --git a/Android/android-env.sh b/Android/android-env.sh
index bab4130c9e9..7b381a013cf 100644
--- a/Android/android-env.sh
+++ b/Android/android-env.sh
@@ -3,7 +3,7 @@
: "${HOST:?}" # GNU target triplet
# You may also override the following:
-: "${api_level:=24}" # Minimum Android API level the build will run on
+: "${ANDROID_API_LEVEL:=24}" # Minimum Android API level the build will run on
: "${PREFIX:-}" # Path in which to find required libraries
@@ -24,7 +24,7 @@ fail() {
# * https://android.googlesource.com/platform/ndk/+/ndk-rXX-release/docs/BuildSystemMaintainers.md
# where XX is the NDK version. Do a diff against the version you're upgrading from, e.g.:
# https://android.googlesource.com/platform/ndk/+/ndk-r25-release..ndk-r26-release/docs/BuildSystemMaintainers.md
-ndk_version=27.1.12297006
+ndk_version=27.2.12479018
ndk=$ANDROID_HOME/ndk/$ndk_version
if ! [ -e "$ndk" ]; then
@@ -43,7 +43,7 @@ fi
toolchain=$(echo "$ndk"/toolchains/llvm/prebuilt/*)
export AR="$toolchain/bin/llvm-ar"
export AS="$toolchain/bin/llvm-as"
-export CC="$toolchain/bin/${clang_triplet}${api_level}-clang"
+export CC="$toolchain/bin/${clang_triplet}${ANDROID_API_LEVEL}-clang"
export CXX="${CC}++"
export LD="$toolchain/bin/ld"
export NM="$toolchain/bin/llvm-nm"
diff --git a/Android/android.py b/Android/android.py
index 3f48b42aa17..551168fc4b2 100755
--- a/Android/android.py
+++ b/Android/android.py
@@ -14,7 +14,7 @@ from asyncio import wait_for
from contextlib import asynccontextmanager
from datetime import datetime, timezone
from glob import glob
-from os.path import basename, relpath
+from os.path import abspath, basename, relpath
from pathlib import Path
from subprocess import CalledProcessError
from tempfile import TemporaryDirectory
@@ -22,9 +22,13 @@ from tempfile import TemporaryDirectory
SCRIPT_NAME = Path(__file__).name
ANDROID_DIR = Path(__file__).resolve().parent
-CHECKOUT = ANDROID_DIR.parent
+PYTHON_DIR = ANDROID_DIR.parent
+in_source_tree = (
+ ANDROID_DIR.name == "Android" and (PYTHON_DIR / "pyconfig.h.in").exists()
+)
+
TESTBED_DIR = ANDROID_DIR / "testbed"
-CROSS_BUILD_DIR = CHECKOUT / "cross-build"
+CROSS_BUILD_DIR = PYTHON_DIR / "cross-build"
HOSTS = ["aarch64-linux-android", "x86_64-linux-android"]
APP_ID = "org.python.testbed"
@@ -76,39 +80,68 @@ def run(command, *, host=None, env=None, log=True, **kwargs):
kwargs.setdefault("check", True)
if env is None:
env = os.environ.copy()
- original_env = env.copy()
if host:
- env_script = ANDROID_DIR / "android-env.sh"
- env_output = subprocess.run(
- f"set -eu; "
- f"HOST={host}; "
- f"PREFIX={subdir(host)}/prefix; "
- f". {env_script}; "
- f"export",
- check=True, shell=True, text=True, stdout=subprocess.PIPE
- ).stdout
-
- for line in env_output.splitlines():
- # We don't require every line to match, as there may be some other
- # output from installing the NDK.
- if match := re.search(
- "^(declare -x |export )?(\\w+)=['\"]?(.*?)['\"]?$", line
- ):
- key, value = match[2], match[3]
- if env.get(key) != value:
- print(line)
- env[key] = value
-
- if env == original_env:
- raise ValueError(f"Found no variables in {env_script.name} output:\n"
- + env_output)
+ host_env = android_env(host)
+ print_env(host_env)
+ env.update(host_env)
if log:
- print(">", " ".join(map(str, command)))
+ print(">", join_command(command))
return subprocess.run(command, env=env, **kwargs)
+# Format a command so it can be copied into a shell. Like shlex.join, but also
+# accepts arguments which are Paths, or a single string/Path outside of a list.
+def join_command(args):
+ if isinstance(args, (str, Path)):
+ return str(args)
+ else:
+ return shlex.join(map(str, args))
+
+
+# Format the environment so it can be pasted into a shell.
+def print_env(env):
+ for key, value in sorted(env.items()):
+ print(f"export {key}={shlex.quote(value)}")
+
+
+def android_env(host):
+ if host:
+ prefix = subdir(host) / "prefix"
+ else:
+ prefix = ANDROID_DIR / "prefix"
+ sysconfig_files = prefix.glob("lib/python*/_sysconfigdata__android_*.py")
+ sysconfig_filename = next(sysconfig_files).name
+ host = re.fullmatch(r"_sysconfigdata__android_(.+).py", sysconfig_filename)[1]
+
+ env_script = ANDROID_DIR / "android-env.sh"
+ env_output = subprocess.run(
+ f"set -eu; "
+ f"export HOST={host}; "
+ f"PREFIX={prefix}; "
+ f". {env_script}; "
+ f"export",
+ check=True, shell=True, capture_output=True, encoding='utf-8',
+ ).stdout
+
+ env = {}
+ for line in env_output.splitlines():
+ # We don't require every line to match, as there may be some other
+ # output from installing the NDK.
+ if match := re.search(
+ "^(declare -x |export )?(\\w+)=['\"]?(.*?)['\"]?$", line
+ ):
+ key, value = match[2], match[3]
+ if os.environ.get(key) != value:
+ env[key] = value
+
+ if not env:
+ raise ValueError(f"Found no variables in {env_script.name} output:\n"
+ + env_output)
+ return env
+
+
def build_python_path():
"""The path to the build Python binary."""
build_dir = subdir("build")
@@ -127,7 +160,7 @@ def configure_build_python(context):
clean("build")
os.chdir(subdir("build", create=True))
- command = [relpath(CHECKOUT / "configure")]
+ command = [relpath(PYTHON_DIR / "configure")]
if context.args:
command.extend(context.args)
run(command)
@@ -139,12 +172,13 @@ def make_build_python(context):
def unpack_deps(host, prefix_dir):
+ os.chdir(prefix_dir)
deps_url = "https://github.com/beeware/cpython-android-source-deps/releases/download"
- for name_ver in ["bzip2-1.0.8-2", "libffi-3.4.4-3", "openssl-3.0.15-4",
+ for name_ver in ["bzip2-1.0.8-3", "libffi-3.4.4-3", "openssl-3.0.15-4",
"sqlite-3.49.1-0", "xz-5.4.6-1"]:
filename = f"{name_ver}-{host}.tar.gz"
download(f"{deps_url}/{name_ver}/{filename}")
- shutil.unpack_archive(filename, prefix_dir)
+ shutil.unpack_archive(filename)
os.remove(filename)
@@ -167,7 +201,7 @@ def configure_host_python(context):
os.chdir(host_dir)
command = [
# Basic cross-compiling configuration
- relpath(CHECKOUT / "configure"),
+ relpath(PYTHON_DIR / "configure"),
f"--host={context.host}",
f"--build={sysconfig.get_config_var('BUILD_GNU_TYPE')}",
f"--with-build-python={build_python_path()}",
@@ -196,9 +230,12 @@ def make_host_python(context):
for pattern in ("include/python*", "lib/libpython*", "lib/python*"):
delete_glob(f"{prefix_dir}/{pattern}")
+ # The Android environment variables were already captured in the Makefile by
+ # `configure`, and passing them again when running `make` may cause some
+ # flags to be duplicated. So we don't use the `host` argument here.
os.chdir(host_dir)
- run(["make", "-j", str(os.cpu_count())], host=context.host)
- run(["make", "install", f"prefix={prefix_dir}"], host=context.host)
+ run(["make", "-j", str(os.cpu_count())])
+ run(["make", "install", f"prefix={prefix_dir}"])
def build_all(context):
@@ -228,7 +265,12 @@ def setup_sdk():
if not all((android_home / "licenses" / path).exists() for path in [
"android-sdk-arm-dbt-license", "android-sdk-license"
]):
- run([sdkmanager, "--licenses"], text=True, input="y\n" * 100)
+ run(
+ [sdkmanager, "--licenses"],
+ text=True,
+ capture_output=True,
+ input="y\n" * 100,
+ )
# Gradle may install this automatically, but we can't rely on that because
# we need to run adb within the logcat task.
@@ -474,24 +516,49 @@ async def gradle_task(context):
task_prefix = "connected"
env["ANDROID_SERIAL"] = context.connected
+ hidden_output = []
+
+ def log(line):
+ # Gradle may take several minutes to install SDK packages, so it's worth
+ # showing those messages even in non-verbose mode.
+ if context.verbose or line.startswith('Preparing "Install'):
+ sys.stdout.write(line)
+ else:
+ hidden_output.append(line)
+
+ if context.command:
+ mode = "-c"
+ module = context.command
+ else:
+ mode = "-m"
+ module = context.module or "test"
+
args = [
gradlew, "--console", "plain", f"{task_prefix}DebugAndroidTest",
- "-Pandroid.testInstrumentationRunnerArguments.pythonArgs="
- + shlex.join(context.args),
+ ] + [
+ # Build-time properties
+ f"-Ppython.{name}={value}"
+ for name, value in [
+ ("sitePackages", context.site_packages), ("cwd", context.cwd)
+ ] if value
+ ] + [
+ # Runtime properties
+ f"-Pandroid.testInstrumentationRunnerArguments.python{name}={value}"
+ for name, value in [
+ ("Mode", mode), ("Module", module), ("Args", join_command(context.args))
+ ] if value
]
- hidden_output = []
+ if context.verbose >= 2:
+ args.append("--info")
+ log("> " + join_command(args))
+
try:
async with async_process(
*args, cwd=TESTBED_DIR, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
) as process:
while line := (await process.stdout.readline()).decode(*DECODE_ARGS):
- # Gradle may take several minutes to install SDK packages, so
- # it's worth showing those messages even in non-verbose mode.
- if context.verbose or line.startswith('Preparing "Install'):
- sys.stdout.write(line)
- else:
- hidden_output.append(line)
+ log(line)
status = await wait_for(process.wait(), timeout=1)
if status == 0:
@@ -604,6 +671,10 @@ def package(context):
print(f"Wrote {package_path}")
+def env(context):
+ print_env(android_env(getattr(context, "host", None)))
+
+
# Handle SIGTERM the same way as SIGINT. This ensures that if we're terminated
# by the buildbot worker, we'll make an attempt to clean up our subprocesses.
def install_signal_handler():
@@ -615,36 +686,41 @@ def install_signal_handler():
def parse_args():
parser = argparse.ArgumentParser()
- subcommands = parser.add_subparsers(dest="subcommand")
+ subcommands = parser.add_subparsers(dest="subcommand", required=True)
# Subcommands
- build = subcommands.add_parser("build", help="Build everything")
- configure_build = subcommands.add_parser("configure-build",
- help="Run `configure` for the "
- "build Python")
- make_build = subcommands.add_parser("make-build",
- help="Run `make` for the build Python")
- configure_host = subcommands.add_parser("configure-host",
- help="Run `configure` for Android")
- make_host = subcommands.add_parser("make-host",
- help="Run `make` for Android")
+ build = subcommands.add_parser(
+ "build", help="Run configure-build, make-build, configure-host and "
+ "make-host")
+ configure_build = subcommands.add_parser(
+ "configure-build", help="Run `configure` for the build Python")
subcommands.add_parser(
- "clean", help="Delete all build and prefix directories")
- subcommands.add_parser(
- "build-testbed", help="Build the testbed app")
- test = subcommands.add_parser(
- "test", help="Run the test suite")
+ "make-build", help="Run `make` for the build Python")
+ configure_host = subcommands.add_parser(
+ "configure-host", help="Run `configure` for Android")
+ make_host = subcommands.add_parser(
+ "make-host", help="Run `make` for Android")
+
+ subcommands.add_parser("clean", help="Delete all build directories")
+ subcommands.add_parser("build-testbed", help="Build the testbed app")
+ test = subcommands.add_parser("test", help="Run the testbed app")
package = subcommands.add_parser("package", help="Make a release package")
+ env = subcommands.add_parser("env", help="Print environment variables")
# Common arguments
for subcommand in build, configure_build, configure_host:
subcommand.add_argument(
"--clean", action="store_true", default=False, dest="clean",
- help="Delete the relevant build and prefix directories first")
- for subcommand in [build, configure_host, make_host, package]:
+ help="Delete the relevant build directories first")
+
+ host_commands = [build, configure_host, make_host, package]
+ if in_source_tree:
+ host_commands.append(env)
+ for subcommand in host_commands:
subcommand.add_argument(
"host", metavar="HOST", choices=HOSTS,
help="Host triplet: choices=[%(choices)s]")
+
for subcommand in build, configure_build, configure_host:
subcommand.add_argument("args", nargs="*",
help="Extra arguments to pass to `configure`")
@@ -654,6 +730,7 @@ def parse_args():
"-v", "--verbose", action="count", default=0,
help="Show Gradle output, and non-Python logcat messages. "
"Use twice to include high-volume messages which are rarely useful.")
+
device_group = test.add_mutually_exclusive_group(required=True)
device_group.add_argument(
"--connected", metavar="SERIAL", help="Run on a connected device. "
@@ -661,8 +738,24 @@ def parse_args():
device_group.add_argument(
"--managed", metavar="NAME", help="Run on a Gradle-managed device. "
"These are defined in `managedDevices` in testbed/app/build.gradle.kts.")
+
+ test.add_argument(
+ "--site-packages", metavar="DIR", type=abspath,
+ help="Directory to copy as the app's site-packages.")
test.add_argument(
- "args", nargs="*", help=f"Arguments for `python -m test`. "
+ "--cwd", metavar="DIR", type=abspath,
+ help="Directory to copy as the app's working directory.")
+
+ mode_group = test.add_mutually_exclusive_group()
+ mode_group.add_argument(
+ "-c", dest="command", help="Execute the given Python code.")
+ mode_group.add_argument(
+ "-m", dest="module", help="Execute the module with the given name.")
+ test.epilog = (
+ "If neither -c nor -m are passed, the default is '-m test', which will "
+ "run Python's own test suite.")
+ test.add_argument(
+ "args", nargs="*", help=f"Arguments to add to sys.argv. "
f"Separate them from {SCRIPT_NAME}'s own arguments with `--`.")
return parser.parse_args()
@@ -688,6 +781,7 @@ def main():
"build-testbed": build_testbed,
"test": run_testbed,
"package": package,
+ "env": env,
}
try:
@@ -708,14 +802,9 @@ def print_called_process_error(e):
if not content.endswith("\n"):
stream.write("\n")
- # Format the command so it can be copied into a shell. shlex uses single
- # quotes, so we surround the whole command with double quotes.
- args_joined = (
- e.cmd if isinstance(e.cmd, str)
- else " ".join(shlex.quote(str(arg)) for arg in e.cmd)
- )
+ # shlex uses single quotes, so we surround the command with double quotes.
print(
- f'Command "{args_joined}" returned exit status {e.returncode}'
+ f'Command "{join_command(e.cmd)}" returned exit status {e.returncode}'
)
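
For reference, a standalone sketch of what the new `join_command` helper does (it accepts a list of strings or Paths, or a single string/Path, per the hunk above); this is an illustration, not part of the patch:

    import shlex
    from pathlib import Path

    def join_command(args):
        # Like shlex.join, but also accepts Paths, or a single string/Path.
        if isinstance(args, (str, Path)):
            return str(args)
        return shlex.join(map(str, args))

    print(join_command(["make", "install", Path("/tmp/prefix dir")]))
    # -> make install '/tmp/prefix dir'
    print(join_command("echo already-joined"))
    # -> echo already-joined  (a single string is returned unchanged)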
diff --git a/Android/testbed/app/build.gradle.kts b/Android/testbed/app/build.gradle.kts
index c627cb1b0e0..92cffd61f86 100644
--- a/Android/testbed/app/build.gradle.kts
+++ b/Android/testbed/app/build.gradle.kts
@@ -85,7 +85,7 @@ android {
minSdk = androidEnvFile.useLines {
for (line in it) {
- """api_level:=(\d+)""".toRegex().find(line)?.let {
+ """ANDROID_API_LEVEL:=(\d+)""".toRegex().find(line)?.let {
return@useLines it.groupValues[1].toInt()
}
}
@@ -205,11 +205,29 @@ androidComponents.onVariants { variant ->
into("site-packages") {
from("$projectDir/src/main/python")
+
+ val sitePackages = findProperty("python.sitePackages") as String?
+ if (!sitePackages.isNullOrEmpty()) {
+ if (!file(sitePackages).exists()) {
+ throw GradleException("$sitePackages does not exist")
+ }
+ from(sitePackages)
+ }
}
duplicatesStrategy = DuplicatesStrategy.EXCLUDE
exclude("**/__pycache__")
}
+
+ into("cwd") {
+ val cwd = findProperty("python.cwd") as String?
+ if (!cwd.isNullOrEmpty()) {
+ if (!file(cwd).exists()) {
+ throw GradleException("$cwd does not exist")
+ }
+ from(cwd)
+ }
+ }
}
}
diff --git a/Android/testbed/app/src/androidTest/java/org/python/testbed/PythonSuite.kt b/Android/testbed/app/src/androidTest/java/org/python/testbed/PythonSuite.kt
index 0e888ab71d8..94be52dd2dc 100644
--- a/Android/testbed/app/src/androidTest/java/org/python/testbed/PythonSuite.kt
+++ b/Android/testbed/app/src/androidTest/java/org/python/testbed/PythonSuite.kt
@@ -17,11 +17,11 @@ class PythonSuite {
fun testPython() {
val start = System.currentTimeMillis()
try {
- val context =
+ val status = PythonTestRunner(
InstrumentationRegistry.getInstrumentation().targetContext
- val args =
- InstrumentationRegistry.getArguments().getString("pythonArgs", "")
- val status = PythonTestRunner(context).run(args)
+ ).run(
+ InstrumentationRegistry.getArguments()
+ )
assertEquals(0, status)
} finally {
// Make sure the process lives long enough for the test script to
diff --git a/Android/testbed/app/src/main/java/org/python/testbed/MainActivity.kt b/Android/testbed/app/src/main/java/org/python/testbed/MainActivity.kt
index c4bf6cbe83d..ef28948486f 100644
--- a/Android/testbed/app/src/main/java/org/python/testbed/MainActivity.kt
+++ b/Android/testbed/app/src/main/java/org/python/testbed/MainActivity.kt
@@ -15,17 +15,29 @@ class MainActivity : AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
- val status = PythonTestRunner(this).run("-W -uall")
+ val status = PythonTestRunner(this).run("-m", "test", "-W -uall")
findViewById<TextView>(R.id.tvHello).text = "Exit status $status"
}
}
class PythonTestRunner(val context: Context) {
- /** @param args Extra arguments for `python -m test`.
- * @return The Python exit status: zero if the tests passed, nonzero if
- * they failed. */
- fun run(args: String = "") : Int {
+ fun run(instrumentationArgs: Bundle) = run(
+ instrumentationArgs.getString("pythonMode")!!,
+ instrumentationArgs.getString("pythonModule")!!,
+ instrumentationArgs.getString("pythonArgs") ?: "",
+ )
+
+ /** Run Python.
+ *
+ * @param mode Either "-c" or "-m".
+ * @param module Python statements for "-c" mode, or a module name for
+ * "-m" mode.
+ * @param args Arguments to add to sys.argv. Will be parsed by `shlex.split`.
+ * @return The Python exit status: zero on success, nonzero on failure. */
+ fun run(mode: String, module: String, args: String) : Int {
+ Os.setenv("PYTHON_MODE", mode, true)
+ Os.setenv("PYTHON_MODULE", module, true)
Os.setenv("PYTHON_ARGS", args, true)
// Python needs this variable to help it find the temporary directory,
@@ -36,8 +48,9 @@ class PythonTestRunner(val context: Context) {
System.loadLibrary("main_activity")
redirectStdioToLogcat()
- // The main module is in src/main/python/main.py.
- return runPython(pythonHome.toString(), "main")
+ // The main module is in src/main/python. We don't simply call it
+ // "main", as that could clash with third-party test code.
+ return runPython(pythonHome.toString(), "android_testbed_main")
}
private fun extractAssets() : File {
diff --git a/Android/testbed/app/src/main/python/main.py b/Android/testbed/app/src/main/python/android_testbed_main.py
index d6941b14412..31b8e5343a8 100644
--- a/Android/testbed/app/src/main/python/main.py
+++ b/Android/testbed/app/src/main/python/android_testbed_main.py
@@ -26,7 +26,23 @@ import sys
# test_signals in test_threadsignals.py.
signal.pthread_sigmask(signal.SIG_UNBLOCK, [signal.SIGUSR1])
+mode = os.environ["PYTHON_MODE"]
+module = os.environ["PYTHON_MODULE"]
sys.argv[1:] = shlex.split(os.environ["PYTHON_ARGS"])
-# The test module will call sys.exit to indicate whether the tests passed.
-runpy.run_module("test")
+cwd = f"{sys.prefix}/cwd"
+if not os.path.exists(cwd):
+ # Empty directories are lost in the asset packing/unpacking process.
+ os.mkdir(cwd)
+os.chdir(cwd)
+
+if mode == "-c":
+ # In -c mode, sys.path starts with an empty string, which means whatever the current
+ # working directory is at the moment of each import.
+ sys.path.insert(0, "")
+ exec(module, {})
+elif mode == "-m":
+ sys.path.insert(0, os.getcwd())
+ runpy.run_module(module, run_name="__main__", alter_sys=True)
+else:
+ raise ValueError(f"unknown mode: {mode}")
diff --git a/Android/testbed/build.gradle.kts b/Android/testbed/build.gradle.kts
index 4d1d6f87594..451517b3f1a 100644
--- a/Android/testbed/build.gradle.kts
+++ b/Android/testbed/build.gradle.kts
@@ -1,5 +1,5 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
plugins {
- id("com.android.application") version "8.6.1" apply false
+ id("com.android.application") version "8.10.0" apply false
id("org.jetbrains.kotlin.android") version "1.9.22" apply false
}
diff --git a/Android/testbed/gradle/wrapper/gradle-wrapper.properties b/Android/testbed/gradle/wrapper/gradle-wrapper.properties
index 36529c89642..5d42fbae084 100644
--- a/Android/testbed/gradle/wrapper/gradle-wrapper.properties
+++ b/Android/testbed/gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,6 @@
#Mon Feb 19 20:29:06 GMT 2024
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.11.1-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
diff --git a/Doc/c-api/lifecycle.rst b/Doc/c-api/lifecycle.rst
index 0e2ffc096ca..5a170862a26 100644
--- a/Doc/c-api/lifecycle.rst
+++ b/Doc/c-api/lifecycle.rst
@@ -55,16 +55,14 @@ that must be true for *B* to occur after *A*.
.. image:: lifecycle.dot.svg
:align: center
:class: invert-in-dark-mode
- :alt: Diagram showing events in an object's life. Explained in detail
- below.
+ :alt: Diagram showing events in an object's life. Explained in detail below.
.. only:: latex
.. image:: lifecycle.dot.pdf
:align: center
:class: invert-in-dark-mode
- :alt: Diagram showing events in an object's life. Explained in detail
- below.
+ :alt: Diagram showing events in an object's life. Explained in detail below.
.. container::
:name: life-events-graph-description
diff --git a/Doc/c-api/module.rst b/Doc/c-api/module.rst
index f7f4d37d4c7..710135dca89 100644
--- a/Doc/c-api/module.rst
+++ b/Doc/c-api/module.rst
@@ -288,22 +288,40 @@ An alternate way to specify extensions is to request "multi-phase initialization
Extension modules created this way behave more like Python modules: the
initialization is split between the *creation phase*, when the module object
is created, and the *execution phase*, when it is populated.
-The distinction is similar to the :py:meth:`!__new__` and :py:meth:`!__init__` methods
-of classes.
+The distinction is similar to the :py:meth:`~object.__new__` and
+:py:meth:`~object.__init__` methods of classes.
Unlike modules created using single-phase initialization, these modules are not
-singletons: if the *sys.modules* entry is removed and the module is re-imported,
-a new module object is created, and the old module is subject to normal garbage
-collection -- as with Python modules.
-By default, multiple modules created from the same definition should be
-independent: changes to one should not affect the others.
-This means that all state should be specific to the module object (using e.g.
-using :c:func:`PyModule_GetState`), or its contents (such as the module's
-:attr:`~object.__dict__` or individual classes created with :c:func:`PyType_FromSpec`).
+singletons.
+For example, if the :py:attr:`sys.modules` entry is removed and the module
+is re-imported, a new module object is created, and typically populated with
+fresh method and type objects.
+The old module is subject to normal garbage collection.
+This mirrors the behavior of pure-Python modules.
+
+Additional module instances may be created in
+:ref:`sub-interpreters <sub-interpreter-support>`
+or after Python runtime reinitialization
+(:c:func:`Py_Finalize` and :c:func:`Py_Initialize`).
+In these cases, sharing Python objects between module instances would likely
+cause crashes or undefined behavior.
+
+To avoid such issues, each instance of an extension module should
+be *isolated*: changes to one instance should not implicitly affect the others,
+and all state, including references to Python objects, should be specific to
+a particular module instance.
+See :ref:`isolating-extensions-howto` for more details and a practical guide.
+
+A simpler way to avoid these issues is
+:ref:`raising an error on repeated initialization <isolating-extensions-optout>`.
All modules created using multi-phase initialization are expected to support
-:ref:`sub-interpreters <sub-interpreter-support>`. Making sure multiple modules
-are independent is typically enough to achieve this.
+:ref:`sub-interpreters <sub-interpreter-support>`, or otherwise explicitly
+signal a lack of support.
+This is usually achieved by isolation or blocking repeated initialization,
+as above.
+A module may also be limited to the main interpreter using
+the :c:data:`Py_mod_multiple_interpreters` slot.
To request multi-phase initialization, the initialization function
(PyInit_modulename) returns a :c:type:`PyModuleDef` instance with non-empty
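
The re-import behaviour described above mirrors pure-Python modules, which can be seen directly from Python; a minimal sketch (`colorsys` is used only as an arbitrary example module):

    import importlib
    import sys

    first = importlib.import_module("colorsys")
    del sys.modules["colorsys"]            # drop the cached entry
    second = importlib.import_module("colorsys")

    # A fresh module object is created; the old one is left to normal
    # garbage collection, as with any other Python module.
    assert first is not second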
diff --git a/Doc/c-api/stable.rst b/Doc/c-api/stable.rst
index 124e58cf950..9b65e0b8d23 100644
--- a/Doc/c-api/stable.rst
+++ b/Doc/c-api/stable.rst
@@ -51,6 +51,7 @@ It is generally intended for specialized, low-level tools like debuggers.
Projects that use this API are expected to follow
CPython development and spend extra effort adjusting to changes.
+.. _stable-application-binary-interface:
Stable Application Binary Interface
===================================
diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst
index ef180464ef1..45f50ba5f97 100644
--- a/Doc/c-api/unicode.rst
+++ b/Doc/c-api/unicode.rst
@@ -1461,10 +1461,6 @@ the user settings on the machine running the codec.
.. versionadded:: 3.3
-Methods & Slots
-"""""""""""""""
-
-
.. _unicodemethodsandslots:
Methods and Slot Functions
diff --git a/Doc/deprecations/pending-removal-in-3.19.rst b/Doc/deprecations/pending-removal-in-3.19.rst
index 3936f63ca5b..25f9cba390d 100644
--- a/Doc/deprecations/pending-removal-in-3.19.rst
+++ b/Doc/deprecations/pending-removal-in-3.19.rst
@@ -6,3 +6,19 @@ Pending removal in Python 3.19
* Implicitly switching to the MSVC-compatible struct layout by setting
:attr:`~ctypes.Structure._pack_` but not :attr:`~ctypes.Structure._layout_`
on non-Windows platforms.
+
+* :mod:`hashlib`:
+
+ - In hash function constructors such as :func:`~hashlib.new` or the
+ direct hash-named constructors such as :func:`~hashlib.md5` and
+ :func:`~hashlib.sha256`, their optional initial data parameter could
+ also be passed a keyword argument named ``data=`` or ``string=`` in
+ various :mod:`!hashlib` implementations.
+
+ Support for the ``string`` keyword argument name is now deprecated
+ and slated for removal in Python 3.19.
+
+ Before Python 3.13, the ``string`` keyword parameter was not correctly
+ supported depending on the backend implementation of hash functions.
+ Prefer passing the initial data as a positional argument for maximum
+ backwards compatibility.
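
For example, the preferred positional spelling versus the deprecated keyword spelling (a short sketch based on the note above):

    import hashlib

    # Preferred: pass the initial data positionally.
    digest = hashlib.sha256(b"abc").hexdigest()

    # Deprecated spelling, slated for removal in Python 3.19:
    # hashlib.sha256(string=b"abc")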
diff --git a/Doc/extending/windows.rst b/Doc/extending/windows.rst
index 56aa44e4e58..a97c6182553 100644
--- a/Doc/extending/windows.rst
+++ b/Doc/extending/windows.rst
@@ -121,7 +121,7 @@ When creating DLLs in Windows, you can use the CPython library in two ways:
:file:`Python.h` triggers an implicit, configure-aware link with the
library. The header file chooses :file:`pythonXY_d.lib` for Debug,
:file:`pythonXY.lib` for Release, and :file:`pythonX.lib` for Release with
- the `Limited API <stable-application-binary-interface>`_ enabled.
+ the :ref:`Limited API <stable-application-binary-interface>` enabled.
To build two DLLs, spam and ni (which uses C functions found in spam), you
could use these commands::
diff --git a/Doc/howto/isolating-extensions.rst b/Doc/howto/isolating-extensions.rst
index 5513cd73675..b2109b15039 100644
--- a/Doc/howto/isolating-extensions.rst
+++ b/Doc/howto/isolating-extensions.rst
@@ -168,7 +168,7 @@ possible, consider explicit locking.
If it is necessary to use process-global state, the simplest way to
avoid issues with multiple interpreters is to explicitly prevent a
module from being loaded more than once per process—see
-`Opt-Out: Limiting to One Module Object per Process`_.
+:ref:`isolating-extensions-optout`.
Managing Per-Module State
@@ -207,6 +207,8 @@ An example of a module with per-module state is currently available as
example module initialization shown at the bottom of the file.
+.. _isolating-extensions-optout:
+
Opt-Out: Limiting to One Module Object per Process
--------------------------------------------------
diff --git a/Doc/howto/urllib2.rst b/Doc/howto/urllib2.rst
index 33a2a7ea89e..d79d1abe8d0 100644
--- a/Doc/howto/urllib2.rst
+++ b/Doc/howto/urllib2.rst
@@ -245,75 +245,27 @@ codes in the 100--299 range indicate success, you will usually only see error
codes in the 400--599 range.
:attr:`http.server.BaseHTTPRequestHandler.responses` is a useful dictionary of
-response codes in that shows all the response codes used by :rfc:`2616`. The
-dictionary is reproduced here for convenience ::
+response codes that shows all the response codes used by :rfc:`2616`.
+An excerpt from the dictionary is shown below ::
- # Table mapping response codes to messages; entries have the
- # form {code: (shortmessage, longmessage)}.
responses = {
- 100: ('Continue', 'Request received, please continue'),
- 101: ('Switching Protocols',
- 'Switching to new protocol; obey Upgrade header'),
-
- 200: ('OK', 'Request fulfilled, document follows'),
- 201: ('Created', 'Document created, URL follows'),
- 202: ('Accepted',
- 'Request accepted, processing continues off-line'),
- 203: ('Non-Authoritative Information', 'Request fulfilled from cache'),
- 204: ('No Content', 'Request fulfilled, nothing follows'),
- 205: ('Reset Content', 'Clear input form for further input.'),
- 206: ('Partial Content', 'Partial content follows.'),
-
- 300: ('Multiple Choices',
- 'Object has several resources -- see URI list'),
- 301: ('Moved Permanently', 'Object moved permanently -- see URI list'),
- 302: ('Found', 'Object moved temporarily -- see URI list'),
- 303: ('See Other', 'Object moved -- see Method and URL list'),
- 304: ('Not Modified',
- 'Document has not changed since given time'),
- 305: ('Use Proxy',
- 'You must use proxy specified in Location to access this '
- 'resource.'),
- 307: ('Temporary Redirect',
- 'Object moved temporarily -- see URI list'),
-
- 400: ('Bad Request',
- 'Bad request syntax or unsupported method'),
- 401: ('Unauthorized',
- 'No permission -- see authorization schemes'),
- 402: ('Payment Required',
- 'No payment -- see charging schemes'),
- 403: ('Forbidden',
- 'Request forbidden -- authorization will not help'),
- 404: ('Not Found', 'Nothing matches the given URI'),
- 405: ('Method Not Allowed',
- 'Specified method is invalid for this server.'),
- 406: ('Not Acceptable', 'URI not available in preferred format.'),
- 407: ('Proxy Authentication Required', 'You must authenticate with '
- 'this proxy before proceeding.'),
- 408: ('Request Timeout', 'Request timed out; try again later.'),
- 409: ('Conflict', 'Request conflict.'),
- 410: ('Gone',
- 'URI no longer exists and has been permanently removed.'),
- 411: ('Length Required', 'Client must specify Content-Length.'),
- 412: ('Precondition Failed', 'Precondition in headers is false.'),
- 413: ('Request Entity Too Large', 'Entity is too large.'),
- 414: ('Request-URI Too Long', 'URI is too long.'),
- 415: ('Unsupported Media Type', 'Entity body in unsupported format.'),
- 416: ('Requested Range Not Satisfiable',
- 'Cannot satisfy request range.'),
- 417: ('Expectation Failed',
- 'Expect condition could not be satisfied.'),
-
- 500: ('Internal Server Error', 'Server got itself in trouble'),
- 501: ('Not Implemented',
- 'Server does not support this operation'),
- 502: ('Bad Gateway', 'Invalid responses from another server/proxy.'),
- 503: ('Service Unavailable',
- 'The server cannot process the request due to a high load'),
- 504: ('Gateway Timeout',
- 'The gateway server did not receive a timely response'),
- 505: ('HTTP Version Not Supported', 'Cannot fulfill request.'),
+ ...
+ <HTTPStatus.OK: 200>: ('OK', 'Request fulfilled, document follows'),
+ ...
+ <HTTPStatus.FORBIDDEN: 403>: ('Forbidden',
+ 'Request forbidden -- authorization will '
+ 'not help'),
+ <HTTPStatus.NOT_FOUND: 404>: ('Not Found',
+ 'Nothing matches the given URI'),
+ ...
+ <HTTPStatus.IM_A_TEAPOT: 418>: ("I'm a Teapot",
+ 'Server refuses to brew coffee because '
+ 'it is a teapot'),
+ ...
+ <HTTPStatus.SERVICE_UNAVAILABLE: 503>: ('Service Unavailable',
+ 'The server cannot process the '
+ 'request due to a high load'),
+ ...
}
When an error is raised the server responds by returning an HTTP error code
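
A small sketch of looking up a single entry in that mapping:

    from http import HTTPStatus
    from http.server import BaseHTTPRequestHandler

    short, long_msg = BaseHTTPRequestHandler.responses[HTTPStatus.NOT_FOUND]
    # short == 'Not Found'; long_msg == 'Nothing matches the given URI'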
diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst
index 29396c7a036..17f126cc065 100644
--- a/Doc/library/argparse.rst
+++ b/Doc/library/argparse.rst
@@ -2122,12 +2122,15 @@ Partial parsing
.. method:: ArgumentParser.parse_known_args(args=None, namespace=None)
- Sometimes a script may only parse a few of the command-line arguments, passing
- the remaining arguments on to another script or program. In these cases, the
- :meth:`~ArgumentParser.parse_known_args` method can be useful. It works much like
- :meth:`~ArgumentParser.parse_args` except that it does not produce an error when
- extra arguments are present. Instead, it returns a two item tuple containing
- the populated namespace and the list of remaining argument strings.
+ Sometimes a script only needs to handle a specific set of command-line
+ arguments, leaving any unrecognized arguments for another script or program.
+ In these cases, the :meth:`~ArgumentParser.parse_known_args` method can be
+ useful.
+
+ This method works similarly to :meth:`~ArgumentParser.parse_args`, but it does
+ not raise an error for extra, unrecognized arguments. Instead, it parses the
+ known arguments and returns a two item tuple that contains the populated
+ namespace and the list of any unrecognized arguments.
::
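
The rendered documentation continues with its own example after the `::` marker; as a quick sketch of the same behaviour, with hypothetical option names:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--verbose", action="store_true")

    # Unrecognized arguments are returned instead of causing an error.
    ns, extras = parser.parse_known_args(["--verbose", "--unknown", "x"])
    # ns.verbose is True; extras == ['--unknown', 'x']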
diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst
index ca9a6b0712c..ef6c62dca1e 100644
--- a/Doc/library/ast.rst
+++ b/Doc/library/ast.rst
@@ -252,12 +252,11 @@ Root nodes
>>> print(ast.dump(ast.parse('(int, str) -> List[int]', mode='func_type'), indent=4))
FunctionType(
argtypes=[
- Name(id='int', ctx=Load()),
- Name(id='str', ctx=Load())],
+ Name(id='int'),
+ Name(id='str')],
returns=Subscript(
- value=Name(id='List', ctx=Load()),
- slice=Name(id='int', ctx=Load()),
- ctx=Load()))
+ value=Name(id='List'),
+ slice=Name(id='int')))
.. versionadded:: 3.8
@@ -268,9 +267,9 @@ Literals
.. class:: Constant(value)
A constant value. The ``value`` attribute of the ``Constant`` literal contains the
- Python object it represents. The values represented can be simple types
- such as a number, string or ``None``, but also immutable container types
- (tuples and frozensets) if all of their elements are constant.
+ Python object it represents. The values represented can be instances of :class:`str`,
+ :class:`bytes`, :class:`int`, :class:`float`, :class:`complex`, and :class:`bool`,
+ and the constants :data:`None` and :data:`Ellipsis`.
.. doctest::
@@ -312,14 +311,14 @@ Literals
values=[
Constant(value='sin('),
FormattedValue(
- value=Name(id='a', ctx=Load()),
+ value=Name(id='a'),
conversion=-1),
Constant(value=') is '),
FormattedValue(
value=Call(
- func=Name(id='sin', ctx=Load()),
+ func=Name(id='sin'),
args=[
- Name(id='a', ctx=Load())]),
+ Name(id='a')]),
conversion=-1,
format_spec=JoinedStr(
values=[
@@ -341,16 +340,14 @@ Literals
elts=[
Constant(value=1),
Constant(value=2),
- Constant(value=3)],
- ctx=Load()))
+ Constant(value=3)]))
>>> print(ast.dump(ast.parse('(1, 2, 3)', mode='eval'), indent=4))
Expression(
body=Tuple(
elts=[
Constant(value=1),
Constant(value=2),
- Constant(value=3)],
- ctx=Load()))
+ Constant(value=3)]))
.. class:: Set(elts)
@@ -388,7 +385,7 @@ Literals
None],
values=[
Constant(value=1),
- Name(id='d', ctx=Load())]))
+ Name(id='d')]))
Variables
@@ -414,7 +411,7 @@ Variables
Module(
body=[
Expr(
- value=Name(id='a', ctx=Load()))])
+ value=Name(id='a'))])
>>> print(ast.dump(ast.parse('a = 1'), indent=4))
Module(
@@ -452,7 +449,7 @@ Variables
value=Name(id='b', ctx=Store()),
ctx=Store())],
ctx=Store())],
- value=Name(id='it', ctx=Load()))])
+ value=Name(id='it'))])
.. _ast-expressions:
@@ -475,7 +472,7 @@ Expressions
Expr(
value=UnaryOp(
op=USub(),
- operand=Name(id='a', ctx=Load())))])
+ operand=Name(id='a')))])
.. class:: UnaryOp(op, operand)
@@ -498,7 +495,7 @@ Expressions
Expression(
body=UnaryOp(
op=Not(),
- operand=Name(id='x', ctx=Load())))
+ operand=Name(id='x')))
.. class:: BinOp(left, op, right)
@@ -511,9 +508,9 @@ Expressions
>>> print(ast.dump(ast.parse('x + y', mode='eval'), indent=4))
Expression(
body=BinOp(
- left=Name(id='x', ctx=Load()),
+ left=Name(id='x'),
op=Add(),
- right=Name(id='y', ctx=Load())))
+ right=Name(id='y')))
.. class:: Add
@@ -549,8 +546,8 @@ Expressions
body=BoolOp(
op=Or(),
values=[
- Name(id='x', ctx=Load()),
- Name(id='y', ctx=Load())]))
+ Name(id='x'),
+ Name(id='y')]))
.. class:: And
@@ -575,7 +572,7 @@ Expressions
LtE(),
Lt()],
comparators=[
- Name(id='a', ctx=Load()),
+ Name(id='a'),
Constant(value=10)]))
@@ -609,18 +606,17 @@ Expressions
>>> print(ast.dump(ast.parse('func(a, b=c, *d, **e)', mode='eval'), indent=4))
Expression(
body=Call(
- func=Name(id='func', ctx=Load()),
+ func=Name(id='func'),
args=[
- Name(id='a', ctx=Load()),
+ Name(id='a'),
Starred(
- value=Name(id='d', ctx=Load()),
- ctx=Load())],
+ value=Name(id='d'))],
keywords=[
keyword(
arg='b',
- value=Name(id='c', ctx=Load())),
+ value=Name(id='c')),
keyword(
- value=Name(id='e', ctx=Load()))]))
+ value=Name(id='e'))]))
.. class:: keyword(arg, value)
@@ -639,9 +635,9 @@ Expressions
>>> print(ast.dump(ast.parse('a if b else c', mode='eval'), indent=4))
Expression(
body=IfExp(
- test=Name(id='b', ctx=Load()),
- body=Name(id='a', ctx=Load()),
- orelse=Name(id='c', ctx=Load())))
+ test=Name(id='b'),
+ body=Name(id='a'),
+ orelse=Name(id='c')))
.. class:: Attribute(value, attr, ctx)
@@ -656,9 +652,8 @@ Expressions
>>> print(ast.dump(ast.parse('snake.colour', mode='eval'), indent=4))
Expression(
body=Attribute(
- value=Name(id='snake', ctx=Load()),
- attr='colour',
- ctx=Load()))
+ value=Name(id='snake'),
+ attr='colour'))
.. class:: NamedExpr(target, value)
@@ -694,15 +689,13 @@ Subscripting
>>> print(ast.dump(ast.parse('l[1:2, 3]', mode='eval'), indent=4))
Expression(
body=Subscript(
- value=Name(id='l', ctx=Load()),
+ value=Name(id='l'),
slice=Tuple(
elts=[
Slice(
lower=Constant(value=1),
upper=Constant(value=2)),
- Constant(value=3)],
- ctx=Load()),
- ctx=Load()))
+ Constant(value=3)])))
.. class:: Slice(lower, upper, step)
@@ -716,11 +709,10 @@ Subscripting
>>> print(ast.dump(ast.parse('l[1:2]', mode='eval'), indent=4))
Expression(
body=Subscript(
- value=Name(id='l', ctx=Load()),
+ value=Name(id='l'),
slice=Slice(
lower=Constant(value=1),
- upper=Constant(value=2)),
- ctx=Load()))
+ upper=Constant(value=2))))
Comprehensions
@@ -745,11 +737,11 @@ Comprehensions
... ))
Expression(
body=ListComp(
- elt=Name(id='x', ctx=Load()),
+ elt=Name(id='x'),
generators=[
comprehension(
target=Name(id='x', ctx=Store()),
- iter=Name(id='numbers', ctx=Load()),
+ iter=Name(id='numbers'),
is_async=0)]))
>>> print(ast.dump(
... ast.parse('{x: x**2 for x in numbers}', mode='eval'),
@@ -757,15 +749,15 @@ Comprehensions
... ))
Expression(
body=DictComp(
- key=Name(id='x', ctx=Load()),
+ key=Name(id='x'),
value=BinOp(
- left=Name(id='x', ctx=Load()),
+ left=Name(id='x'),
op=Pow(),
right=Constant(value=2)),
generators=[
comprehension(
target=Name(id='x', ctx=Store()),
- iter=Name(id='numbers', ctx=Load()),
+ iter=Name(id='numbers'),
is_async=0)]))
>>> print(ast.dump(
... ast.parse('{x for x in numbers}', mode='eval'),
@@ -773,11 +765,11 @@ Comprehensions
... ))
Expression(
body=SetComp(
- elt=Name(id='x', ctx=Load()),
+ elt=Name(id='x'),
generators=[
comprehension(
target=Name(id='x', ctx=Store()),
- iter=Name(id='numbers', ctx=Load()),
+ iter=Name(id='numbers'),
is_async=0)]))
@@ -798,17 +790,17 @@ Comprehensions
Expression(
body=ListComp(
elt=Call(
- func=Name(id='ord', ctx=Load()),
+ func=Name(id='ord'),
args=[
- Name(id='c', ctx=Load())]),
+ Name(id='c')]),
generators=[
comprehension(
target=Name(id='line', ctx=Store()),
- iter=Name(id='file', ctx=Load()),
+ iter=Name(id='file'),
is_async=0),
comprehension(
target=Name(id='c', ctx=Store()),
- iter=Name(id='line', ctx=Load()),
+ iter=Name(id='line'),
is_async=0)]))
>>> print(ast.dump(ast.parse('(n**2 for n in it if n>5 if n<10)', mode='eval'),
@@ -816,22 +808,22 @@ Comprehensions
Expression(
body=GeneratorExp(
elt=BinOp(
- left=Name(id='n', ctx=Load()),
+ left=Name(id='n'),
op=Pow(),
right=Constant(value=2)),
generators=[
comprehension(
target=Name(id='n', ctx=Store()),
- iter=Name(id='it', ctx=Load()),
+ iter=Name(id='it'),
ifs=[
Compare(
- left=Name(id='n', ctx=Load()),
+ left=Name(id='n'),
ops=[
Gt()],
comparators=[
Constant(value=5)]),
Compare(
- left=Name(id='n', ctx=Load()),
+ left=Name(id='n'),
ops=[
Lt()],
comparators=[
@@ -842,11 +834,11 @@ Comprehensions
... indent=4)) # Async comprehension
Expression(
body=ListComp(
- elt=Name(id='i', ctx=Load()),
+ elt=Name(id='i'),
generators=[
comprehension(
target=Name(id='i', ctx=Store()),
- iter=Name(id='soc', ctx=Load()),
+ iter=Name(id='soc'),
is_async=1)]))
@@ -888,7 +880,7 @@ Statements
Name(id='a', ctx=Store()),
Name(id='b', ctx=Store())],
ctx=Store())],
- value=Name(id='c', ctx=Load()))])
+ value=Name(id='c'))])
.. class:: AnnAssign(target, annotation, value, simple)
@@ -911,7 +903,7 @@ Statements
body=[
AnnAssign(
target=Name(id='c', ctx=Store()),
- annotation=Name(id='int', ctx=Load()),
+ annotation=Name(id='int'),
simple=1)])
>>> print(ast.dump(ast.parse('(a): int = 1'), indent=4)) # Annotation with parenthesis
@@ -919,7 +911,7 @@ Statements
body=[
AnnAssign(
target=Name(id='a', ctx=Store()),
- annotation=Name(id='int', ctx=Load()),
+ annotation=Name(id='int'),
value=Constant(value=1),
simple=0)])
@@ -928,10 +920,10 @@ Statements
body=[
AnnAssign(
target=Attribute(
- value=Name(id='a', ctx=Load()),
+ value=Name(id='a'),
attr='b',
ctx=Store()),
- annotation=Name(id='int', ctx=Load()),
+ annotation=Name(id='int'),
simple=0)])
>>> print(ast.dump(ast.parse('a[1]: int'), indent=4)) # Subscript annotation
@@ -939,10 +931,10 @@ Statements
body=[
AnnAssign(
target=Subscript(
- value=Name(id='a', ctx=Load()),
+ value=Name(id='a'),
slice=Constant(value=1),
ctx=Store()),
- annotation=Name(id='int', ctx=Load()),
+ annotation=Name(id='int'),
simple=0)])
@@ -979,8 +971,8 @@ Statements
Module(
body=[
Raise(
- exc=Name(id='x', ctx=Load()),
- cause=Name(id='y', ctx=Load()))])
+ exc=Name(id='x'),
+ cause=Name(id='y'))])
.. class:: Assert(test, msg)
@@ -994,8 +986,8 @@ Statements
Module(
body=[
Assert(
- test=Name(id='x', ctx=Load()),
- msg=Name(id='y', ctx=Load()))])
+ test=Name(id='x'),
+ msg=Name(id='y'))])
.. class:: Delete(targets)
@@ -1041,7 +1033,7 @@ Statements
body=[
TypeAlias(
name=Name(id='Alias', ctx=Store()),
- value=Name(id='int', ctx=Load()))])
+ value=Name(id='int'))])
.. versionadded:: 3.12
@@ -1134,13 +1126,13 @@ Control flow
Module(
body=[
If(
- test=Name(id='x', ctx=Load()),
+ test=Name(id='x'),
body=[
Expr(
value=Constant(value=Ellipsis))],
orelse=[
If(
- test=Name(id='y', ctx=Load()),
+ test=Name(id='y'),
body=[
Expr(
value=Constant(value=Ellipsis))],
@@ -1174,7 +1166,7 @@ Control flow
body=[
For(
target=Name(id='x', ctx=Store()),
- iter=Name(id='y', ctx=Load()),
+ iter=Name(id='y'),
body=[
Expr(
value=Constant(value=Ellipsis))],
@@ -1199,7 +1191,7 @@ Control flow
Module(
body=[
While(
- test=Name(id='x', ctx=Load()),
+ test=Name(id='x'),
body=[
Expr(
value=Constant(value=Ellipsis))],
@@ -1227,11 +1219,11 @@ Control flow
body=[
For(
target=Name(id='a', ctx=Store()),
- iter=Name(id='b', ctx=Load()),
+ iter=Name(id='b'),
body=[
If(
test=Compare(
- left=Name(id='a', ctx=Load()),
+ left=Name(id='a'),
ops=[
Gt()],
comparators=[
@@ -1269,12 +1261,12 @@ Control flow
value=Constant(value=Ellipsis))],
handlers=[
ExceptHandler(
- type=Name(id='Exception', ctx=Load()),
+ type=Name(id='Exception'),
body=[
Expr(
value=Constant(value=Ellipsis))]),
ExceptHandler(
- type=Name(id='OtherException', ctx=Load()),
+ type=Name(id='OtherException'),
name='e',
body=[
Expr(
@@ -1309,7 +1301,7 @@ Control flow
value=Constant(value=Ellipsis))],
handlers=[
ExceptHandler(
- type=Name(id='Exception', ctx=Load()),
+ type=Name(id='Exception'),
body=[
Expr(
value=Constant(value=Ellipsis))])])])
@@ -1337,12 +1329,12 @@ Control flow
body=[
Expr(
value=BinOp(
- left=Name(id='a', ctx=Load()),
+ left=Name(id='a'),
op=Add(),
right=Constant(value=1)))],
handlers=[
ExceptHandler(
- type=Name(id='TypeError', ctx=Load()),
+ type=Name(id='TypeError'),
body=[
Pass()])])])
@@ -1375,18 +1367,18 @@ Control flow
With(
items=[
withitem(
- context_expr=Name(id='a', ctx=Load()),
+ context_expr=Name(id='a'),
optional_vars=Name(id='b', ctx=Store())),
withitem(
- context_expr=Name(id='c', ctx=Load()),
+ context_expr=Name(id='c'),
optional_vars=Name(id='d', ctx=Store()))],
body=[
Expr(
value=Call(
- func=Name(id='something', ctx=Load()),
+ func=Name(id='something'),
args=[
- Name(id='b', ctx=Load()),
- Name(id='d', ctx=Load())]))])])
+ Name(id='b'),
+ Name(id='d')]))])])
Pattern matching
@@ -1426,14 +1418,14 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchSequence(
patterns=[
MatchAs(name='x')]),
guard=Compare(
- left=Name(id='x', ctx=Load()),
+ left=Name(id='x'),
ops=[
Gt()],
comparators=[
@@ -1443,7 +1435,7 @@ Pattern matching
value=Constant(value=Ellipsis))]),
match_case(
pattern=MatchClass(
- cls=Name(id='tuple', ctx=Load())),
+ cls=Name(id='tuple')),
body=[
Expr(
value=Constant(value=Ellipsis))])])])
@@ -1467,7 +1459,7 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchValue(
@@ -1494,7 +1486,7 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchSingleton(value=None),
@@ -1521,7 +1513,7 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchSequence(
@@ -1554,7 +1546,7 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchSequence(
@@ -1603,7 +1595,7 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchMapping(
@@ -1653,11 +1645,11 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchClass(
- cls=Name(id='Point2D', ctx=Load()),
+ cls=Name(id='Point2D'),
patterns=[
MatchValue(
value=Constant(value=0)),
@@ -1668,7 +1660,7 @@ Pattern matching
value=Constant(value=Ellipsis))]),
match_case(
pattern=MatchClass(
- cls=Name(id='Point3D', ctx=Load()),
+ cls=Name(id='Point3D'),
kwd_attrs=[
'x',
'y',
@@ -1709,7 +1701,7 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchAs(
@@ -1746,7 +1738,7 @@ Pattern matching
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchOr(
@@ -1786,7 +1778,7 @@ Type annotations
body=[
AnnAssign(
target=Name(id='x', ctx=Store()),
- annotation=Name(id='bool', ctx=Load()),
+ annotation=Name(id='bool'),
value=Constant(value=1),
simple=1)],
type_ignores=[
@@ -1824,12 +1816,11 @@ aliases.
type_params=[
TypeVar(
name='T',
- bound=Name(id='int', ctx=Load()),
- default_value=Name(id='bool', ctx=Load()))],
+ bound=Name(id='int'),
+ default_value=Name(id='bool'))],
value=Subscript(
- value=Name(id='list', ctx=Load()),
- slice=Name(id='T', ctx=Load()),
- ctx=Load()))])
+ value=Name(id='list'),
+ slice=Name(id='T')))])
.. versionadded:: 3.12
@@ -1854,17 +1845,14 @@ aliases.
name='P',
default_value=List(
elts=[
- Name(id='int', ctx=Load()),
- Name(id='str', ctx=Load())],
- ctx=Load()))],
+ Name(id='int'),
+ Name(id='str')]))],
value=Subscript(
- value=Name(id='Callable', ctx=Load()),
+ value=Name(id='Callable'),
slice=Tuple(
elts=[
- Name(id='P', ctx=Load()),
- Name(id='int', ctx=Load())],
- ctx=Load()),
- ctx=Load()))])
+ Name(id='P'),
+ Name(id='int')])))])
.. versionadded:: 3.12
@@ -1885,18 +1873,13 @@ aliases.
TypeAlias(
name=Name(id='Alias', ctx=Store()),
type_params=[
- TypeVarTuple(
- name='Ts',
- default_value=Tuple(ctx=Load()))],
+ TypeVarTuple(name='Ts', default_value=Tuple())],
value=Subscript(
- value=Name(id='tuple', ctx=Load()),
+ value=Name(id='tuple'),
slice=Tuple(
elts=[
Starred(
- value=Name(id='Ts', ctx=Load()),
- ctx=Load())],
- ctx=Load()),
- ctx=Load()))])
+ value=Name(id='Ts'))])))])
.. versionadded:: 3.12
@@ -2001,8 +1984,8 @@ Function and class definitions
body=[
Pass()],
decorator_list=[
- Name(id='decorator1', ctx=Load()),
- Name(id='decorator2', ctx=Load())],
+ Name(id='decorator1'),
+ Name(id='decorator2')],
returns=Constant(value='return annotation'))])
@@ -2032,14 +2015,14 @@ Function and class definitions
body=[
Expr(
value=Yield(
- value=Name(id='x', ctx=Load())))])
+ value=Name(id='x')))])
>>> print(ast.dump(ast.parse('yield from x'), indent=4))
Module(
body=[
Expr(
value=YieldFrom(
- value=Name(id='x', ctx=Load())))])
+ value=Name(id='x')))])
.. class:: Global(names)
@@ -2094,17 +2077,17 @@ Function and class definitions
ClassDef(
name='Foo',
bases=[
- Name(id='base1', ctx=Load()),
- Name(id='base2', ctx=Load())],
+ Name(id='base1'),
+ Name(id='base2')],
keywords=[
keyword(
arg='metaclass',
- value=Name(id='meta', ctx=Load()))],
+ value=Name(id='meta'))],
body=[
Pass()],
decorator_list=[
- Name(id='decorator1', ctx=Load()),
- Name(id='decorator2', ctx=Load())])])
+ Name(id='decorator1'),
+ Name(id='decorator2')])])
.. versionchanged:: 3.12
Added ``type_params``.
@@ -2141,7 +2124,7 @@ Async and await
Expr(
value=Await(
value=Call(
- func=Name(id='other_func', ctx=Load()))))])])
+ func=Name(id='other_func'))))])])
.. class:: AsyncFor(target, iter, body, orelse, type_comment)
@@ -2402,7 +2385,7 @@ and classes for traversing abstract syntax trees:
def visit_Name(self, node):
return Subscript(
- value=Name(id='data', ctx=Load()),
+ value=Name(id='data'),
slice=Constant(value=node.id),
ctx=node.ctx
)
@@ -2445,8 +2428,26 @@ and classes for traversing abstract syntax trees:
indents that many spaces per level. If *indent* is a string (such as ``"\t"``),
that string is used to indent each level.
- If *show_empty* is ``False`` (the default), empty lists and fields that are ``None``
- will be omitted from the output.
+ If *show_empty* is false (the default), optional empty lists and
+ ``Load()`` values will be omitted from the output.
+ Optional ``None`` values are always omitted.
+
+ .. doctest::
+
+ >>> tree = ast.parse('print(None)', '?', 'eval')
+ >>> print(ast.dump(tree, indent=4))
+ Expression(
+ body=Call(
+ func=Name(id='print'),
+ args=[
+ Constant(value=None)]))
+ >>> print(ast.dump(tree, indent=4, show_empty=True))
+ Expression(
+ body=Call(
+ func=Name(id='print', ctx=Load()),
+ args=[
+ Constant(value=None)],
+ keywords=[]))
.. versionchanged:: 3.9
Added the *indent* option.
@@ -2454,32 +2455,8 @@ and classes for traversing abstract syntax trees:
.. versionchanged:: 3.13
Added the *show_empty* option.
- .. doctest::
-
- >>> print(ast.dump(ast.parse("""\
- ... async def f():
- ... await other_func()
- ... """), indent=4, show_empty=True))
- Module(
- body=[
- AsyncFunctionDef(
- name='f',
- args=arguments(
- posonlyargs=[],
- args=[],
- kwonlyargs=[],
- kw_defaults=[],
- defaults=[]),
- body=[
- Expr(
- value=Await(
- value=Call(
- func=Name(id='other_func', ctx=Load()),
- args=[],
- keywords=[])))],
- decorator_list=[],
- type_params=[])],
- type_ignores=[])
+ .. versionchanged:: next
+ Omit optional ``Load()`` values by default.
.. _ast-compiler-flags:
diff --git a/Doc/library/compileall.rst b/Doc/library/compileall.rst
index c42288419c4..ebbbf857e71 100644
--- a/Doc/library/compileall.rst
+++ b/Doc/library/compileall.rst
@@ -56,11 +56,18 @@ compile Python sources.
executed.
.. option:: -s strip_prefix
+
+ Remove the given prefix from paths recorded in the ``.pyc`` files.
+ Paths are made relative to the prefix.
+
+ This option can be used with ``-p`` but not with ``-d``.
+
.. option:: -p prepend_prefix
- Remove (``-s``) or append (``-p``) the given prefix of paths
- recorded in the ``.pyc`` files.
- Cannot be combined with ``-d``.
+ Prepend the given prefix to paths recorded in the ``.pyc`` files.
+ Use ``-p /`` to make the paths absolute.
+
+ This option can be used with ``-s`` but not with ``-d``.
.. option:: -x regex
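For reference, the ``-s`` and ``-p`` options correspond to the *stripdir* and
*prependdir* keyword arguments of the :mod:`compileall` functions; a minimal
programmatic sketch, with illustrative paths::

    import compileall

    # Record paths relative to /srv/app in the .pyc files, then prepend /opt/app,
    # mirroring `compileall -s /srv/app -p /opt/app`.
    compileall.compile_dir(
        "/srv/app/pkg",
        stripdir="/srv/app",
        prependdir="/opt/app",
        quiet=1,
    )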
diff --git a/Doc/library/compression.zstd.rst b/Doc/library/compression.zstd.rst
index 1e1802155a1..57ad8e3377f 100644
--- a/Doc/library/compression.zstd.rst
+++ b/Doc/library/compression.zstd.rst
@@ -247,6 +247,27 @@ Compressing and decompressing data in memory
The *mode* argument is a :class:`ZstdCompressor` attribute, either
:attr:`~.FLUSH_BLOCK`, or :attr:`~.FLUSH_FRAME`.
+ .. method:: set_pledged_input_size(size)
+
+ Specify the amount of uncompressed data *size* that will be provided for
+ the next frame. *size* will be written into the frame header of the next
+ frame unless :attr:`CompressionParameter.content_size_flag` is ``False``
+ or ``0``. A size of ``0`` means that the frame is empty. If *size* is
+ ``None``, the frame header will omit the frame size. Frames that include
+ the uncompressed data size require less memory to decompress, especially
+ at higher compression levels.
+
+ If :attr:`last_mode` is not :attr:`FLUSH_FRAME`, a
+ :exc:`ValueError` is raised as the compressor is not at the start of
+ a frame. If the pledged size does not match the actual size of data
+ provided to :meth:`.compress`, future calls to :meth:`!compress` or
+ :meth:`flush` may raise :exc:`ZstdError` and the last chunk of data may
+ be lost.
+
+ After :meth:`flush` or :meth:`.compress` is called with mode
+ :attr:`FLUSH_FRAME`, the next frame will not include the frame size in
+ the header unless :meth:`!set_pledged_input_size` is called again.
+
.. attribute:: CONTINUE
Collect more data for compression, which may or may not generate output
@@ -266,6 +287,13 @@ Compressing and decompressing data in memory
:meth:`~.compress` will be written into a new frame and
*cannot* reference past data.
+ .. attribute:: last_mode
+
+ The last mode passed to either :meth:`~.compress` or :meth:`~.flush`.
+ The value can be one of :attr:`~.CONTINUE`, :attr:`~.FLUSH_BLOCK`, or
+ :attr:`~.FLUSH_FRAME`. The initial value is :attr:`~.FLUSH_FRAME`,
+ signifying that the compressor is at the start of a new frame.
+
.. class:: ZstdDecompressor(zstd_dict=None, options=None)
@@ -615,6 +643,29 @@ Advanced parameter control
A value of zero causes the value to be selected automatically.
+ .. attribute:: content_size_flag
+
+ Write the size of the data to be compressed into the Zstandard frame
+ header when known prior to compressing.
+
+ This flag only takes effect in the following scenarios:
+
+ * Calling :func:`compress` for one-shot compression
+ * Providing all of the data to be compressed in the frame in a single
+ :meth:`ZstdCompressor.compress` call, with the
+ :attr:`ZstdCompressor.FLUSH_FRAME` mode.
+ * Calling :meth:`ZstdCompressor.set_pledged_input_size` with the exact
+ amount of data that will be provided to the compressor prior to any
+ calls to :meth:`ZstdCompressor.compress` for the current frame.
+ :meth:`!ZstdCompressor.set_pledged_input_size` must be called for each
+ new frame.
+
+ All other compression calls might not write the size information into the
+ frame header.
+
+ ``True`` or ``1`` enables the content size flag, while ``False`` or ``0``
+ disables it.
+
.. attribute:: checksum_flag
A four-byte checksum using XXHash64 of the uncompressed content is
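A hedged usage sketch of the :meth:`!set_pledged_input_size` and :attr:`!last_mode`
additions above, using only names introduced by this patch (not verified against a
released build)::

    from compression.zstd import ZstdCompressor

    data = b"example payload " * 1024
    c = ZstdCompressor()
    # A fresh compressor sits at a frame boundary, so last_mode is FLUSH_FRAME.
    assert c.last_mode == ZstdCompressor.FLUSH_FRAME
    # Pledge the exact uncompressed size so it is written into the frame header.
    c.set_pledged_input_size(len(data))
    frame = c.compress(data) + c.flush(ZstdCompressor.FLUSH_FRAME)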
diff --git a/Doc/library/csv.rst b/Doc/library/csv.rst
index 533cdf13974..5297be17bd7 100644
--- a/Doc/library/csv.rst
+++ b/Doc/library/csv.rst
@@ -70,7 +70,7 @@ The :mod:`csv` module defines the following functions:
section :ref:`csv-fmt-params`.
Each row read from the csv file is returned as a list of strings. No
- automatic data type conversion is performed unless the ``QUOTE_NONNUMERIC`` format
+ automatic data type conversion is performed unless the :data:`QUOTE_NONNUMERIC` format
option is specified (in which case unquoted fields are transformed into floats).
A short usage example::
@@ -331,8 +331,14 @@ The :mod:`csv` module defines the following constants:
Instructs :class:`writer` objects to quote all non-numeric fields.
- Instructs :class:`reader` objects to convert all non-quoted fields to type *float*.
+ Instructs :class:`reader` objects to convert all non-quoted fields to type :class:`float`.
+ .. note::
+ Some numeric types, such as :class:`bool`, :class:`~fractions.Fraction`,
+ or :class:`~enum.IntEnum`, have a string representation that cannot be
+ converted to :class:`float`.
+ They cannot be read in the :data:`QUOTE_NONNUMERIC` and
+ :data:`QUOTE_STRINGS` modes.
.. data:: QUOTE_NONE
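A short sketch of the pitfall described in the new note: :class:`bool` values are
written unquoted under :data:`!QUOTE_NONNUMERIC`, so reading them back in the same
mode fails::

    import csv
    import io

    buf = io.StringIO()
    csv.writer(buf, quoting=csv.QUOTE_NONNUMERIC).writerow(["flag", True])
    print(buf.getvalue())   # "flag",True  -- bool is treated as numeric, left unquoted
    buf.seek(0)
    reader = csv.reader(buf, quoting=csv.QUOTE_NONNUMERIC)
    # next(reader) raises ValueError: could not convert string to float: 'True'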
diff --git a/Doc/library/dbm.rst b/Doc/library/dbm.rst
index 36221c026d6..39e287b1521 100644
--- a/Doc/library/dbm.rst
+++ b/Doc/library/dbm.rst
@@ -15,10 +15,16 @@
* :mod:`dbm.ndbm`
If none of these modules are installed, the
-slow-but-simple implementation in module :mod:`dbm.dumb` will be used. There
+slow-but-simple implementation in module :mod:`dbm.dumb` will be used. There
is a `third party interface <https://www.jcea.es/programacion/pybsddb.htm>`_ to
the Oracle Berkeley DB.
+.. note::
+ None of the underlying modules will automatically shrink the disk space used by
+ the database file. However, :mod:`dbm.sqlite3`, :mod:`dbm.gnu` and :mod:`dbm.dumb`
+ provide a :meth:`!reorganize` method that can be used for this purpose.
+
+
.. exception:: error
A tuple containing the exceptions that can be raised by each of the supported
@@ -186,6 +192,17 @@ or any other SQLite browser, including the SQLite CLI.
The Unix file access mode of the file (default: octal ``0o666``),
used only when the database has to be created.
+ .. method:: sqlite3.reorganize()
+
+ If you have carried out a lot of deletions and would like to shrink the space
+ used on disk, this method will reorganize the database; otherwise, deleted file
+ space will be kept and reused as new (key, value) pairs are added.
+
+ .. note::
+ While reorganizing, as much as twice the size of the original database is required
+ in free disk space. Be aware that this factor differs for each :mod:`dbm` submodule.
+
+ .. versionadded:: next
:mod:`dbm.gnu` --- GNU database manager
---------------------------------------
@@ -237,6 +254,9 @@ functionality like crash tolerance.
* ``'s'``: Synchronized mode.
Changes to the database will be written immediately to the file.
* ``'u'``: Do not lock database.
+ * ``'m'``: Do not use :manpage:`mmap(2)`.
+ This may harm performance, but improve crash tolerance.
+ .. versionadded:: next
Not all flags are valid for all versions of GDBM.
See the :data:`open_flags` member for a list of supported flag characters.
@@ -284,6 +304,10 @@ functionality like crash tolerance.
reorganization; otherwise, deleted file space will be kept and reused as new
(key, value) pairs are added.
+ .. note::
+ While reorganizing, as much as the size of the original database is required
+ in free disk space. Be aware that this factor differs for each :mod:`dbm` submodule.
+
.. method:: gdbm.sync()
When the database has been opened in fast mode, this method forces any
@@ -438,6 +462,11 @@ The :mod:`!dbm.dumb` module defines the following:
with a sufficiently large/complex entry due to stack depth limitations in
Python's AST compiler.
+ .. warning::
+ :mod:`dbm.dumb` does not support concurrent read/write access. (Multiple
+ simultaneous read accesses are safe.) When a program has the database open
+ for writing, no other program should have it open for reading or writing.
+
.. versionchanged:: 3.5
:func:`~dbm.dumb.open` always creates a new database when *flag* is ``'n'``.
@@ -460,3 +489,15 @@ The :mod:`!dbm.dumb` module defines the following:
.. method:: dumbdbm.close()
Close the database.
+
+ .. method:: dumbdbm.reorganize()
+
+ If you have carried out a lot of deletions and would like to shrink the space
+ used on disk, this method will reorganize the database; otherwise, deleted file
+ space will not be reused.
+
+ .. note::
+ While reorganizing, no additional free disk space is required. Be aware that
+ this requirement differs for each :mod:`dbm` submodule.
+
+ .. versionadded:: next
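A hedged sketch of the new :meth:`!reorganize` method, shown here with
:mod:`dbm.sqlite3`; the filename and keys are illustrative::

    import dbm.sqlite3

    db = dbm.sqlite3.open("cache.db", "c")
    try:
        for i in range(1000):
            db[f"key{i}"] = b"x" * 1024
        for i in range(1000):
            del db[f"key{i}"]
        db.reorganize()   # reclaim the space left behind by the deleted entries
    finally:
        db.close()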
diff --git a/Doc/library/doctest.rst b/Doc/library/doctest.rst
index b86fef9fd6f..fb43cf918b8 100644
--- a/Doc/library/doctest.rst
+++ b/Doc/library/doctest.rst
@@ -174,7 +174,7 @@ with assorted summaries at the end.
You can force verbose mode by passing ``verbose=True`` to :func:`testmod`, or
prohibit it by passing ``verbose=False``. In either of those cases,
-``sys.argv`` is not examined by :func:`testmod` (so passing ``-v`` or not
+:data:`sys.argv` is not examined by :func:`testmod` (so passing ``-v`` or not
has no effect).
There is also a command line shortcut for running :func:`testmod`, see section
@@ -231,7 +231,7 @@ documentation::
As with :func:`testmod`, :func:`testfile` won't display anything unless an
example fails. If an example does fail, then the failing example(s) and the
cause(s) of the failure(s) are printed to stdout, using the same format as
-:func:`testmod`.
+:func:`!testmod`.
By default, :func:`testfile` looks for files in the calling module's directory.
See section :ref:`doctest-basic-api` for a description of the optional arguments
@@ -311,6 +311,9 @@ Which Docstrings Are Examined?
The module docstring, and all function, class and method docstrings are
searched. Objects imported into the module are not searched.
+.. attribute:: module.__test__
+ :no-typesetting:
+
In addition, there are cases when you want tests to be part of a module but not part
of the help text, which requires that the tests not be included in the docstring.
Doctest looks for a module-level variable called ``__test__`` and uses it to locate other
@@ -533,7 +536,7 @@ Some details you should read once, but won't need to remember:
* The interactive shell omits the traceback header line for some
:exc:`SyntaxError`\ s. But doctest uses the traceback header line to
distinguish exceptions from non-exceptions. So in the rare case where you need
- to test a :exc:`SyntaxError` that omits the traceback header, you will need to
+ to test a :exc:`!SyntaxError` that omits the traceback header, you will need to
manually add the traceback header line to your test example.
.. index:: single: ^ (caret); marker
@@ -860,15 +863,15 @@ The :const:`ELLIPSIS` directive gives a nice approach for the last example:
<C object at 0x...>
Floating-point numbers are also subject to small output variations across
-platforms, because Python defers to the platform C library for float formatting,
-and C libraries vary widely in quality here. ::
+platforms, because Python defers to the platform C library for some
+floating-point calculations, and C libraries vary widely in quality here. ::
- >>> 1./7 # risky
- 0.14285714285714285
- >>> print(1./7) # safer
- 0.142857142857
- >>> print(round(1./7, 6)) # much safer
- 0.142857
+ >>> 1000**0.1 # risky
+ 1.9952623149688797
+ >>> round(1000**0.1, 9) # safer
+ 1.995262315
+ >>> print(f'{1000**0.1:.4f}') # much safer
+ 1.9953
Numbers of the form ``I/2.**J`` are safe across all platforms, and I often
contrive doctest examples to produce numbers of that form::
@@ -938,13 +941,13 @@ and :ref:`doctest-simple-testfile`.
Optional argument *verbose* prints lots of stuff if true, and prints only
failures if false; by default, or if ``None``, it's true if and only if ``'-v'``
- is in ``sys.argv``.
+ is in :data:`sys.argv`.
Optional argument *report* prints a summary at the end when true, else prints
nothing at the end. In verbose mode, the summary is detailed, else the summary
is very brief (in fact, empty if all tests passed).
- Optional argument *optionflags* (default value 0) takes the
+ Optional argument *optionflags* (default value ``0``) takes the
:ref:`bitwise OR <bitwise>` of option flags.
See section :ref:`doctest-options`.
@@ -1043,12 +1046,15 @@ from text files and modules with doctests:
Convert doctest tests from one or more text files to a
:class:`unittest.TestSuite`.
- The returned :class:`unittest.TestSuite` is to be run by the unittest framework
- and runs the interactive examples in each file. If an example in any file
- fails, then the synthesized unit test fails, and a :exc:`failureException`
- exception is raised showing the name of the file containing the test and a
- (sometimes approximate) line number. If all the examples in a file are
- skipped, then the synthesized unit test is also marked as skipped.
+ The returned :class:`unittest.TestSuite` is to be run by the unittest
+ framework and runs the interactive examples in each file.
+ Each file is run as a separate unit test, and each example in a file
+ is run as a :ref:`subtest <subtests>`.
+ If any example in a file fails, then the synthesized unit test fails.
+ The traceback for failure or error contains the name of the file
+ containing the test and a (sometimes approximate) line number.
+ If all the examples in a file are skipped, then the synthesized unit
+ test is also marked as skipped.
Pass one or more paths (as strings) to text files to be examined.
@@ -1078,13 +1084,14 @@ from text files and modules with doctests:
Optional argument *setUp* specifies a set-up function for the test suite.
This is called before running the tests in each file. The *setUp* function
- will be passed a :class:`DocTest` object. The setUp function can access the
- test globals as the *globs* attribute of the test passed.
+ will be passed a :class:`DocTest` object. The *setUp* function can access the
+ test globals as the :attr:`~DocTest.globs` attribute of the test passed.
Optional argument *tearDown* specifies a tear-down function for the test
suite. This is called after running the tests in each file. The *tearDown*
- function will be passed a :class:`DocTest` object. The setUp function can
- access the test globals as the *globs* attribute of the test passed.
+ function will be passed a :class:`DocTest` object. The *tearDown* function can
+ access the test globals as the :attr:`~DocTest.globs` attribute of the test
+ passed.
Optional argument *globs* is a dictionary containing the initial global
variables for the tests. A new copy of this dictionary is created for each
@@ -1105,16 +1112,22 @@ from text files and modules with doctests:
The global ``__file__`` is added to the globals provided to doctests loaded
from a text file using :func:`DocFileSuite`.
+ .. versionchanged:: next
+ Run each example as a :ref:`subtest <subtests>`.
+
.. function:: DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None, setUp=None, tearDown=None, optionflags=0, checker=None)
Convert doctest tests for a module to a :class:`unittest.TestSuite`.
- The returned :class:`unittest.TestSuite` is to be run by the unittest framework
- and runs each doctest in the module. If any of the doctests fail, then the
- synthesized unit test fails, and a :exc:`failureException` exception is raised
- showing the name of the file containing the test and a (sometimes approximate)
- line number. If all the examples in a docstring are skipped, then the
+ The returned :class:`unittest.TestSuite` is to be run by the unittest
+ framework and runs each doctest in the module.
+ Each docstring is run as a separate unit test, and each example in
+ a docstring is run as a :ref:`subtest <subtests>`.
+ If any of the doctests fail, then the synthesized unit test fails.
+ The traceback for failure or error contains the name of the file
+ containing the test and a (sometimes approximate) line number.
+ If all the examples in a docstring are skipped, then the
synthesized unit test is also marked as skipped.
Optional argument *module* provides the module to be tested. It can be a module
@@ -1123,7 +1136,7 @@ from text files and modules with doctests:
Optional argument *globs* is a dictionary containing the initial global
variables for the tests. A new copy of this dictionary is created for each
- test. By default, *globs* is a new empty dictionary.
+ test. By default, *globs* is the module's :attr:`~module.__dict__`.
Optional argument *extraglobs* specifies an extra set of global variables, which
is merged into *globs*. By default, no extra globals are used.
@@ -1132,7 +1145,7 @@ from text files and modules with doctests:
drop-in replacement) that is used to extract doctests from the module.
Optional arguments *setUp*, *tearDown*, and *optionflags* are the same as for
- function :func:`DocFileSuite` above.
+ function :func:`DocFileSuite` above, but they are called for each docstring.
This function uses the same search technique as :func:`testmod`.
@@ -1140,11 +1153,8 @@ from text files and modules with doctests:
:func:`DocTestSuite` returns an empty :class:`unittest.TestSuite` if *module*
contains no docstrings instead of raising :exc:`ValueError`.
-.. exception:: failureException
-
- When doctests which have been converted to unit tests by :func:`DocFileSuite`
- or :func:`DocTestSuite` fail, this exception is raised showing the name of
- the file containing the test and a (sometimes approximate) line number.
+ .. versionchanged:: next
+ Run each example as a :ref:`subtest <subtests>`.
Under the covers, :func:`DocTestSuite` creates a :class:`unittest.TestSuite` out
of :class:`!doctest.DocTestCase` instances, and :class:`!DocTestCase` is a
@@ -1158,15 +1168,15 @@ of :class:`!DocTestCase`.
So both ways of creating a :class:`unittest.TestSuite` run instances of
:class:`!DocTestCase`. This is important for a subtle reason: when you run
-:mod:`doctest` functions yourself, you can control the :mod:`doctest` options in
-use directly, by passing option flags to :mod:`doctest` functions. However, if
-you're writing a :mod:`unittest` framework, :mod:`unittest` ultimately controls
+:mod:`doctest` functions yourself, you can control the :mod:`!doctest` options in
+use directly, by passing option flags to :mod:`!doctest` functions. However, if
+you're writing a :mod:`unittest` framework, :mod:`!unittest` ultimately controls
when and how tests get run. The framework author typically wants to control
-:mod:`doctest` reporting options (perhaps, e.g., specified by command line
-options), but there's no way to pass options through :mod:`unittest` to
-:mod:`doctest` test runners.
+:mod:`!doctest` reporting options (perhaps, e.g., specified by command line
+options), but there's no way to pass options through :mod:`!unittest` to
+:mod:`!doctest` test runners.
-For this reason, :mod:`doctest` also supports a notion of :mod:`doctest`
+For this reason, :mod:`doctest` also supports a notion of :mod:`!doctest`
reporting flags specific to :mod:`unittest` support, via this function:
@@ -1181,12 +1191,12 @@ reporting flags specific to :mod:`unittest` support, via this function:
:mod:`unittest`: the :meth:`!runTest` method of :class:`!DocTestCase` looks at
the option flags specified for the test case when the :class:`!DocTestCase`
instance was constructed. If no reporting flags were specified (which is the
- typical and expected case), :mod:`!doctest`'s :mod:`unittest` reporting flags are
+ typical and expected case), :mod:`!doctest`'s :mod:`!unittest` reporting flags are
:ref:`bitwise ORed <bitwise>` into the option flags, and the option flags
so augmented are passed to the :class:`DocTestRunner` instance created to
run the doctest. If any reporting flags were specified when the
:class:`!DocTestCase` instance was constructed, :mod:`!doctest`'s
- :mod:`unittest` reporting flags are ignored.
+ :mod:`!unittest` reporting flags are ignored.
The value of the :mod:`unittest` reporting flags in effect before the function
was called is returned by the function.
@@ -1279,7 +1289,7 @@ DocTest Objects
.. attribute:: filename
The name of the file that this :class:`DocTest` was extracted from; or
- ``None`` if the filename is unknown, or if the :class:`DocTest` was not
+ ``None`` if the filename is unknown, or if the :class:`!DocTest` was not
extracted from a file.
@@ -1419,10 +1429,10 @@ DocTestFinder objects
The globals for each :class:`DocTest` is formed by combining *globs* and
*extraglobs* (bindings in *extraglobs* override bindings in *globs*). A new
- shallow copy of the globals dictionary is created for each :class:`DocTest`.
- If *globs* is not specified, then it defaults to the module's *__dict__*, if
- specified, or ``{}`` otherwise. If *extraglobs* is not specified, then it
- defaults to ``{}``.
+ shallow copy of the globals dictionary is created for each :class:`!DocTest`.
+ If *globs* is not specified, then it defaults to the module's
+ :attr:`~module.__dict__`, if specified, or ``{}`` otherwise.
+ If *extraglobs* is not specified, then it defaults to ``{}``.
.. _doctest-doctestparser:
@@ -1446,7 +1456,7 @@ DocTestParser objects
:class:`DocTest` object.
*globs*, *name*, *filename*, and *lineno* are attributes for the new
- :class:`DocTest` object. See the documentation for :class:`DocTest` for more
+ :class:`!DocTest` object. See the documentation for :class:`DocTest` for more
information.
@@ -1461,7 +1471,7 @@ DocTestParser objects
Divide the given string into examples and intervening text, and return them as
a list of alternating :class:`Example`\ s and strings. Line numbers for the
- :class:`Example`\ s are 0-based. The optional argument *name* is a name
+ :class:`!Example`\ s are 0-based. The optional argument *name* is a name
identifying this string, and is only used for error messages.
@@ -1501,14 +1511,14 @@ DocTestRunner objects
:class:`OutputChecker`. This comparison may be customized with a number of
option flags; see section :ref:`doctest-options` for more information. If the
option flags are insufficient, then the comparison may also be customized by
- passing a subclass of :class:`OutputChecker` to the constructor.
+ passing a subclass of :class:`!OutputChecker` to the constructor.
The test runner's display output can be controlled in two ways. First, an output
function can be passed to :meth:`run`; this function will be called
with strings that should be displayed. It defaults to ``sys.stdout.write``. If
capturing the output is not sufficient, then the display output can be also
customized by subclassing DocTestRunner, and overriding the methods
- :meth:`report_start`, :meth:`report_success`,
+ :meth:`report_skip`, :meth:`report_start`, :meth:`report_success`,
:meth:`report_unexpected_exception`, and :meth:`report_failure`.
The optional keyword argument *checker* specifies the :class:`OutputChecker`
@@ -1533,6 +1543,19 @@ DocTestRunner objects
:class:`DocTestRunner` defines the following methods:
+ .. method:: report_skip(out, test, example)
+
+ Report that the given example was skipped. This method is provided to
+ allow subclasses of :class:`DocTestRunner` to customize their output; it
+ should not be called directly.
+
+ *example* is the example about to be processed. *test* is the test
+ containing *example*. *out* is the output function that was passed to
+ :meth:`DocTestRunner.run`.
+
+ .. versionadded:: next
+
+
.. method:: report_start(out, test, example)
Report that the test runner is about to process the given example. This method
@@ -1540,7 +1563,7 @@ DocTestRunner objects
output; it should not be called directly.
*example* is the example about to be processed. *test* is the test
- *containing example*. *out* is the output function that was passed to
+ containing *example*. *out* is the output function that was passed to
:meth:`DocTestRunner.run`.
@@ -1940,7 +1963,7 @@ several options for organizing tests:
containing test cases for the named topics. These functions can be included in
the same file as the module, or separated out into a separate test file.
-* Define a ``__test__`` dictionary mapping from regression test topics to
+* Define a :attr:`~module.__test__` dictionary mapping from regression test topics to
docstrings containing test cases.
When you have placed your tests in a module, the module can itself be the test
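For context, a typical way these suites are wired into :mod:`unittest`; with this
patch each docstring (or file) becomes a unit test and each example a subtest.
``mymodule`` and ``README.txt`` are hypothetical::

    import doctest
    import unittest

    import mymodule  # hypothetical module whose docstrings contain doctests

    def load_tests(loader, tests, ignore):
        tests.addTests(doctest.DocTestSuite(mymodule))
        tests.addTests(doctest.DocFileSuite("README.txt"))
        return tests

    if __name__ == "__main__":
        unittest.main()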
diff --git a/Doc/library/hashlib.rst b/Doc/library/hashlib.rst
index 4818a4944a5..8bba6700930 100644
--- a/Doc/library/hashlib.rst
+++ b/Doc/library/hashlib.rst
@@ -94,6 +94,13 @@ accessible by name via :func:`new`. See :data:`algorithms_available`.
OpenSSL does not provide we fall back to a verified implementation from
the `HACL\* project`_.
+.. deprecated-removed:: 3.15 3.19
+ The undocumented ``string`` keyword parameter in :func:`!_hashlib.new`
+ and hash-named constructors such as :func:`!_md5.md5` is deprecated.
+ Prefer passing the initial data as a positional argument for maximum
+ backwards compatibility.
+
+
Usage
-----
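A brief sketch contrasting the preferred positional spelling with the deprecated
keyword form; the deprecated call is shown only in a comment::

    import hashlib

    digest = hashlib.sha256(b"payload").hexdigest()   # preferred: positional data
    # Deprecated spelling slated for removal in 3.19 (per this patch):
    #   hashlib.sha256(string=b"payload")
    print(digest)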
diff --git a/Doc/library/math.rst b/Doc/library/math.rst
index 11d3b756e21..c8061fb1638 100644
--- a/Doc/library/math.rst
+++ b/Doc/library/math.rst
@@ -53,6 +53,8 @@ noted otherwise, all return values are floats.
:func:`frexp(x) <frexp>` Mantissa and exponent of *x*
:func:`isclose(a, b, rel_tol, abs_tol) <isclose>` Check if the values *a* and *b* are close to each other
:func:`isfinite(x) <isfinite>` Check if *x* is neither an infinity nor a NaN
+:func:`isnormal(x) <isnormal>` Check if *x* is a normal number
+:func:`issubnormal(x) <issubnormal>` Check if *x* is a subnormal number
:func:`isinf(x) <isinf>` Check if *x* is a positive or negative infinity
:func:`isnan(x) <isnan>` Check if *x* is a NaN (not a number)
:func:`ldexp(x, i) <ldexp>` ``x * (2**i)``, inverse of function :func:`frexp`
@@ -373,6 +375,24 @@ Floating point manipulation functions
.. versionadded:: 3.2
+.. function:: isnormal(x)
+
+ Return ``True`` if *x* is a normal number, that is, a finite
+ nonzero number that is not a subnormal (see :func:`issubnormal`).
+ Return ``False`` otherwise.
+
+ .. versionadded:: next
+
+
+.. function:: issubnormal(x)
+
+ Return ``True`` if *x* is a subnormal number, that is, a finite
+ nonzero number with a magnitude smaller than the smallest positive normal
+ number (see :data:`sys.float_info.min`). Return ``False`` otherwise.
+
+ .. versionadded:: next
+
+
.. function:: isinf(x)
Return ``True`` if *x* is a positive or negative infinity, and
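A small illustration of the two new predicates, following the definitions above::

    import math
    import sys

    tiny = sys.float_info.min          # smallest positive *normal* float
    print(math.isnormal(1.0))          # True
    print(math.isnormal(0.0))          # False: zero is neither normal nor subnormal
    print(math.isnormal(tiny / 2))     # False: below the normal range
    print(math.issubnormal(tiny / 2))  # True
    print(math.issubnormal(tiny))      # False: exactly the smallest normal number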
diff --git a/Doc/library/os.path.rst b/Doc/library/os.path.rst
index ecbbc1d7605..f72aee19d8f 100644
--- a/Doc/library/os.path.rst
+++ b/Doc/library/os.path.rst
@@ -408,9 +408,26 @@ the :mod:`glob` module.)
system). On Windows, this function will also resolve MS-DOS (also called 8.3)
style names such as ``C:\\PROGRA~1`` to ``C:\\Program Files``.
- If a path doesn't exist or a symlink loop is encountered, and *strict* is
- ``True``, :exc:`OSError` is raised. If *strict* is ``False`` these errors
- are ignored, and so the result might be missing or otherwise inaccessible.
+ By default, the path is evaluated up to the first component that does not
+ exist, is a symlink loop, or whose evaluation raises :exc:`OSError`.
+ All such components are appended unchanged to the existing part of the path.
+
+ Some errors that are handled this way include "access denied", "not a
+ directory", or "bad argument to internal function". Thus, the
+ resulting path may be missing or inaccessible, may still contain
+ links or loops, and may traverse non-directories.
+
+ This behavior can be modified by keyword arguments:
+
+ If *strict* is ``True``, the first error encountered when evaluating the path is
+ re-raised.
+ In particular, :exc:`FileNotFoundError` is raised if *path* does not exist,
+ or another :exc:`OSError` if it is otherwise inaccessible.
+
+ If *strict* is :py:data:`os.path.ALLOW_MISSING`, errors other than
+ :exc:`FileNotFoundError` are re-raised (as with ``strict=True``).
+ Thus, the returned path will not contain any symbolic links, but the named
+ file and some of its parent directories may be missing.
.. note::
This function emulates the operating system's procedure for making a path
@@ -429,6 +446,15 @@ the :mod:`glob` module.)
.. versionchanged:: 3.10
The *strict* parameter was added.
+ .. versionchanged:: next
+ The :py:data:`~os.path.ALLOW_MISSING` value for the *strict* parameter
+ was added.
+
+.. data:: ALLOW_MISSING
+
+ Special value used for the *strict* argument in :func:`realpath`.
+
+ .. versionadded:: next
.. function:: relpath(path, start=os.curdir)
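A hedged sketch of the new *strict* mode; the path is illustrative::

    import os.path

    # Resolve symlinks in the existing part of the path, but tolerate a
    # missing final component instead of raising FileNotFoundError.
    target = os.path.realpath("/var/log/myapp/output.log",
                              strict=os.path.ALLOW_MISSING)
    print(target)   # symlink-free path; the file itself may not exist yet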
diff --git a/Doc/library/shelve.rst b/Doc/library/shelve.rst
index 6e74a59b82b..23a2e0c3d0c 100644
--- a/Doc/library/shelve.rst
+++ b/Doc/library/shelve.rst
@@ -75,8 +75,15 @@ Two additional methods are supported:
Write back all entries in the cache if the shelf was opened with *writeback*
set to :const:`True`. Also empty the cache and synchronize the persistent
- dictionary on disk, if feasible. This is called automatically when the shelf
- is closed with :meth:`close`.
+ dictionary on disk, if feasible. This is called automatically when
+ :meth:`reorganize` is called or the shelf is closed with :meth:`close`.
+
+.. method:: Shelf.reorganize()
+
+ Calls :meth:`sync` and attempts to shrink space used on disk by removing empty
+ space resulting from deletions.
+
+ .. versionadded:: next
.. method:: Shelf.close()
@@ -116,6 +123,11 @@ Restrictions
* On macOS :mod:`dbm.ndbm` can silently corrupt the database file on updates,
which can cause hard crashes when trying to read from the database.
+* :meth:`Shelf.reorganize` may not be available for all database packages and
+ may temporarily increase resource usage (especially disk space) when called.
+ Additionally, it will never run automatically and instead needs to be called
+ explicitly.
+
.. class:: Shelf(dict, protocol=None, writeback=False, keyencoding='utf-8')
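A hedged sketch of :meth:`!Shelf.reorganize`; the filename and key prefix are
illustrative, and the method may be unavailable for some :mod:`dbm` backends as
noted above::

    import shelve

    with shelve.open("session-cache") as shelf:
        stale = [key for key in shelf if key.startswith("tmp:")]
        for key in stale:
            del shelf[key]
        shelf.reorganize()   # shrink the on-disk file after the deletions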
diff --git a/Doc/library/socket.rst b/Doc/library/socket.rst
index 75fd637045d..bc89a3228f0 100644
--- a/Doc/library/socket.rst
+++ b/Doc/library/socket.rst
@@ -1492,7 +1492,7 @@ The :mod:`socket` module also offers various network-related services:
The *fds* parameter is a sequence of file descriptors.
Consult :meth:`~socket.sendmsg` for the documentation of these parameters.
- .. availability:: Unix, Windows, not WASI.
+ .. availability:: Unix, not WASI.
Unix platforms supporting :meth:`~socket.sendmsg`
and :const:`SCM_RIGHTS` mechanism.
@@ -1506,9 +1506,9 @@ The :mod:`socket` module also offers various network-related services:
Return ``(msg, list(fds), flags, addr)``.
Consult :meth:`~socket.recvmsg` for the documentation of these parameters.
- .. availability:: Unix, Windows, not WASI.
+ .. availability:: Unix, not WASI.
- Unix platforms supporting :meth:`~socket.sendmsg`
+ Unix platforms supporting :meth:`~socket.recvmsg`
and :const:`SCM_RIGHTS` mechanism.
.. versionadded:: 3.9
diff --git a/Doc/library/string.rst b/Doc/library/string.rst
index c4012483a52..23e15780075 100644
--- a/Doc/library/string.rst
+++ b/Doc/library/string.rst
@@ -328,7 +328,7 @@ The general form of a *standard format specifier* is:
sign: "+" | "-" | " "
width_and_precision: [`width_with_grouping`][`precision_with_grouping`]
width_with_grouping: [`width`][`grouping`]
- precision_with_grouping: "." [`precision`][`grouping`]
+ precision_with_grouping: "." [`precision`][`grouping`] | "." `grouping`
width: `~python-grammar:digit`+
precision: `~python-grammar:digit`+
grouping: "," | "_"
diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst
index f9cb5495e60..7cec108a5bd 100644
--- a/Doc/library/tarfile.rst
+++ b/Doc/library/tarfile.rst
@@ -255,6 +255,15 @@ The :mod:`tarfile` module defines the following exceptions:
Raised to refuse extracting a symbolic link pointing outside the destination
directory.
+.. exception:: LinkFallbackError
+
+ Raised to refuse emulating a link (hard or symbolic) by extracting another
+ archive member, when that member would be rejected by the extraction filter.
+ The exception that was raised to reject the replacement member is available
+ as :attr:`!BaseException.__context__`.
+
+ .. versionadded:: next
+
The following constants are available at the module level:
@@ -1068,6 +1077,12 @@ reused in custom filters:
Implements the ``'data'`` filter.
In addition to what ``tar_filter`` does:
+ - Normalize link targets (:attr:`TarInfo.linkname`) using
+ :func:`os.path.normpath`.
+ Note that this removes internal ``..`` components, which may change the
+ meaning of the link if the path in :attr:`!TarInfo.linkname` traverses
+ symbolic links.
+
- :ref:`Refuse <tarfile-extraction-refuse>` to extract links (hard or soft)
that link to absolute paths, or ones that link outside the destination.
@@ -1099,6 +1114,10 @@ reused in custom filters:
Note that this filter does not block *all* dangerous archive features.
See :ref:`tarfile-further-verification` for details.
+ .. versionchanged:: next
+
+ Link targets are now normalized.
+
.. _tarfile-extraction-refuse:
@@ -1127,6 +1146,7 @@ Here is an incomplete list of things to consider:
* Extract to a :func:`new temporary directory <tempfile.mkdtemp>`
to prevent e.g. exploiting pre-existing links, and to make it easier to
clean up after a failed extraction.
+* Disallow symbolic links if you do not need the functionality.
* When working with untrusted data, use external (e.g. OS-level) limits on
disk, memory and CPU usage.
* Check filenames against an allow-list of characters
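A minimal sketch of extracting an untrusted archive with the ``'data'`` filter,
which with this patch also normalizes link targets and may raise the new
:exc:`!LinkFallbackError`; the archive name is illustrative::

    import tarfile

    with tarfile.open("untrusted.tar.gz") as tf:
        try:
            tf.extractall(path="dest", filter="data")
        except tarfile.FilterError as exc:
            # Rejections from the extraction filter end up here.
            print("member rejected:", exc)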
diff --git a/Doc/using/android.rst b/Doc/using/android.rst
index 65bf23dc994..cb762310328 100644
--- a/Doc/using/android.rst
+++ b/Doc/using/android.rst
@@ -63,3 +63,12 @@ link to the relevant file.
* Add code to your app to :source:`start Python in embedded mode
<Android/testbed/app/src/main/c/main_activity.c>`. This will need to be C code
called via JNI.
+
+Building a Python package for Android
+-------------------------------------
+
+Python packages can be built for Android as wheels and released on PyPI. The
+recommended tool for doing this is `cibuildwheel
+<https://cibuildwheel.pypa.io/en/stable/platforms/#android>`__, which automates
+all the details of setting up a cross-compilation environment, building the
+wheel, and testing it on an emulator.
diff --git a/Doc/whatsnew/3.14.rst b/Doc/whatsnew/3.14.rst
index 561d1a8914b..45e68aea5fb 100644
--- a/Doc/whatsnew/3.14.rst
+++ b/Doc/whatsnew/3.14.rst
@@ -342,15 +342,16 @@ For example the following expressions are now valid:
.. code-block:: python
try:
- release_new_sleep_token_album()
- except AlbumNotFound, SongsTooGoodToBeReleased:
- print("Sorry, no new album this year.")
+ connect_to_server()
+ except TimeoutError, ConnectionRefusedError:
+ print("Network issue encountered.")
# The same applies to except* (for exception groups):
+
try:
- release_new_sleep_token_album()
- except* AlbumNotFound, SongsTooGoodToBeReleased:
- print("Sorry, no new album this year.")
+ connect_to_server()
+ except* TimeoutError, ConnectionRefusedError:
+ print("Network issue encountered.")
Check :pep:`758` for more details.
@@ -1454,7 +1455,7 @@ math
----
* Added more detailed error messages for domain errors in the module.
- (Contributed by by Charlie Zhao and Sergey B Kirpichev in :gh:`101410`.)
+ (Contributed by Charlie Zhao and Sergey B Kirpichev in :gh:`101410`.)
mimetypes
diff --git a/Doc/whatsnew/3.15.rst b/Doc/whatsnew/3.15.rst
index 6d1f653f086..daf3e8fb6c2 100644
--- a/Doc/whatsnew/3.15.rst
+++ b/Doc/whatsnew/3.15.rst
@@ -89,6 +89,18 @@ New modules
Improved modules
================
+dbm
+---
+
+* Added new :meth:`!reorganize` methods to :mod:`dbm.dumb` and :mod:`dbm.sqlite3`
+ which allow reclaiming free space previously occupied by deleted entries.
+ (Contributed by Andrea Oliveri in :gh:`134004`.)
+
+* Add the ``'m'`` flag for :func:`dbm.gnu.open`, which allows disabling
+ the use of :manpage:`mmap(2)`.
+ This may harm performance, but improve crash tolerance.
+ (Contributed by Serhiy Storchaka in :gh:`66234`.)
+
difflib
-------
@@ -96,6 +108,32 @@ difflib
class, and migrated the output to the HTML5 standard.
(Contributed by Jiahao Li in :gh:`134580`.)
+
+math
+----
+
+* Add :func:`math.isnormal` and :func:`math.issubnormal` functions.
+ (Contributed by Sergey B Kirpichev in :gh:`132908`.)
+
+
+os.path
+-------
+
+* The *strict* parameter to :func:`os.path.realpath` accepts a new value,
+ :data:`os.path.ALLOW_MISSING`.
+ If used, errors other than :exc:`FileNotFoundError` will be re-raised;
+ the resulting path can be missing but it will be free of symlinks.
+ (Contributed by Petr Viktorin for :cve:`2025-4517`.)
+
+
+shelve
+------
+
+* Added a new :meth:`!reorganize` method to :mod:`shelve` that recovers free
+ space previously occupied by deleted entries.
+ (Contributed by Andrea Oliveri in :gh:`134004`.)
+
+
ssl
---
@@ -104,6 +142,28 @@ ssl
(Contributed by Will Childs-Klein in :gh:`133624`.)
+tarfile
+-------
+
+* :func:`~tarfile.data_filter` now normalizes symbolic link targets in order to
+ avoid path traversal attacks.
+ (Contributed by Petr Viktorin in :gh:`127987` and :cve:`2025-4138`.)
+* :func:`~tarfile.TarFile.extractall` now skips fixing up directory attributes
+ when a directory was removed or replaced by another kind of file.
+ (Contributed by Petr Viktorin in :gh:`127987` and :cve:`2024-12718`.)
+* :func:`~tarfile.TarFile.extract` and :func:`~tarfile.TarFile.extractall`
+ now (re-)apply the extraction filter when substituting a link (hard or
+ symbolic) with a copy of another archive member, and when fixing up
+ directory attributes.
+ The former raises a new exception, :exc:`~tarfile.LinkFallbackError`.
+ (Contributed by Petr Viktorin for :cve:`2025-4330` and :cve:`2024-12718`.)
+* :func:`~tarfile.TarFile.extract` and :func:`~tarfile.TarFile.extractall`
+ no longer extract rejected members when
+ :func:`~tarfile.TarFile.errorlevel` is zero.
+ (Contributed by Matt Prodani and Petr Viktorin in :gh:`112887`
+ and :cve:`2025-4435`.)
+
+
zlib
----
@@ -129,8 +189,20 @@ module_name
Deprecated
==========
-* module_name:
- TODO
+hashlib
+-------
+
+* In hash function constructors such as :func:`~hashlib.new` or the
+ direct hash-named constructors such as :func:`~hashlib.md5` and
+ :func:`~hashlib.sha256`, the optional initial data parameter could
+ also be passed as a keyword argument named ``data=`` or ``string=`` in
+ various :mod:`hashlib` implementations.
+
+ Support for the ``string`` keyword argument name is now deprecated and
+ is slated for removal in Python 3.19. Prefer passing the initial data as
+ a positional argument for maximum backwards compatibility.
+
+ (Contributed by Bénédikt Tran in :gh:`134978`.)
.. Add deprecations above alphabetically, not here at the end.
diff --git a/Include/abstract.h b/Include/abstract.h
index b9199fc03a3..80f3298701d 100644
--- a/Include/abstract.h
+++ b/Include/abstract.h
@@ -138,7 +138,12 @@ extern "C" {
Delete attribute named attr_name, for object o. Returns
-1 on failure.
- This is the equivalent of the Python statement: del o.attr_name. */
+ This is the equivalent of the Python statement: del o.attr_name.
+
+ Implemented as a macro in the limited C API 3.12 and older. */
+#if defined(Py_LIMITED_API) && Py_LIMITED_API+0 < 0x030d0000
+# define PyObject_DelAttrString(O, A) PyObject_SetAttrString((O), (A), NULL)
+#endif
/* Implemented elsewhere:
@@ -147,7 +152,12 @@ extern "C" {
Delete attribute named attr_name, for object o. Returns -1
on failure. This is the equivalent of the Python
- statement: del o.attr_name. */
+ statement: del o.attr_name.
+
+ Implemented as a macro in the limited C API 3.12 and older. */
+#if defined(Py_LIMITED_API) && Py_LIMITED_API+0 < 0x030d0000
+# define PyObject_DelAttr(O, A) PyObject_SetAttr((O), (A), NULL)
+#endif
/* Implemented elsewhere:
diff --git a/Include/boolobject.h b/Include/boolobject.h
index 3037e61bbf6..b56e2baecaa 100644
--- a/Include/boolobject.h
+++ b/Include/boolobject.h
@@ -34,9 +34,16 @@ PyAPI_FUNC(int) Py_IsTrue(PyObject *x);
PyAPI_FUNC(int) Py_IsFalse(PyObject *x);
#define Py_IsFalse(x) Py_Is((x), Py_False)
-/* Macros for returning Py_True or Py_False, respectively */
-#define Py_RETURN_TRUE return Py_True
-#define Py_RETURN_FALSE return Py_False
+/* Macros for returning Py_True or Py_False, respectively.
+ * Only treat Py_True and Py_False as immortal in the limited C API 3.12
+ * and newer. */
+#if defined(Py_LIMITED_API) && Py_LIMITED_API+0 < 0x030c0000
+# define Py_RETURN_TRUE return Py_NewRef(Py_True)
+# define Py_RETURN_FALSE return Py_NewRef(Py_False)
+#else
+# define Py_RETURN_TRUE return Py_True
+# define Py_RETURN_FALSE return Py_False
+#endif
/* Function to return a bool from a C long */
PyAPI_FUNC(PyObject *) PyBool_FromLong(long);
diff --git a/Include/internal/mimalloc/mimalloc/internal.h b/Include/internal/mimalloc/mimalloc/internal.h
index 71b7ea702d6..a7daa3a40a4 100644
--- a/Include/internal/mimalloc/mimalloc/internal.h
+++ b/Include/internal/mimalloc/mimalloc/internal.h
@@ -634,10 +634,10 @@ static inline mi_block_t* mi_block_nextx( const void* null, const mi_block_t* bl
mi_track_mem_defined(block,sizeof(mi_block_t));
mi_block_t* next;
#ifdef MI_ENCODE_FREELIST
- next = (mi_block_t*)mi_ptr_decode(null, mi_atomic_load_relaxed(&block->next), keys);
+ next = (mi_block_t*)mi_ptr_decode(null, mi_atomic_load_relaxed((_Atomic(mi_encoded_t)*)&block->next), keys);
#else
MI_UNUSED(keys); MI_UNUSED(null);
- next = (mi_block_t*)mi_atomic_load_relaxed(&block->next);
+ next = (mi_block_t*)mi_atomic_load_relaxed((_Atomic(mi_encoded_t)*)&block->next);
#endif
mi_track_mem_noaccess(block,sizeof(mi_block_t));
return next;
diff --git a/Include/internal/mimalloc/mimalloc/types.h b/Include/internal/mimalloc/mimalloc/types.h
index 4f77bd7bc52..a04169f7fb8 100644
--- a/Include/internal/mimalloc/mimalloc/types.h
+++ b/Include/internal/mimalloc/mimalloc/types.h
@@ -50,6 +50,32 @@ terms of the MIT license. A copy of the license can be found in the file
#define mi_decl_cache_align
#endif
+#if (MI_DEBUG)
+#if defined(_MSC_VER)
+#define mi_decl_noreturn __declspec(noreturn)
+#elif (defined(__GNUC__) && (__GNUC__ >= 3)) || defined(__clang__)
+#define mi_decl_noreturn __attribute__((__noreturn__))
+#else
+#define mi_decl_noreturn
+#endif
+
+/*
+ * 'cold' attribute seems to have been fully supported since GCC 4.x.
+ * See https://github.com/gcc-mirror/gcc/commit/52bf96d2f299e9e6.
+ */
+#if (defined(__GNUC__) && (__GNUC__ >= 4)) || defined(__clang__)
+#define mi_decl_cold __attribute__((cold))
+#else
+#define mi_decl_cold
+#endif
+
+#if (defined(__GNUC__) && defined(__THROW))
+#define mi_decl_throw __THROW
+#else
+#define mi_decl_throw
+#endif
+#endif
+
// ------------------------------------------------------
// Variants
// ------------------------------------------------------
@@ -582,7 +608,8 @@ struct mi_heap_s {
#if (MI_DEBUG)
// use our own assertion to print without memory allocation
-void _mi_assert_fail(const char* assertion, const char* fname, unsigned int line, const char* func );
+mi_decl_noreturn mi_decl_cold mi_decl_throw
+void _mi_assert_fail(const char* assertion, const char* fname, unsigned int line, const char* func);
#define mi_assert(expr) ((expr) ? (void)0 : _mi_assert_fail(#expr,__FILE__,__LINE__,__func__))
#else
#define mi_assert(x)
diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h
index 092feeb40b0..239177deb4a 100644
--- a/Include/internal/pycore_ceval.h
+++ b/Include/internal/pycore_ceval.h
@@ -353,7 +353,8 @@ PyAPI_FUNC(_PyStackRef) _PyFloat_FromDouble_ConsumeInputs(_PyStackRef left, _PyS
extern int _PyRunRemoteDebugger(PyThreadState *tstate);
#endif
-_PyStackRef _PyForIter_NextWithIndex(PyObject *seq, _PyStackRef index);
+PyAPI_FUNC(_PyStackRef)
+_PyForIter_VirtualIteratorNext(PyThreadState* tstate, struct _PyInterpreterFrame* frame, _PyStackRef iter, _PyStackRef *index_ptr);
#ifdef __cplusplus
}
diff --git a/Include/internal/pycore_crossinterp.h b/Include/internal/pycore_crossinterp.h
index 12729274138..713ddc66ba7 100644
--- a/Include/internal/pycore_crossinterp.h
+++ b/Include/internal/pycore_crossinterp.h
@@ -317,7 +317,9 @@ typedef enum error_code {
_PyXI_ERR_ALREADY_RUNNING = -4,
_PyXI_ERR_MAIN_NS_FAILURE = -5,
_PyXI_ERR_APPLY_NS_FAILURE = -6,
- _PyXI_ERR_NOT_SHAREABLE = -7,
+ _PyXI_ERR_PRESERVE_FAILURE = -7,
+ _PyXI_ERR_EXC_PROPAGATION_FAILURE = -8,
+ _PyXI_ERR_NOT_SHAREABLE = -9,
} _PyXI_errcode;
@@ -350,16 +352,33 @@ typedef struct xi_session _PyXI_session;
PyAPI_FUNC(_PyXI_session *) _PyXI_NewSession(void);
PyAPI_FUNC(void) _PyXI_FreeSession(_PyXI_session *);
+typedef struct {
+ PyObject *preserved;
+ PyObject *excinfo;
+ _PyXI_errcode errcode;
+} _PyXI_session_result;
+PyAPI_FUNC(void) _PyXI_ClearResult(_PyXI_session_result *);
+
PyAPI_FUNC(int) _PyXI_Enter(
_PyXI_session *session,
PyInterpreterState *interp,
- PyObject *nsupdates);
-PyAPI_FUNC(void) _PyXI_Exit(_PyXI_session *session);
-
-PyAPI_FUNC(PyObject *) _PyXI_GetMainNamespace(_PyXI_session *);
-
-PyAPI_FUNC(PyObject *) _PyXI_ApplyCapturedException(_PyXI_session *session);
-PyAPI_FUNC(int) _PyXI_HasCapturedException(_PyXI_session *session);
+ PyObject *nsupdates,
+ _PyXI_session_result *);
+PyAPI_FUNC(int) _PyXI_Exit(
+ _PyXI_session *,
+ _PyXI_errcode,
+ _PyXI_session_result *);
+
+PyAPI_FUNC(PyObject *) _PyXI_GetMainNamespace(
+ _PyXI_session *,
+ _PyXI_errcode *);
+
+PyAPI_FUNC(int) _PyXI_Preserve(
+ _PyXI_session *,
+ const char *,
+ PyObject *,
+ _PyXI_errcode *);
+PyAPI_FUNC(PyObject *) _PyXI_GetPreserved(_PyXI_session_result *, const char *);
/*************/
diff --git a/Include/internal/pycore_lock.h b/Include/internal/pycore_lock.h
index 7484b05d7f2..32b60cc33a2 100644
--- a/Include/internal/pycore_lock.h
+++ b/Include/internal/pycore_lock.h
@@ -48,6 +48,9 @@ typedef enum _PyLockFlags {
// Handle signals if interrupted while waiting on the lock.
_PY_LOCK_HANDLE_SIGNALS = 2,
+
+ // Fail if interrupted by a signal while waiting on the lock.
+ _PY_FAIL_IF_INTERRUPTED = 4,
} _PyLockFlags;
// Lock a mutex with an optional timeout and additional options. See
diff --git a/Include/internal/pycore_magic_number.h b/Include/internal/pycore_magic_number.h
index cd1fc873623..347d9762f26 100644
--- a/Include/internal/pycore_magic_number.h
+++ b/Include/internal/pycore_magic_number.h
@@ -280,6 +280,7 @@ Known values:
Python 3.15a0 3650 (Initial version)
Python 3.15a1 3651 (Simplify LOAD_CONST)
Python 3.15a1 3652 (Virtual iterators)
+ Python 3.15a1 3653 (Fix handling of opcodes that may leave operands on the stack when optimizing LOAD_FAST)
Python 3.16 will start with 3700
@@ -293,7 +294,7 @@ PC/launcher.c must also be updated.
*/
-#define PYC_MAGIC_NUMBER 3652
+#define PYC_MAGIC_NUMBER 3653
/* This is equivalent to converting PYC_MAGIC_NUMBER to 2 bytes
(little-endian) and then appending b'\r\n'. */
#define PYC_MAGIC_NUMBER_TOKEN \
diff --git a/Include/internal/pycore_stackref.h b/Include/internal/pycore_stackref.h
index f2ecc30b053..87914767252 100644
--- a/Include/internal/pycore_stackref.h
+++ b/Include/internal/pycore_stackref.h
@@ -62,14 +62,15 @@ PyAPI_FUNC(void) _Py_stackref_record_borrow(_PyStackRef ref, const char *filenam
extern void _Py_stackref_associate(PyInterpreterState *interp, PyObject *obj, _PyStackRef ref);
static const _PyStackRef PyStackRef_NULL = { .index = 0 };
+static const _PyStackRef PyStackRef_ERROR = { .index = 2 };
// Use the first 3 even numbers for None, True and False.
// Odd numbers are reserved for (tagged) integers
-#define PyStackRef_None ((_PyStackRef){ .index = 2 } )
-#define PyStackRef_False ((_PyStackRef){ .index = 4 })
-#define PyStackRef_True ((_PyStackRef){ .index = 6 })
+#define PyStackRef_None ((_PyStackRef){ .index = 4 } )
+#define PyStackRef_False ((_PyStackRef){ .index = 6 })
+#define PyStackRef_True ((_PyStackRef){ .index = 8 })
-#define INITIAL_STACKREF_INDEX 8
+#define INITIAL_STACKREF_INDEX 10
static inline int
PyStackRef_IsNull(_PyStackRef ref)
@@ -77,6 +78,19 @@ PyStackRef_IsNull(_PyStackRef ref)
return ref.index == 0;
}
+static inline bool
+PyStackRef_IsError(_PyStackRef ref)
+{
+ return ref.index == 2;
+}
+
+static inline bool
+PyStackRef_IsValid(_PyStackRef ref)
+{
+ /* Invalid values are ERROR and NULL */
+ return !PyStackRef_IsError(ref) && !PyStackRef_IsNull(ref);
+}
+
static inline int
PyStackRef_IsTrue(_PyStackRef ref)
{
@@ -104,6 +118,7 @@ PyStackRef_IsTaggedInt(_PyStackRef ref)
static inline PyObject *
_PyStackRef_AsPyObjectBorrow(_PyStackRef ref, const char *filename, int linenumber)
{
+ assert(!PyStackRef_IsError(ref));
assert(!PyStackRef_IsTaggedInt(ref));
_Py_stackref_record_borrow(ref, filename, linenumber);
return _Py_stackref_get_object(ref);
@@ -155,6 +170,7 @@ _PyStackRef_CLOSE(_PyStackRef ref, const char *filename, int linenumber)
static inline void
_PyStackRef_XCLOSE(_PyStackRef ref, const char *filename, int linenumber)
{
+ assert(!PyStackRef_IsError(ref));
if (PyStackRef_IsNull(ref)) {
return;
}
@@ -165,6 +181,7 @@ _PyStackRef_XCLOSE(_PyStackRef ref, const char *filename, int linenumber)
static inline _PyStackRef
_PyStackRef_DUP(_PyStackRef ref, const char *filename, int linenumber)
{
+ assert(!PyStackRef_IsError(ref));
if (PyStackRef_IsTaggedInt(ref)) {
return ref;
}
@@ -241,9 +258,25 @@ PyStackRef_IsNullOrInt(_PyStackRef ref);
#else
#define Py_INT_TAG 3
+#define Py_TAG_INVALID 2
#define Py_TAG_REFCNT 1
#define Py_TAG_BITS 3
+static const _PyStackRef PyStackRef_ERROR = { .bits = Py_TAG_INVALID };
+
+static inline bool
+PyStackRef_IsError(_PyStackRef ref)
+{
+ return ref.bits == Py_TAG_INVALID;
+}
+
+static inline bool
+PyStackRef_IsValid(_PyStackRef ref)
+{
+ /* Invalid values are ERROR and NULL */
+ return ref.bits >= Py_INT_TAG;
+}
+
static inline bool
PyStackRef_IsTaggedInt(_PyStackRef i)
{
@@ -284,6 +317,7 @@ PyStackRef_IncrementTaggedIntNoOverflow(_PyStackRef ref)
static const _PyStackRef PyStackRef_NULL = { .bits = Py_TAG_DEFERRED};
+
#define PyStackRef_IsNull(stackref) ((stackref).bits == PyStackRef_NULL.bits)
#define PyStackRef_True ((_PyStackRef){.bits = ((uintptr_t)&_Py_TrueStruct) | Py_TAG_DEFERRED })
#define PyStackRef_False ((_PyStackRef){.bits = ((uintptr_t)&_Py_FalseStruct) | Py_TAG_DEFERRED })
diff --git a/Include/object.h b/Include/object.h
index 994cac1ad17..42aed614d4a 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -660,8 +660,13 @@ PyAPI_DATA(PyObject) _Py_NoneStruct; /* Don't use this directly */
PyAPI_FUNC(int) Py_IsNone(PyObject *x);
#define Py_IsNone(x) Py_Is((x), Py_None)
-/* Macro for returning Py_None from a function */
-#define Py_RETURN_NONE return Py_None
+/* Macro for returning Py_None from a function.
+ * Only treat Py_None as immortal in the limited C API 3.12 and newer. */
+#if defined(Py_LIMITED_API) && Py_LIMITED_API+0 < 0x030c0000
+# define Py_RETURN_NONE return Py_NewRef(Py_None)
+#else
+# define Py_RETURN_NONE return Py_None
+#endif
/*
Py_NotImplemented is a singleton used to signal that an operation is
diff --git a/InternalDocs/exception_handling.md b/InternalDocs/exception_handling.md
index 28589787e1f..9e38da4c862 100644
--- a/InternalDocs/exception_handling.md
+++ b/InternalDocs/exception_handling.md
@@ -8,7 +8,7 @@ The cost of raising an exception is increased, but not by much.
The following code:
-```
+```python
try:
g(0)
except:
@@ -18,7 +18,7 @@ except:
compiles into intermediate code like the following:
-```
+```python
RESUME 0
1 SETUP_FINALLY 8 (to L1)
@@ -118,13 +118,13 @@ All offsets and lengths are in code units, not bytes.
We want the format to be compact, but quickly searchable.
For it to be compact, it needs to have variable sized entries so that we can store common (small) offsets compactly, but handle large offsets if needed.
-For it to be searchable quickly, we need to support binary search giving us log(n) performance in all cases.
+For it to be searchable quickly, we need to support binary search giving us `log(n)` performance in all cases.
Binary search typically assumes fixed size entries, but that is not necessary, as long as we can identify the start of an entry.
It is worth noting that the size (end-start) is always smaller than the end, so we encode the entries as:
`start, size, target, depth, push-lasti`.
-Also, sizes are limited to 2**30 as the code length cannot exceed 2**31 and each code unit takes 2 bytes.
+Also, sizes are limited to `2**30` as the code length cannot exceed `2**31` and each code unit takes 2 bytes.
It also happens that depth is generally quite small.
So, we need to encode:
@@ -140,7 +140,7 @@ lasti (1 bit)
We need a marker for the start of the entry, so the first byte of entry will have the most significant bit set.
Since the most significant bit is reserved for marking the start of an entry, we have 7 bits per byte to encode offsets.
Encoding uses a standard varint encoding, but with only 7 bits instead of the usual 8.
-The 8 bits of a byte are (msb left) SXdddddd where S is the start bit. X is the extend bit meaning that the next byte is required to extend the offset.
+The 8 bits of a byte are (msb left) `SXdddddd` where `S` is the start bit. `X` is the extend bit meaning that the next byte is required to extend the offset.
In addition, we combine `depth` and `lasti` into a single value, `((depth<<1)+lasti)`, before encoding.
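To make the varint layout concrete, here is a small illustrative sketch (not the CPython encoder itself; it assumes values are written most-significant 6-bit chunk first) of how one table entry could be packed:

```python
def _encode_value(value, first_byte_of_entry=False):
    # Split *value* into 6-bit chunks, most significant chunk first.
    chunks = []
    while True:
        chunks.append(value & 0x3F)
        value >>= 6
        if not value:
            break
    chunks.reverse()
    out = bytearray()
    for i, chunk in enumerate(chunks):
        byte = chunk
        if i == 0 and first_byte_of_entry:
            byte |= 0x80      # S: marks the first byte of an entry
        if i < len(chunks) - 1:
            byte |= 0x40      # X: the next byte extends this value
        out.append(byte)
    return bytes(out)

def encode_entry(start, size, target, depth, lasti):
    # start, size and target are in code units; depth and lasti are
    # packed into a single value as described above.
    return (_encode_value(start, first_byte_of_entry=True)
            + _encode_value(size)
            + _encode_value(target)
            + _encode_value((depth << 1) + int(lasti)))
```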
diff --git a/Lib/_pydecimal.py b/Lib/_pydecimal.py
index 46fa9ffcb1e..781b38ec26b 100644
--- a/Lib/_pydecimal.py
+++ b/Lib/_pydecimal.py
@@ -6120,9 +6120,9 @@ _parse_format_specifier_regex = re.compile(r"""\A
(?P<no_neg_0>z)?
(?P<alt>\#)?
(?P<zeropad>0)?
-(?P<minimumwidth>(?!0)\d+)?
+(?P<minimumwidth>\d+)?
(?P<thousands_sep>[,_])?
-(?:\.(?P<precision>0|(?!0)\d+))?
+(?:\.(?P<precision>\d+))?
(?P<type>[eEfFgGn%])?
\z
""", re.VERBOSE|re.DOTALL)
diff --git a/Lib/argparse.py b/Lib/argparse.py
index d1a6350c3fd..83258cf3e0f 100644
--- a/Lib/argparse.py
+++ b/Lib/argparse.py
@@ -1534,7 +1534,7 @@ class _ActionsContainer(object):
action_name = kwargs.get('action')
action_class = self._pop_action_class(kwargs)
if not callable(action_class):
- raise ValueError('unknown action {action_class!r}')
+ raise ValueError(f'unknown action {action_class!r}')
action = action_class(**kwargs)
# raise an error if action for positional argument does not
diff --git a/Lib/ast.py b/Lib/ast.py
index b9791bf52d3..6d3daf64f5c 100644
--- a/Lib/ast.py
+++ b/Lib/ast.py
@@ -147,18 +147,22 @@ def dump(
if value is None and getattr(cls, name, ...) is None:
keywords = True
continue
- if (
- not show_empty
- and (value is None or value == [])
- # Special cases:
- # `Constant(value=None)` and `MatchSingleton(value=None)`
- and not isinstance(node, (Constant, MatchSingleton))
- ):
- args_buffer.append(repr(value))
- continue
- elif not keywords:
- args.extend(args_buffer)
- args_buffer = []
+ if not show_empty:
+ if value == []:
+ field_type = cls._field_types.get(name, object)
+ if getattr(field_type, '__origin__', ...) is list:
+ if not keywords:
+ args_buffer.append(repr(value))
+ continue
+ elif isinstance(value, Load):
+ field_type = cls._field_types.get(name, object)
+ if field_type is expr_context:
+ if not keywords:
+ args_buffer.append(repr(value))
+ continue
+ if not keywords:
+ args.extend(args_buffer)
+ args_buffer = []
value, simple = _format(value, level)
allsimple = allsimple and simple
if keywords:
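The net effect is easiest to see from ast.dump() defaults: empty lists and Load() contexts are now omitted unless show_empty=True is passed (this mirrors the updated expectations in test_ast further below):

```python
import ast

tree = ast.parse('spam(eggs, "and cheese")')

print(ast.dump(tree))
# Module(body=[Expr(value=Call(func=Name(id='spam'),
#     args=[Name(id='eggs'), Constant(value='and cheese')]))])

# Pass show_empty=True to get the verbose form with ctx=Load(),
# keywords=[] and type_ignores=[] spelled out.
print(ast.dump(tree, show_empty=True))
```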
diff --git a/Lib/code.py b/Lib/code.py
index b134886dc26..f7e275d8801 100644
--- a/Lib/code.py
+++ b/Lib/code.py
@@ -224,7 +224,7 @@ class InteractiveConsole(InteractiveInterpreter):
sys.ps1 = ">>> "
delete_ps1_after = True
try:
- _ps2 = sys.ps2
+ sys.ps2
delete_ps2_after = False
except AttributeError:
sys.ps2 = "... "
diff --git a/Lib/dbm/dumb.py b/Lib/dbm/dumb.py
index def120ffc37..1bc239a84ff 100644
--- a/Lib/dbm/dumb.py
+++ b/Lib/dbm/dumb.py
@@ -9,7 +9,7 @@ XXX TO DO:
- seems to contain a bug when updating...
- reclaim free space (currently, space once occupied by deleted or expanded
-items is never reused)
+items is not reused except when .reorganize() is called)
- support concurrent access (currently, if two processes take turns making
updates, they can mess up the index)
@@ -17,8 +17,6 @@ updates, they can mess up the index)
- support efficient access to large databases (currently, the whole index
is read when the database is opened, and some updates rewrite the whole index)
-- support opening for read-only (flag = 'm')
-
"""
import ast as _ast
@@ -289,6 +287,34 @@ class _Database(collections.abc.MutableMapping):
def __exit__(self, *args):
self.close()
+ def reorganize(self):
+ if self._readonly:
+ raise error('The database is opened for reading only')
+ self._verify_open()
+ # Ensure all changes are committed before reorganizing.
+ self._commit()
+ # Open file in r+ to allow changing in-place.
+ with _io.open(self._datfile, 'rb+') as f:
+ reorganize_pos = 0
+
+ # Iterate over existing keys, sorted by starting byte.
+ for key in sorted(self._index, key = lambda k: self._index[k][0]):
+ pos, siz = self._index[key]
+ f.seek(pos)
+ val = f.read(siz)
+
+ f.seek(reorganize_pos)
+ f.write(val)
+ self._index[key] = (reorganize_pos, siz)
+
+ blocks_occupied = (siz + _BLOCKSIZE - 1) // _BLOCKSIZE
+ reorganize_pos += blocks_occupied * _BLOCKSIZE
+
+ f.truncate(reorganize_pos)
+ # Commit changes to index, which were not in-place.
+ self._commit()
+
+
def open(file, flag='c', mode=0o666):
"""Open the database file, filename, and return corresponding object.
diff --git a/Lib/dbm/sqlite3.py b/Lib/dbm/sqlite3.py
index 7e0ae2a29e3..b296a1bcd1b 100644
--- a/Lib/dbm/sqlite3.py
+++ b/Lib/dbm/sqlite3.py
@@ -15,6 +15,7 @@ LOOKUP_KEY = "SELECT value FROM Dict WHERE key = CAST(? AS BLOB)"
STORE_KV = "REPLACE INTO Dict (key, value) VALUES (CAST(? AS BLOB), CAST(? AS BLOB))"
DELETE_KEY = "DELETE FROM Dict WHERE key = CAST(? AS BLOB)"
ITER_KEYS = "SELECT key FROM Dict"
+REORGANIZE = "VACUUM"
class error(OSError):
@@ -122,6 +123,9 @@ class _Database(MutableMapping):
def __exit__(self, *args):
self.close()
+ def reorganize(self):
+ self._execute(REORGANIZE)
+
def open(filename, /, flag="r", mode=0o666):
"""Open a dbm.sqlite3 database and return the dbm object.
diff --git a/Lib/doctest.py b/Lib/doctest.py
index 2acb6cb79f3..c8c95ecbb27 100644
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -101,6 +101,7 @@ import pdb
import re
import sys
import traceback
+import types
import unittest
from io import StringIO, IncrementalNewlineDecoder
from collections import namedtuple
@@ -385,7 +386,7 @@ class _OutputRedirectingPdb(pdb.Pdb):
self.__out = out
self.__debugger_used = False
# do not play signal games in the pdb
- pdb.Pdb.__init__(self, stdout=out, nosigint=True)
+ super().__init__(stdout=out, nosigint=True)
# still use input() to get user input
self.use_rawinput = 1
@@ -1278,6 +1279,11 @@ class DocTestRunner:
# Reporting methods
#/////////////////////////////////////////////////////////////////
+ def report_skip(self, out, test, example):
+ """
+ Report that the given example was skipped.
+ """
+
def report_start(self, out, test, example):
"""
Report that the test runner is about to process the given
@@ -1375,6 +1381,8 @@ class DocTestRunner:
# If 'SKIP' is set, then skip this example.
if self.optionflags & SKIP:
+ if not quiet:
+ self.report_skip(out, test, example)
skips += 1
continue
@@ -1395,11 +1403,11 @@ class DocTestRunner:
exec(compile(example.source, filename, "single",
compileflags, True), test.globs)
self.debugger.set_continue() # ==== Example Finished ====
- exception = None
+ exc_info = None
except KeyboardInterrupt:
raise
- except:
- exception = sys.exc_info()
+ except BaseException as exc:
+ exc_info = type(exc), exc, exc.__traceback__.tb_next
self.debugger.set_continue() # ==== Example Finished ====
got = self._fakeout.getvalue() # the actual output
@@ -1408,21 +1416,21 @@ class DocTestRunner:
# If the example executed without raising any exceptions,
# verify its output.
- if exception is None:
+ if exc_info is None:
if check(example.want, got, self.optionflags):
outcome = SUCCESS
# The example raised an exception: check if it was expected.
else:
- formatted_ex = traceback.format_exception_only(*exception[:2])
- if issubclass(exception[0], SyntaxError):
+ formatted_ex = traceback.format_exception_only(*exc_info[:2])
+ if issubclass(exc_info[0], SyntaxError):
# SyntaxError / IndentationError is special:
# we don't care about the carets / suggestions / etc
# We only care about the error message and notes.
# They start with `SyntaxError:` (or any other class name)
exception_line_prefixes = (
- f"{exception[0].__qualname__}:",
- f"{exception[0].__module__}.{exception[0].__qualname__}:",
+ f"{exc_info[0].__qualname__}:",
+ f"{exc_info[0].__module__}.{exc_info[0].__qualname__}:",
)
exc_msg_index = next(
index
@@ -1433,7 +1441,7 @@ class DocTestRunner:
exc_msg = "".join(formatted_ex)
if not quiet:
- got += _exception_traceback(exception)
+ got += _exception_traceback(exc_info)
# If `example.exc_msg` is None, then we weren't expecting
# an exception.
@@ -1462,7 +1470,7 @@ class DocTestRunner:
elif outcome is BOOM:
if not quiet:
self.report_unexpected_exception(out, test, example,
- exception)
+ exc_info)
failures += 1
else:
assert False, ("unknown outcome", outcome)
@@ -2272,12 +2280,63 @@ def set_unittest_reportflags(flags):
return old
+class _DocTestCaseRunner(DocTestRunner):
+
+ def __init__(self, *args, test_case, test_result, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._test_case = test_case
+ self._test_result = test_result
+ self._examplenum = 0
+
+ def _subTest(self):
+ subtest = unittest.case._SubTest(self._test_case, str(self._examplenum), {})
+ self._examplenum += 1
+ return subtest
+
+ def report_skip(self, out, test, example):
+ unittest.case._addSkip(self._test_result, self._subTest(), '')
+
+ def report_success(self, out, test, example, got):
+ self._test_result.addSubTest(self._test_case, self._subTest(), None)
+
+ def report_unexpected_exception(self, out, test, example, exc_info):
+ tb = self._add_traceback(exc_info[2], test, example)
+ exc_info = (*exc_info[:2], tb)
+ self._test_result.addSubTest(self._test_case, self._subTest(), exc_info)
+
+ def report_failure(self, out, test, example, got):
+ msg = ('Failed example:\n' + _indent(example.source) +
+ self._checker.output_difference(example, got, self.optionflags).rstrip('\n'))
+ exc = self._test_case.failureException(msg)
+ tb = self._add_traceback(None, test, example)
+ exc_info = (type(exc), exc, tb)
+ self._test_result.addSubTest(self._test_case, self._subTest(), exc_info)
+
+ def _add_traceback(self, traceback, test, example):
+ if test.lineno is None or example.lineno is None:
+ lineno = None
+ else:
+ lineno = test.lineno + example.lineno + 1
+ return types.SimpleNamespace(
+ tb_frame = types.SimpleNamespace(
+ f_globals=test.globs,
+ f_code=types.SimpleNamespace(
+ co_filename=test.filename,
+ co_name=test.name,
+ ),
+ ),
+ tb_next = traceback,
+ tb_lasti = -1,
+ tb_lineno = lineno,
+ )
+
+
class DocTestCase(unittest.TestCase):
def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
checker=None):
- unittest.TestCase.__init__(self)
+ super().__init__()
self._dt_optionflags = optionflags
self._dt_checker = checker
self._dt_test = test
@@ -2301,30 +2360,28 @@ class DocTestCase(unittest.TestCase):
test.globs.clear()
test.globs.update(self._dt_globs)
+ def run(self, result=None):
+ self._test_result = result
+ return super().run(result)
+
def runTest(self):
test = self._dt_test
- old = sys.stdout
- new = StringIO()
optionflags = self._dt_optionflags
+ result = self._test_result
if not (optionflags & REPORTING_FLAGS):
# The option flags don't include any reporting flags,
# so add the default reporting flags
optionflags |= _unittest_reportflags
+ if getattr(result, 'failfast', False):
+ optionflags |= FAIL_FAST
- runner = DocTestRunner(optionflags=optionflags,
- checker=self._dt_checker, verbose=False)
-
- try:
- runner.DIVIDER = "-"*70
- results = runner.run(test, out=new.write, clear_globs=False)
- if results.skipped == results.attempted:
- raise unittest.SkipTest("all examples were skipped")
- finally:
- sys.stdout = old
-
- if results.failed:
- raise self.failureException(self.format_failure(new.getvalue()))
+ runner = _DocTestCaseRunner(optionflags=optionflags,
+ checker=self._dt_checker, verbose=False,
+ test_case=self, test_result=result)
+ results = runner.run(test, clear_globs=False)
+ if results.skipped == results.attempted:
+ raise unittest.SkipTest("all examples were skipped")
def format_failure(self, err):
test = self._dt_test
@@ -2439,7 +2496,7 @@ class DocTestCase(unittest.TestCase):
class SkipDocTestCase(DocTestCase):
def __init__(self, module):
self.module = module
- DocTestCase.__init__(self, None)
+ super().__init__(None)
def setUp(self):
self.skipTest("DocTestSuite will not work with -O2 and above")
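The new report_skip() hook is called once per example skipped via the SKIP option flag, alongside the existing report_* methods. A sketch of a custom runner using it (names here are illustrative):

```python
import doctest

class CountingRunner(doctest.DocTestRunner):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.skipped_examples = 0

    def report_skip(self, out, test, example):
        # Called for each example skipped via the SKIP flag.
        self.skipped_examples += 1
```

_DocTestCaseRunner uses the same hook to record each skipped example as a unittest subtest.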
diff --git a/Lib/email/_header_value_parser.py b/Lib/email/_header_value_parser.py
index f11fa83d45e..91243378dc0 100644
--- a/Lib/email/_header_value_parser.py
+++ b/Lib/email/_header_value_parser.py
@@ -1575,7 +1575,7 @@ def get_dtext(value):
def _check_for_early_dl_end(value, domain_literal):
if value:
return False
- domain_literal.append(errors.InvalidHeaderDefect(
+ domain_literal.defects.append(errors.InvalidHeaderDefect(
"end of input inside domain-literal"))
domain_literal.append(ValueTerminal(']', 'domain-literal-end'))
return True
@@ -1594,9 +1594,9 @@ def get_domain_literal(value):
raise errors.HeaderParseError("expected '[' at start of domain-literal "
"but found '{}'".format(value))
value = value[1:]
+ domain_literal.append(ValueTerminal('[', 'domain-literal-start'))
if _check_for_early_dl_end(value, domain_literal):
return domain_literal, value
- domain_literal.append(ValueTerminal('[', 'domain-literal-start'))
if value[0] in WSP:
token, value = get_fws(value)
domain_literal.append(token)
diff --git a/Lib/fractions.py b/Lib/fractions.py
index 063f28478c7..cb05ae7c200 100644
--- a/Lib/fractions.py
+++ b/Lib/fractions.py
@@ -168,9 +168,9 @@ _FLOAT_FORMAT_SPECIFICATION_MATCHER = re.compile(r"""
# A '0' that's *not* followed by another digit is parsed as a minimum width
# rather than a zeropad flag.
(?P<zeropad>0(?=[0-9]))?
- (?P<minimumwidth>0|[1-9][0-9]*)?
+ (?P<minimumwidth>[0-9]+)?
(?P<thousands_sep>[,_])?
- (?:\.(?P<precision>0|[1-9][0-9]*))?
+ (?:\.(?P<precision>[0-9]+))?
(?P<presentation_type>[eEfFgG%])
""", re.DOTALL | re.VERBOSE).fullmatch
diff --git a/Lib/genericpath.py b/Lib/genericpath.py
index ba7b0a13c7f..9363f564aab 100644
--- a/Lib/genericpath.py
+++ b/Lib/genericpath.py
@@ -8,7 +8,7 @@ import stat
__all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime',
'getsize', 'isdevdrive', 'isdir', 'isfile', 'isjunction', 'islink',
- 'lexists', 'samefile', 'sameopenfile', 'samestat']
+ 'lexists', 'samefile', 'sameopenfile', 'samestat', 'ALLOW_MISSING']
# Does a path exist?
@@ -189,3 +189,12 @@ def _check_arg_types(funcname, *args):
f'os.PathLike object, not {s.__class__.__name__!r}') from None
if hasstr and hasbytes:
raise TypeError("Can't mix strings and bytes in path components") from None
+
+# A singleton with a true boolean value.
+@object.__new__
+class ALLOW_MISSING:
+ """Special value for use in realpath()."""
+ def __repr__(self):
+ return 'os.path.ALLOW_MISSING'
+ def __reduce__(self):
+ return self.__class__.__name__
diff --git a/Lib/hashlib.py b/Lib/hashlib.py
index abacac22ea0..0e9bd98aa1f 100644
--- a/Lib/hashlib.py
+++ b/Lib/hashlib.py
@@ -141,29 +141,29 @@ def __get_openssl_constructor(name):
return __get_builtin_constructor(name)
-def __py_new(name, data=b'', **kwargs):
+def __py_new(name, *args, **kwargs):
"""new(name, data=b'', **kwargs) - Return a new hashing object using the
named algorithm; optionally initialized with data (which must be
a bytes-like object).
"""
- return __get_builtin_constructor(name)(data, **kwargs)
+ return __get_builtin_constructor(name)(*args, **kwargs)
-def __hash_new(name, data=b'', **kwargs):
+def __hash_new(name, *args, **kwargs):
"""new(name, data=b'') - Return a new hashing object using the named algorithm;
optionally initialized with data (which must be a bytes-like object).
"""
if name in __block_openssl_constructor:
# Prefer our builtin blake2 implementation.
- return __get_builtin_constructor(name)(data, **kwargs)
+ return __get_builtin_constructor(name)(*args, **kwargs)
try:
- return _hashlib.new(name, data, **kwargs)
+ return _hashlib.new(name, *args, **kwargs)
except ValueError:
# If the _hashlib module (OpenSSL) doesn't support the named
# hash, try using our builtin implementations.
# This allows for SHA224/256 and SHA384/512 support even though
# the OpenSSL library prior to 0.9.8 doesn't provide them.
- return __get_builtin_constructor(name)(data)
+ return __get_builtin_constructor(name)(*args, **kwargs)
try:
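Because the wrappers now forward *args and **kwargs unchanged, keyword-only options reach both the OpenSSL-backed and the built-in constructors. A hedged example (assuming the algorithm is available in the build):

```python
import hashlib

# Data and keyword options are passed through to whichever
# implementation ends up handling the request.
h = hashlib.new("sha256", b"payload", usedforsecurity=False)
print(h.hexdigest())
```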
diff --git a/Lib/idlelib/News3.txt b/Lib/idlelib/News3.txt
index 74d84b38931..30784578cc6 100644
--- a/Lib/idlelib/News3.txt
+++ b/Lib/idlelib/News3.txt
@@ -4,6 +4,13 @@ Released on 2025-10-07
=========================
+gh-112936: IDLE - Include Shell menu in single-process mode,
+though with Restart Shell and View Last Restart disabled.
+Patch by Zhikang Yan.
+
+gh-112938: IDLE - Fix uninterruptible hang when Shell gets
+rapid continuous output.
+
gh-127060: Set TERM environment variable to 'dumb' to not add ANSI escape
sequences for text color in tracebacks. IDLE does not understand them.
Patch by Victor Stinner.
diff --git a/Lib/json/encoder.py b/Lib/json/encoder.py
index 016638549aa..bc446e0f377 100644
--- a/Lib/json/encoder.py
+++ b/Lib/json/encoder.py
@@ -348,7 +348,6 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
_current_indent_level += 1
newline_indent = '\n' + _indent * _current_indent_level
item_separator = _item_separator + newline_indent
- yield newline_indent
else:
newline_indent = None
item_separator = _item_separator
@@ -381,6 +380,8 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
f'not {key.__class__.__name__}')
if first:
first = False
+ if newline_indent is not None:
+ yield newline_indent
else:
yield item_separator
yield _encoder(key)
@@ -413,7 +414,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
except BaseException as exc:
exc.add_note(f'when serializing {type(dct).__name__} item {key!r}')
raise
- if newline_indent is not None:
+ if not first and newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
yield '}'
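Presumably the case this fix targets: with indent set and every key skipped, the encoder used to emit the newline and indentation before knowing whether any item would follow. A hedged illustration:

```python
import json

# The tuple key is dropped by skipkeys, so nothing is emitted between
# the braces; with this change the output is a clean '{}' rather than
# braces wrapped around a dangling indented blank line.
print(json.dumps({(1, 2): "ignored"}, skipkeys=True, indent=2))
```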
diff --git a/Lib/ntpath.py b/Lib/ntpath.py
index 52ff2af743a..9cdc16480f9 100644
--- a/Lib/ntpath.py
+++ b/Lib/ntpath.py
@@ -29,7 +29,7 @@ __all__ = ["normcase","isabs","join","splitdrive","splitroot","split","splitext"
"abspath","curdir","pardir","sep","pathsep","defpath","altsep",
"extsep","devnull","realpath","supports_unicode_filenames","relpath",
"samefile", "sameopenfile", "samestat", "commonpath", "isjunction",
- "isdevdrive"]
+ "isdevdrive", "ALLOW_MISSING"]
def _get_bothseps(path):
if isinstance(path, bytes):
@@ -601,9 +601,10 @@ try:
from nt import _findfirstfile, _getfinalpathname, readlink as _nt_readlink
except ImportError:
# realpath is a no-op on systems without _getfinalpathname support.
- realpath = abspath
+ def realpath(path, *, strict=False):
+ return abspath(path)
else:
- def _readlink_deep(path):
+ def _readlink_deep(path, ignored_error=OSError):
# These error codes indicate that we should stop reading links and
# return the path we currently have.
# 1: ERROR_INVALID_FUNCTION
@@ -636,7 +637,7 @@ else:
path = old_path
break
path = normpath(join(dirname(old_path), path))
- except OSError as ex:
+ except ignored_error as ex:
if ex.winerror in allowed_winerror:
break
raise
@@ -645,7 +646,7 @@ else:
break
return path
- def _getfinalpathname_nonstrict(path):
+ def _getfinalpathname_nonstrict(path, ignored_error=OSError):
# These error codes indicate that we should stop resolving the path
# and return the value we currently have.
# 1: ERROR_INVALID_FUNCTION
@@ -673,17 +674,18 @@ else:
try:
path = _getfinalpathname(path)
return join(path, tail) if tail else path
- except OSError as ex:
+ except ignored_error as ex:
if ex.winerror not in allowed_winerror:
raise
try:
# The OS could not resolve this path fully, so we attempt
# to follow the link ourselves. If we succeed, join the tail
# and return.
- new_path = _readlink_deep(path)
+ new_path = _readlink_deep(path,
+ ignored_error=ignored_error)
if new_path != path:
return join(new_path, tail) if tail else new_path
- except OSError:
+ except ignored_error:
# If we fail to readlink(), let's keep traversing
pass
# If we get these errors, try to get the real name of the file without accessing it.
@@ -691,7 +693,7 @@ else:
try:
name = _findfirstfile(path)
path, _ = split(path)
- except OSError:
+ except ignored_error:
path, name = split(path)
else:
path, name = split(path)
@@ -721,6 +723,15 @@ else:
if normcase(path) == devnull:
return '\\\\.\\NUL'
had_prefix = path.startswith(prefix)
+
+ if strict is ALLOW_MISSING:
+ ignored_error = FileNotFoundError
+ strict = True
+ elif strict:
+ ignored_error = ()
+ else:
+ ignored_error = OSError
+
if not had_prefix and not isabs(path):
path = join(cwd, path)
try:
@@ -728,17 +739,16 @@ else:
initial_winerror = 0
except ValueError as ex:
# gh-106242: Raised for embedded null characters
- # In strict mode, we convert into an OSError.
+ # In strict modes, we convert into an OSError.
# Non-strict mode returns the path as-is, since we've already
# made it absolute.
if strict:
raise OSError(str(ex)) from None
path = normpath(path)
- except OSError as ex:
- if strict:
- raise
+ except ignored_error as ex:
initial_winerror = ex.winerror
- path = _getfinalpathname_nonstrict(path)
+ path = _getfinalpathname_nonstrict(path,
+ ignored_error=ignored_error)
# The path returned by _getfinalpathname will always start with \\?\ -
# strip off that prefix unless it was already provided on the original
# path.
diff --git a/Lib/posixpath.py b/Lib/posixpath.py
index db72ded8826..d38f3bd5872 100644
--- a/Lib/posixpath.py
+++ b/Lib/posixpath.py
@@ -36,7 +36,7 @@ __all__ = ["normcase","isabs","join","splitdrive","splitroot","split","splitext"
"samefile","sameopenfile","samestat",
"curdir","pardir","sep","pathsep","defpath","altsep","extsep",
"devnull","realpath","supports_unicode_filenames","relpath",
- "commonpath", "isjunction","isdevdrive"]
+ "commonpath", "isjunction","isdevdrive","ALLOW_MISSING"]
def _get_sep(path):
@@ -402,10 +402,18 @@ symbolic links encountered in the path."""
curdir = '.'
pardir = '..'
getcwd = os.getcwd
- return _realpath(filename, strict, sep, curdir, pardir, getcwd)
+ if strict is ALLOW_MISSING:
+ ignored_error = FileNotFoundError
+ strict = True
+ elif strict:
+ ignored_error = ()
+ else:
+ ignored_error = OSError
+
+ lstat = os.lstat
+ readlink = os.readlink
+ maxlinks = None
-def _realpath(filename, strict=False, sep=sep, curdir=curdir, pardir=pardir,
- getcwd=os.getcwd, lstat=os.lstat, readlink=os.readlink, maxlinks=None):
# The stack of unresolved path parts. When popped, a special value of None
# indicates that a symlink target has been resolved, and that the original
# symlink path can be retrieved by popping again. The [::-1] slice is a
@@ -477,27 +485,28 @@ def _realpath(filename, strict=False, sep=sep, curdir=curdir, pardir=pardir,
path = newpath
continue
target = readlink(newpath)
- except OSError:
- if strict:
- raise
- path = newpath
+ except ignored_error:
+ pass
+ else:
+ # Resolve the symbolic link
+ if target.startswith(sep):
+ # Symlink target is absolute; reset resolved path.
+ path = sep
+ if maxlinks is None:
+ # Mark this symlink as seen but not fully resolved.
+ seen[newpath] = None
+ # Push the symlink path onto the stack, and signal its specialness
+ # by also pushing None. When these entries are popped, we'll
+ # record the fully-resolved symlink target in the 'seen' mapping.
+ rest.append(newpath)
+ rest.append(None)
+ # Push the unresolved symlink target parts onto the stack.
+ target_parts = target.split(sep)[::-1]
+ rest.extend(target_parts)
+ part_count += len(target_parts)
continue
- # Resolve the symbolic link
- if target.startswith(sep):
- # Symlink target is absolute; reset resolved path.
- path = sep
- if maxlinks is None:
- # Mark this symlink as seen but not fully resolved.
- seen[newpath] = None
- # Push the symlink path onto the stack, and signal its specialness
- # by also pushing None. When these entries are popped, we'll
- # record the fully-resolved symlink target in the 'seen' mapping.
- rest.append(newpath)
- rest.append(None)
- # Push the unresolved symlink target parts onto the stack.
- target_parts = target.split(sep)[::-1]
- rest.extend(target_parts)
- part_count += len(target_parts)
+ # An error occurred and was ignored.
+ path = newpath
return path
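The new mode maps strict=ALLOW_MISSING to strict resolution that ignores only FileNotFoundError, so missing components are tolerated while other failures still raise. A hedged sketch:

```python
import os

# Missing path components do not raise in this mode (illustrative path):
p = os.path.realpath("/tmp/no-such-dir/child", strict=os.path.ALLOW_MISSING)

# Any OSError other than FileNotFoundError (for example a permission
# failure while resolving) propagates as it would with strict=True.
```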
diff --git a/Lib/shelve.py b/Lib/shelve.py
index 50584716e9e..b53dc8b7a8e 100644
--- a/Lib/shelve.py
+++ b/Lib/shelve.py
@@ -171,6 +171,11 @@ class Shelf(collections.abc.MutableMapping):
if hasattr(self.dict, 'sync'):
self.dict.sync()
+ def reorganize(self):
+ self.sync()
+ if hasattr(self.dict, 'reorganize'):
+ self.dict.reorganize()
+
class BsdDbShelf(Shelf):
"""Shelf implementation using the "BSD" db interface.
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index 212b71f6509..068aa13ed70 100644
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -67,7 +67,7 @@ __all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError", "ReadError",
"DEFAULT_FORMAT", "open","fully_trusted_filter", "data_filter",
"tar_filter", "FilterError", "AbsoluteLinkError",
"OutsideDestinationError", "SpecialFileError", "AbsolutePathError",
- "LinkOutsideDestinationError"]
+ "LinkOutsideDestinationError", "LinkFallbackError"]
#---------------------------------------------------------
@@ -766,10 +766,22 @@ class LinkOutsideDestinationError(FilterError):
super().__init__(f'{tarinfo.name!r} would link to {path!r}, '
+ 'which is outside the destination')
+class LinkFallbackError(FilterError):
+ def __init__(self, tarinfo, path):
+ self.tarinfo = tarinfo
+ self._path = path
+ super().__init__(f'link {tarinfo.name!r} would be extracted as a '
+ + f'copy of {path!r}, which was rejected')
+
+# Errors caused by filters -- both "fatal" and "non-fatal" -- that
+# we consider to be issues with the argument, rather than a bug in the
+# filter function
+_FILTER_ERRORS = (FilterError, OSError, ExtractError)
+
def _get_filtered_attrs(member, dest_path, for_data=True):
new_attrs = {}
name = member.name
- dest_path = os.path.realpath(dest_path)
+ dest_path = os.path.realpath(dest_path, strict=os.path.ALLOW_MISSING)
# Strip leading / (tar's directory separator) from filenames.
# Include os.sep (target OS directory separator) as well.
if name.startswith(('/', os.sep)):
@@ -779,7 +791,8 @@ def _get_filtered_attrs(member, dest_path, for_data=True):
# For example, 'C:/foo' on Windows.
raise AbsolutePathError(member)
# Ensure we stay in the destination
- target_path = os.path.realpath(os.path.join(dest_path, name))
+ target_path = os.path.realpath(os.path.join(dest_path, name),
+ strict=os.path.ALLOW_MISSING)
if os.path.commonpath([target_path, dest_path]) != dest_path:
raise OutsideDestinationError(member, target_path)
# Limit permissions (no high bits, and go-w)
@@ -817,6 +830,9 @@ def _get_filtered_attrs(member, dest_path, for_data=True):
if member.islnk() or member.issym():
if os.path.isabs(member.linkname):
raise AbsoluteLinkError(member)
+ normalized = os.path.normpath(member.linkname)
+ if normalized != member.linkname:
+ new_attrs['linkname'] = normalized
if member.issym():
target_path = os.path.join(dest_path,
os.path.dirname(name),
@@ -824,7 +840,8 @@ def _get_filtered_attrs(member, dest_path, for_data=True):
else:
target_path = os.path.join(dest_path,
member.linkname)
- target_path = os.path.realpath(target_path)
+ target_path = os.path.realpath(target_path,
+ strict=os.path.ALLOW_MISSING)
if os.path.commonpath([target_path, dest_path]) != dest_path:
raise LinkOutsideDestinationError(member, target_path)
return new_attrs
@@ -2386,30 +2403,58 @@ class TarFile(object):
members = self
for member in members:
- tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+ tarinfo, unfiltered = self._get_extract_tarinfo(
+ member, filter_function, path)
if tarinfo is None:
continue
if tarinfo.isdir():
# For directories, delay setting attributes until later,
# since permissions can interfere with extraction and
# extracting contents can reset mtime.
- directories.append(tarinfo)
+ directories.append(unfiltered)
self._extract_one(tarinfo, path, set_attrs=not tarinfo.isdir(),
- numeric_owner=numeric_owner)
+ numeric_owner=numeric_owner,
+ filter_function=filter_function)
# Reverse sort directories.
directories.sort(key=lambda a: a.name, reverse=True)
+
# Set correct owner, mtime and filemode on directories.
- for tarinfo in directories:
- dirpath = os.path.join(path, tarinfo.name)
+ for unfiltered in directories:
try:
+ # Need to re-apply any filter, to take the *current* filesystem
+ # state into account.
+ try:
+ tarinfo = filter_function(unfiltered, path)
+ except _FILTER_ERRORS as exc:
+ self._log_no_directory_fixup(unfiltered, repr(exc))
+ continue
+ if tarinfo is None:
+ self._log_no_directory_fixup(unfiltered,
+ 'excluded by filter')
+ continue
+ dirpath = os.path.join(path, tarinfo.name)
+ try:
+ lstat = os.lstat(dirpath)
+ except FileNotFoundError:
+ self._log_no_directory_fixup(tarinfo, 'missing')
+ continue
+ if not stat.S_ISDIR(lstat.st_mode):
+ # This is no longer a directory; presumably a later
+ # member overwrote the entry.
+ self._log_no_directory_fixup(tarinfo, 'not a directory')
+ continue
self.chown(tarinfo, dirpath, numeric_owner=numeric_owner)
self.utime(tarinfo, dirpath)
self.chmod(tarinfo, dirpath)
except ExtractError as e:
self._handle_nonfatal_error(e)
+ def _log_no_directory_fixup(self, member, reason):
+ self._dbg(2, "tarfile: Not fixing up directory %r (%s)" %
+ (member.name, reason))
+
def extract(self, member, path="", set_attrs=True, *, numeric_owner=False,
filter=None):
"""Extract a member from the archive to the current working directory,
@@ -2425,41 +2470,56 @@ class TarFile(object):
String names of common filters are accepted.
"""
filter_function = self._get_filter_function(filter)
- tarinfo = self._get_extract_tarinfo(member, filter_function, path)
+ tarinfo, unfiltered = self._get_extract_tarinfo(
+ member, filter_function, path)
if tarinfo is not None:
self._extract_one(tarinfo, path, set_attrs, numeric_owner)
def _get_extract_tarinfo(self, member, filter_function, path):
- """Get filtered TarInfo (or None) from member, which might be a str"""
+ """Get (filtered, unfiltered) TarInfos from *member*
+
+ *member* might be a string.
+
+    Return (None, None) if the member is excluded by the filter.
+ """
+
if isinstance(member, str):
- tarinfo = self.getmember(member)
+ unfiltered = self.getmember(member)
else:
- tarinfo = member
+ unfiltered = member
- unfiltered = tarinfo
+ filtered = None
try:
- tarinfo = filter_function(tarinfo, path)
+ filtered = filter_function(unfiltered, path)
except (OSError, UnicodeEncodeError, FilterError) as e:
self._handle_fatal_error(e)
except ExtractError as e:
self._handle_nonfatal_error(e)
- if tarinfo is None:
+ if filtered is None:
self._dbg(2, "tarfile: Excluded %r" % unfiltered.name)
- return None
+ return None, None
+
# Prepare the link target for makelink().
- if tarinfo.islnk():
- tarinfo = copy.copy(tarinfo)
- tarinfo._link_target = os.path.join(path, tarinfo.linkname)
- return tarinfo
+ if filtered.islnk():
+ filtered = copy.copy(filtered)
+ filtered._link_target = os.path.join(path, filtered.linkname)
+ return filtered, unfiltered
+
+ def _extract_one(self, tarinfo, path, set_attrs, numeric_owner,
+ filter_function=None):
+ """Extract from filtered tarinfo to disk.
- def _extract_one(self, tarinfo, path, set_attrs, numeric_owner):
- """Extract from filtered tarinfo to disk"""
+ filter_function is only used when extracting a *different*
+ member (e.g. as fallback to creating a symlink)
+ """
self._check("r")
try:
self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
set_attrs=set_attrs,
- numeric_owner=numeric_owner)
+ numeric_owner=numeric_owner,
+ filter_function=filter_function,
+ extraction_root=path)
except (OSError, UnicodeEncodeError) as e:
self._handle_fatal_error(e)
except ExtractError as e:
@@ -2517,9 +2577,13 @@ class TarFile(object):
return None
def _extract_member(self, tarinfo, targetpath, set_attrs=True,
- numeric_owner=False):
- """Extract the TarInfo object tarinfo to a physical
+ numeric_owner=False, *, filter_function=None,
+ extraction_root=None):
+ """Extract the filtered TarInfo object tarinfo to a physical
file called targetpath.
+
+ filter_function is only used when extracting a *different*
+ member (e.g. as fallback to creating a symlink)
"""
# Fetch the TarInfo object for the given name
# and build the destination pathname, replacing
@@ -2548,7 +2612,10 @@ class TarFile(object):
elif tarinfo.ischr() or tarinfo.isblk():
self.makedev(tarinfo, targetpath)
elif tarinfo.islnk() or tarinfo.issym():
- self.makelink(tarinfo, targetpath)
+ self.makelink_with_filter(
+ tarinfo, targetpath,
+ filter_function=filter_function,
+ extraction_root=extraction_root)
elif tarinfo.type not in SUPPORTED_TYPES:
self.makeunknown(tarinfo, targetpath)
else:
@@ -2631,10 +2698,18 @@ class TarFile(object):
os.makedev(tarinfo.devmajor, tarinfo.devminor))
def makelink(self, tarinfo, targetpath):
+ return self.makelink_with_filter(tarinfo, targetpath, None, None)
+
+ def makelink_with_filter(self, tarinfo, targetpath,
+ filter_function, extraction_root):
"""Make a (symbolic) link called targetpath. If it cannot be created
(platform limitation), we try to make a copy of the referenced file
instead of a link.
+
+ filter_function is only used when extracting a *different*
+ member (e.g. as fallback to creating a link).
"""
+ keyerror_to_extracterror = False
try:
# For systems that support symbolic and hard links.
if tarinfo.issym():
@@ -2642,18 +2717,38 @@ class TarFile(object):
# Avoid FileExistsError on following os.symlink.
os.unlink(targetpath)
os.symlink(tarinfo.linkname, targetpath)
+ return
else:
if os.path.exists(tarinfo._link_target):
os.link(tarinfo._link_target, targetpath)
- else:
- self._extract_member(self._find_link_target(tarinfo),
- targetpath)
+ return
except symlink_exception:
+ keyerror_to_extracterror = True
+
+ try:
+ unfiltered = self._find_link_target(tarinfo)
+ except KeyError:
+ if keyerror_to_extracterror:
+ raise ExtractError(
+ "unable to resolve link inside archive") from None
+ else:
+ raise
+
+ if filter_function is None:
+ filtered = unfiltered
+ else:
+ if extraction_root is None:
+ raise ExtractError(
+ "makelink_with_filter: if filter_function is not None, "
+ + "extraction_root must also not be None")
try:
- self._extract_member(self._find_link_target(tarinfo),
- targetpath)
- except KeyError:
- raise ExtractError("unable to resolve link inside archive") from None
+ filtered = filter_function(unfiltered, extraction_root)
+ except _FILTER_ERRORS as cause:
+ raise LinkFallbackError(tarinfo, unfiltered.name) from cause
+ if filtered is not None:
+ self._extract_member(filtered, targetpath,
+ filter_function=filter_function,
+ extraction_root=extraction_root)
def chown(self, tarinfo, targetpath, numeric_owner):
"""Set owner of targetpath according to tarinfo. If numeric_owner
diff --git a/Lib/test/_code_definitions.py b/Lib/test/_code_definitions.py
index 733a15b25f6..274beb65a6d 100644
--- a/Lib/test/_code_definitions.py
+++ b/Lib/test/_code_definitions.py
@@ -57,6 +57,15 @@ def spam_with_globals_and_builtins():
print(res)
+def spam_full_args(a, b, /, c, d, *args, e, f, **kwargs):
+ return (a, b, c, d, e, f, args, kwargs)
+
+
+def spam_full_args_with_defaults(a=-1, b=-2, /, c=-3, d=-4, *args,
+ e=-5, f=-6, **kwargs):
+ return (a, b, c, d, e, f, args, kwargs)
+
+
def spam_args_attrs_and_builtins(a, b, /, c, d, *args, e, f, **kwargs):
if args.__len__() > 2:
return None
@@ -67,6 +76,10 @@ def spam_returns_arg(x):
return x
+def spam_raises():
+ raise Exception('spam!')
+
+
def spam_with_inner_not_closure():
def eggs():
pass
@@ -177,8 +190,11 @@ TOP_FUNCTIONS = [
spam_minimal,
spam_with_builtins,
spam_with_globals_and_builtins,
+ spam_full_args,
+ spam_full_args_with_defaults,
spam_args_attrs_and_builtins,
spam_returns_arg,
+ spam_raises,
spam_with_inner_not_closure,
spam_with_inner_closure,
spam_annotated,
@@ -219,8 +235,10 @@ STATELESS_FUNCTIONS = [
spam,
spam_minimal,
spam_with_builtins,
+ spam_full_args,
spam_args_attrs_and_builtins,
spam_returns_arg,
+ spam_raises,
spam_annotated,
spam_with_inner_not_closure,
spam_with_inner_closure,
@@ -238,6 +256,7 @@ STATELESS_FUNCTIONS = [
STATELESS_CODE = [
*STATELESS_FUNCTIONS,
script_with_globals,
+ spam_full_args_with_defaults,
spam_with_globals_and_builtins,
spam_full,
]
@@ -248,6 +267,7 @@ PURE_SCRIPT_FUNCTIONS = [
script_with_explicit_empty_return,
spam_minimal,
spam_with_builtins,
+ spam_raises,
spam_with_inner_not_closure,
spam_with_inner_closure,
]
diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py
index 713cbedb299..0d9c059a938 100644
--- a/Lib/test/libregrtest/main.py
+++ b/Lib/test/libregrtest/main.py
@@ -543,8 +543,6 @@ class Regrtest:
self.first_runtests = runtests
self.logger.set_tests(runtests)
- setup_process()
-
if (runtests.hunt_refleak is not None) and (not self.num_workers):
# gh-109739: WindowsLoadTracker thread interferes with refleak check
use_load_tracker = False
@@ -721,10 +719,7 @@ class Regrtest:
self._execute_python(cmd, environ)
def _init(self):
- # Set sys.stdout encoder error handler to backslashreplace,
- # similar to sys.stderr error handler, to avoid UnicodeEncodeError
- # when printing a traceback or any other non-encodable character.
- sys.stdout.reconfigure(errors="backslashreplace")
+ setup_process()
if self.junit_filename and not os.path.isabs(self.junit_filename):
self.junit_filename = os.path.abspath(self.junit_filename)
diff --git a/Lib/test/libregrtest/setup.py b/Lib/test/libregrtest/setup.py
index c3d1f60a400..9bfc414cd61 100644
--- a/Lib/test/libregrtest/setup.py
+++ b/Lib/test/libregrtest/setup.py
@@ -1,5 +1,6 @@
import faulthandler
import gc
+import io
import os
import random
import signal
@@ -52,6 +53,14 @@ def setup_process() -> None:
support.record_original_stdout(sys.stdout)
+ # Set sys.stdout encoder error handler to backslashreplace,
+ # similar to sys.stderr error handler, to avoid UnicodeEncodeError
+ # when printing a traceback or any other non-encodable character.
+ #
+ # Use an assertion to fix mypy error.
+ assert isinstance(sys.stdout, io.TextIOWrapper)
+ sys.stdout.reconfigure(errors="backslashreplace")
+
# Some times __path__ and __file__ are not absolute (e.g. while running from
# Lib/) and, if we change the CWD to run the tests in a temporary dir, some
# imports might fail. This affects only the modules imported before os.chdir().
diff --git a/Lib/test/subprocessdata/fd_status.py b/Lib/test/subprocessdata/fd_status.py
index d12bd95abee..90e785981ae 100644
--- a/Lib/test/subprocessdata/fd_status.py
+++ b/Lib/test/subprocessdata/fd_status.py
@@ -2,7 +2,7 @@
file descriptors on stdout.
Usage:
-fd_stats.py: check all file descriptors
+fd_status.py: check all file descriptors (up to 255)
fd_status.py fd1 fd2 ...: check only specified file descriptors
"""
@@ -18,7 +18,7 @@ if __name__ == "__main__":
_MAXFD = os.sysconf("SC_OPEN_MAX")
except:
_MAXFD = 256
- test_fds = range(0, _MAXFD)
+ test_fds = range(0, min(_MAXFD, 256))
else:
test_fds = map(int, sys.argv[1:])
for fd in test_fds:
diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py
index 351d832a26d..f9b66b88d3d 100644
--- a/Lib/test/support/__init__.py
+++ b/Lib/test/support/__init__.py
@@ -1084,7 +1084,7 @@ def set_memlimit(limit: str) -> None:
global real_max_memuse
memlimit = _parse_memlimit(limit)
if memlimit < _2G - 1:
- raise ValueError('Memory limit {limit!r} too low to be useful')
+ raise ValueError(f'Memory limit {limit!r} too low to be useful')
real_max_memuse = memlimit
memlimit = min(memlimit, MAX_Py_ssize_t)
@@ -2358,7 +2358,7 @@ def infinite_recursion(max_depth=None):
# very deep recursion.
max_depth = 20_000
elif max_depth < 3:
- raise ValueError("max_depth must be at least 3, got {max_depth}")
+ raise ValueError(f"max_depth must be at least 3, got {max_depth}")
depth = get_recursion_depth()
depth = max(depth - 1, 1) # Ignore infinite_recursion() frame.
limit = depth + max_depth
diff --git a/Lib/test/support/interpreters/__init__.py b/Lib/test/support/interpreters/__init__.py
index e067f259364..6d1b0690805 100644
--- a/Lib/test/support/interpreters/__init__.py
+++ b/Lib/test/support/interpreters/__init__.py
@@ -226,33 +226,32 @@ class Interpreter:
if excinfo is not None:
raise ExecutionFailed(excinfo)
- def call(self, callable, /):
- """Call the object in the interpreter with given args/kwargs.
+ def _call(self, callable, args, kwargs):
+ res, excinfo = _interpreters.call(self._id, callable, args, kwargs, restrict=True)
+ if excinfo is not None:
+ raise ExecutionFailed(excinfo)
+ return res
- Only functions that take no arguments and have no closure
- are supported.
+ def call(self, callable, /, *args, **kwargs):
+ """Call the object in the interpreter with given args/kwargs.
- The return value is discarded.
+ Nearly all callables, args, kwargs, and return values are
+ supported. All "shareable" objects are supported, as are
+ "stateless" functions (meaning non-closures that do not use
+ any globals). This method will fall back to pickle.
If the callable raises an exception then the error display
- (including full traceback) is send back between the interpreters
+ (including full traceback) is sent back between the interpreters
and an ExecutionFailed exception is raised, much like what
happens with Interpreter.exec().
"""
- # XXX Support args and kwargs.
- # XXX Support arbitrary callables.
- # XXX Support returning the return value (e.g. via pickle).
- excinfo = _interpreters.call(self._id, callable, restrict=True)
- if excinfo is not None:
- raise ExecutionFailed(excinfo)
+ return self._call(callable, args, kwargs)
- def call_in_thread(self, callable, /):
+ def call_in_thread(self, callable, /, *args, **kwargs):
"""Return a new thread that calls the object in the interpreter.
The return value and any raised exception are discarded.
"""
- def task():
- self.call(callable)
- t = threading.Thread(target=task)
+ t = threading.Thread(target=self._call, args=(callable, args, kwargs))
t.start()
return t
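With the widened signature, arguments and return values now travel across the interpreter boundary (shareable objects directly, everything else via pickle). A hedged sketch using the test-support wrapper:

```python
from test.support import interpreters

interp = interpreters.create()

# The call runs in the other interpreter; its return value comes back.
assert interp.call(pow, 2, 10) == 1024

# call_in_thread() forwards the same args but discards the result.
t = interp.call_in_thread(print, "hello from another interpreter")
t.join()
```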
diff --git a/Lib/test/test_ast/test_ast.py b/Lib/test/test_ast/test_ast.py
index 46745cfa8f8..cc46529c0ef 100644
--- a/Lib/test/test_ast/test_ast.py
+++ b/Lib/test/test_ast/test_ast.py
@@ -1372,17 +1372,17 @@ class ASTHelpers_Test(unittest.TestCase):
def test_dump(self):
node = ast.parse('spam(eggs, "and cheese")')
self.assertEqual(ast.dump(node),
- "Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load()), "
- "args=[Name(id='eggs', ctx=Load()), Constant(value='and cheese')]))])"
+ "Module(body=[Expr(value=Call(func=Name(id='spam'), "
+ "args=[Name(id='eggs'), Constant(value='and cheese')]))])"
)
self.assertEqual(ast.dump(node, annotate_fields=False),
- "Module([Expr(Call(Name('spam', Load()), [Name('eggs', Load()), "
+ "Module([Expr(Call(Name('spam'), [Name('eggs'), "
"Constant('and cheese')]))])"
)
self.assertEqual(ast.dump(node, include_attributes=True),
- "Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load(), "
+ "Module(body=[Expr(value=Call(func=Name(id='spam', "
"lineno=1, col_offset=0, end_lineno=1, end_col_offset=4), "
- "args=[Name(id='eggs', ctx=Load(), lineno=1, col_offset=5, "
+ "args=[Name(id='eggs', lineno=1, col_offset=5, "
"end_lineno=1, end_col_offset=9), Constant(value='and cheese', "
"lineno=1, col_offset=11, end_lineno=1, end_col_offset=23)], "
"lineno=1, col_offset=0, end_lineno=1, end_col_offset=24), "
@@ -1396,18 +1396,18 @@ Module(
body=[
Expr(
value=Call(
- func=Name(id='spam', ctx=Load()),
+ func=Name(id='spam'),
args=[
- Name(id='eggs', ctx=Load()),
+ Name(id='eggs'),
Constant(value='and cheese')]))])""")
self.assertEqual(ast.dump(node, annotate_fields=False, indent='\t'), """\
Module(
\t[
\t\tExpr(
\t\t\tCall(
-\t\t\t\tName('spam', Load()),
+\t\t\t\tName('spam'),
\t\t\t\t[
-\t\t\t\t\tName('eggs', Load()),
+\t\t\t\t\tName('eggs'),
\t\t\t\t\tConstant('and cheese')]))])""")
self.assertEqual(ast.dump(node, include_attributes=True, indent=3), """\
Module(
@@ -1416,7 +1416,6 @@ Module(
value=Call(
func=Name(
id='spam',
- ctx=Load(),
lineno=1,
col_offset=0,
end_lineno=1,
@@ -1424,7 +1423,6 @@ Module(
args=[
Name(
id='eggs',
- ctx=Load(),
lineno=1,
col_offset=5,
end_lineno=1,
@@ -1454,23 +1452,23 @@ Module(
)
node = ast.Raise(exc=ast.Name(id='e', ctx=ast.Load()), lineno=3, col_offset=4)
self.assertEqual(ast.dump(node),
- "Raise(exc=Name(id='e', ctx=Load()))"
+ "Raise(exc=Name(id='e'))"
)
self.assertEqual(ast.dump(node, annotate_fields=False),
- "Raise(Name('e', Load()))"
+ "Raise(Name('e'))"
)
self.assertEqual(ast.dump(node, include_attributes=True),
- "Raise(exc=Name(id='e', ctx=Load()), lineno=3, col_offset=4)"
+ "Raise(exc=Name(id='e'), lineno=3, col_offset=4)"
)
self.assertEqual(ast.dump(node, annotate_fields=False, include_attributes=True),
- "Raise(Name('e', Load()), lineno=3, col_offset=4)"
+ "Raise(Name('e'), lineno=3, col_offset=4)"
)
node = ast.Raise(cause=ast.Name(id='e', ctx=ast.Load()))
self.assertEqual(ast.dump(node),
- "Raise(cause=Name(id='e', ctx=Load()))"
+ "Raise(cause=Name(id='e'))"
)
self.assertEqual(ast.dump(node, annotate_fields=False),
- "Raise(cause=Name('e', Load()))"
+ "Raise(cause=Name('e'))"
)
# Arguments:
node = ast.arguments(args=[ast.arg("x")])
@@ -1502,10 +1500,10 @@ Module(
[ast.Name('dataclass', ctx=ast.Load())],
)
self.assertEqual(ast.dump(node),
- "ClassDef(name='T', keywords=[keyword(arg='a', value=Constant(value=None))], decorator_list=[Name(id='dataclass', ctx=Load())])",
+ "ClassDef(name='T', keywords=[keyword(arg='a', value=Constant(value=None))], decorator_list=[Name(id='dataclass')])",
)
self.assertEqual(ast.dump(node, annotate_fields=False),
- "ClassDef('T', [], [keyword('a', Constant(None))], [], [Name('dataclass', Load())])",
+ "ClassDef('T', [], [keyword('a', Constant(None))], [], [Name('dataclass')])",
)
def test_dump_show_empty(self):
@@ -1533,7 +1531,7 @@ Module(
check_node(
# Corner case: there are no real `Name` instances with `id=''`:
ast.Name(id='', ctx=ast.Load()),
- empty="Name(id='', ctx=Load())",
+ empty="Name(id='')",
full="Name(id='', ctx=Load())",
)
@@ -1544,39 +1542,63 @@ Module(
)
check_node(
+ ast.MatchSingleton(value=[]),
+ empty="MatchSingleton(value=[])",
+ full="MatchSingleton(value=[])",
+ )
+
+ check_node(
ast.Constant(value=None),
empty="Constant(value=None)",
full="Constant(value=None)",
)
check_node(
+ ast.Constant(value=[]),
+ empty="Constant(value=[])",
+ full="Constant(value=[])",
+ )
+
+ check_node(
ast.Constant(value=''),
empty="Constant(value='')",
full="Constant(value='')",
)
+ check_node(
+ ast.Interpolation(value=ast.Constant(42), str=None, conversion=-1),
+ empty="Interpolation(value=Constant(value=42), str=None, conversion=-1)",
+ full="Interpolation(value=Constant(value=42), str=None, conversion=-1)",
+ )
+
+ check_node(
+ ast.Interpolation(value=ast.Constant(42), str=[], conversion=-1),
+ empty="Interpolation(value=Constant(value=42), str=[], conversion=-1)",
+ full="Interpolation(value=Constant(value=42), str=[], conversion=-1)",
+ )
+
check_text(
"def a(b: int = 0, *, c): ...",
- empty="Module(body=[FunctionDef(name='a', args=arguments(args=[arg(arg='b', annotation=Name(id='int', ctx=Load()))], kwonlyargs=[arg(arg='c')], kw_defaults=[None], defaults=[Constant(value=0)]), body=[Expr(value=Constant(value=Ellipsis))])])",
+ empty="Module(body=[FunctionDef(name='a', args=arguments(args=[arg(arg='b', annotation=Name(id='int'))], kwonlyargs=[arg(arg='c')], kw_defaults=[None], defaults=[Constant(value=0)]), body=[Expr(value=Constant(value=Ellipsis))])])",
full="Module(body=[FunctionDef(name='a', args=arguments(posonlyargs=[], args=[arg(arg='b', annotation=Name(id='int', ctx=Load()))], kwonlyargs=[arg(arg='c')], kw_defaults=[None], defaults=[Constant(value=0)]), body=[Expr(value=Constant(value=Ellipsis))], decorator_list=[], type_params=[])], type_ignores=[])",
)
check_text(
"def a(b: int = 0, *, c): ...",
- empty="Module(body=[FunctionDef(name='a', args=arguments(args=[arg(arg='b', annotation=Name(id='int', ctx=Load(), lineno=1, col_offset=9, end_lineno=1, end_col_offset=12), lineno=1, col_offset=6, end_lineno=1, end_col_offset=12)], kwonlyargs=[arg(arg='c', lineno=1, col_offset=21, end_lineno=1, end_col_offset=22)], kw_defaults=[None], defaults=[Constant(value=0, lineno=1, col_offset=15, end_lineno=1, end_col_offset=16)]), body=[Expr(value=Constant(value=Ellipsis, lineno=1, col_offset=25, end_lineno=1, end_col_offset=28), lineno=1, col_offset=25, end_lineno=1, end_col_offset=28)], lineno=1, col_offset=0, end_lineno=1, end_col_offset=28)])",
+ empty="Module(body=[FunctionDef(name='a', args=arguments(args=[arg(arg='b', annotation=Name(id='int', lineno=1, col_offset=9, end_lineno=1, end_col_offset=12), lineno=1, col_offset=6, end_lineno=1, end_col_offset=12)], kwonlyargs=[arg(arg='c', lineno=1, col_offset=21, end_lineno=1, end_col_offset=22)], kw_defaults=[None], defaults=[Constant(value=0, lineno=1, col_offset=15, end_lineno=1, end_col_offset=16)]), body=[Expr(value=Constant(value=Ellipsis, lineno=1, col_offset=25, end_lineno=1, end_col_offset=28), lineno=1, col_offset=25, end_lineno=1, end_col_offset=28)], lineno=1, col_offset=0, end_lineno=1, end_col_offset=28)])",
full="Module(body=[FunctionDef(name='a', args=arguments(posonlyargs=[], args=[arg(arg='b', annotation=Name(id='int', ctx=Load(), lineno=1, col_offset=9, end_lineno=1, end_col_offset=12), lineno=1, col_offset=6, end_lineno=1, end_col_offset=12)], kwonlyargs=[arg(arg='c', lineno=1, col_offset=21, end_lineno=1, end_col_offset=22)], kw_defaults=[None], defaults=[Constant(value=0, lineno=1, col_offset=15, end_lineno=1, end_col_offset=16)]), body=[Expr(value=Constant(value=Ellipsis, lineno=1, col_offset=25, end_lineno=1, end_col_offset=28), lineno=1, col_offset=25, end_lineno=1, end_col_offset=28)], decorator_list=[], type_params=[], lineno=1, col_offset=0, end_lineno=1, end_col_offset=28)], type_ignores=[])",
include_attributes=True,
)
check_text(
'spam(eggs, "and cheese")',
- empty="Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load()), args=[Name(id='eggs', ctx=Load()), Constant(value='and cheese')]))])",
+ empty="Module(body=[Expr(value=Call(func=Name(id='spam'), args=[Name(id='eggs'), Constant(value='and cheese')]))])",
full="Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load()), args=[Name(id='eggs', ctx=Load()), Constant(value='and cheese')], keywords=[]))], type_ignores=[])",
)
check_text(
'spam(eggs, text="and cheese")',
- empty="Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load()), args=[Name(id='eggs', ctx=Load())], keywords=[keyword(arg='text', value=Constant(value='and cheese'))]))])",
+ empty="Module(body=[Expr(value=Call(func=Name(id='spam'), args=[Name(id='eggs')], keywords=[keyword(arg='text', value=Constant(value='and cheese'))]))])",
full="Module(body=[Expr(value=Call(func=Name(id='spam', ctx=Load()), args=[Name(id='eggs', ctx=Load())], keywords=[keyword(arg='text', value=Constant(value='and cheese'))]))], type_ignores=[])",
)
@@ -1610,12 +1632,12 @@ Module(
self.assertEqual(src, ast.fix_missing_locations(src))
self.maxDiff = None
self.assertEqual(ast.dump(src, include_attributes=True),
- "Module(body=[Expr(value=Call(func=Name(id='write', ctx=Load(), "
+ "Module(body=[Expr(value=Call(func=Name(id='write', "
"lineno=1, col_offset=0, end_lineno=1, end_col_offset=5), "
"args=[Constant(value='spam', lineno=1, col_offset=6, end_lineno=1, "
"end_col_offset=12)], lineno=1, col_offset=0, end_lineno=1, "
"end_col_offset=13), lineno=1, col_offset=0, end_lineno=1, "
- "end_col_offset=13), Expr(value=Call(func=Name(id='spam', ctx=Load(), "
+ "end_col_offset=13), Expr(value=Call(func=Name(id='spam', "
"lineno=1, col_offset=0, end_lineno=1, end_col_offset=0), "
"args=[Constant(value='eggs', lineno=1, col_offset=0, end_lineno=1, "
"end_col_offset=0)], lineno=1, col_offset=0, end_lineno=1, "
@@ -3335,7 +3357,7 @@ class CommandLineTests(unittest.TestCase):
body=[
AnnAssign(
target=Name(id='x', ctx=Store()),
- annotation=Name(id='bool', ctx=Load()),
+ annotation=Name(id='bool'),
value=Constant(value=1),
simple=1)],
type_ignores=[
@@ -3363,7 +3385,7 @@ class CommandLineTests(unittest.TestCase):
expect = '''
Expression(
body=Call(
- func=Name(id='print', ctx=Load()),
+ func=Name(id='print'),
args=[
Constant(value=1),
Constant(value=2),
@@ -3379,12 +3401,11 @@ class CommandLineTests(unittest.TestCase):
expect = '''
FunctionType(
argtypes=[
- Name(id='int', ctx=Load()),
- Name(id='str', ctx=Load())],
+ Name(id='int'),
+ Name(id='str')],
returns=Subscript(
- value=Name(id='list', ctx=Load()),
- slice=Name(id='int', ctx=Load()),
- ctx=Load()))
+ value=Name(id='list'),
+ slice=Name(id='int')))
'''
for flag in ('-m=func_type', '--mode=func_type'):
with self.subTest(flag=flag):
@@ -3398,7 +3419,7 @@ class CommandLineTests(unittest.TestCase):
body=[
AnnAssign(
target=Name(id='x', ctx=Store()),
- annotation=Name(id='bool', ctx=Load()),
+ annotation=Name(id='bool'),
value=Constant(value=1),
simple=1)])
'''
@@ -3443,7 +3464,7 @@ class CommandLineTests(unittest.TestCase):
Module(
body=[
Match(
- subject=Name(id='x', ctx=Load()),
+ subject=Name(id='x'),
cases=[
match_case(
pattern=MatchValue(
@@ -3466,7 +3487,7 @@ class CommandLineTests(unittest.TestCase):
Module(
body=[
Match(
- subject=Name(id='a', ctx=Load()),
+ subject=Name(id='a'),
cases=[
match_case(
pattern=MatchValue(
@@ -3492,7 +3513,7 @@ class CommandLineTests(unittest.TestCase):
Module(
body=[
Match(
- subject=Name(id='a', ctx=Load()),
+ subject=Name(id='a'),
cases=[
match_case(
pattern=MatchValue(
diff --git a/Lib/test/test_asyncgen.py b/Lib/test/test_asyncgen.py
index 2c44647bf3e..636cb33dd98 100644
--- a/Lib/test/test_asyncgen.py
+++ b/Lib/test/test_asyncgen.py
@@ -2021,6 +2021,15 @@ class TestUnawaitedWarnings(unittest.TestCase):
g.athrow(RuntimeError)
gc_collect()
+ def test_athrow_throws_immediately(self):
+ async def gen():
+ yield 1
+
+ g = gen()
+ msg = "athrow expected at least 1 argument, got 0"
+ with self.assertRaisesRegex(TypeError, msg):
+ g.athrow()
+
def test_aclose(self):
async def gen():
yield 1
diff --git a/Lib/test/test_capi/test_opt.py b/Lib/test/test_capi/test_opt.py
index cb6eae48414..a292ebcc7f4 100644
--- a/Lib/test/test_capi/test_opt.py
+++ b/Lib/test/test_capi/test_opt.py
@@ -1183,6 +1183,17 @@ class TestUopsOptimization(unittest.TestCase):
self.assertIsNotNone(ex)
self.assertIn("_RETURN_GENERATOR", get_opnames(ex))
+ def test_for_iter(self):
+ def testfunc(n):
+ t = 0
+ for i in set(range(n)):
+ t += i
+ return t
+ res, ex = self._run_with_optimizer(testfunc, TIER2_THRESHOLD)
+ self.assertEqual(res, TIER2_THRESHOLD * (TIER2_THRESHOLD - 1) // 2)
+ self.assertIsNotNone(ex)
+ self.assertIn("_FOR_ITER_TIER_TWO", get_opnames(ex))
+
@unittest.skip("Tracing into generators currently isn't supported.")
def test_for_iter_gen(self):
def gen(n):
diff --git a/Lib/test/test_class.py b/Lib/test/test_class.py
index 4c12d43556f..8c7a62a74ba 100644
--- a/Lib/test/test_class.py
+++ b/Lib/test/test_class.py
@@ -652,6 +652,7 @@ class ClassTests(unittest.TestCase):
a = A(hash(A.f)^(-1))
hash(a.f)
+ @cpython_only
def testSetattrWrapperNameIntern(self):
# Issue #25794: __setattr__ should intern the attribute name
class A:
diff --git a/Lib/test/test_code.py b/Lib/test/test_code.py
index 32cf8aacaf6..9fc2b047bef 100644
--- a/Lib/test/test_code.py
+++ b/Lib/test/test_code.py
@@ -701,6 +701,26 @@ class CodeTest(unittest.TestCase):
'checks': CO_FAST_LOCAL,
'res': CO_FAST_LOCAL,
},
+ defs.spam_full_args: {
+ 'a': POSONLY,
+ 'b': POSONLY,
+ 'c': POSORKW,
+ 'd': POSORKW,
+ 'e': KWONLY,
+ 'f': KWONLY,
+ 'args': VARARGS,
+ 'kwargs': VARKWARGS,
+ },
+ defs.spam_full_args_with_defaults: {
+ 'a': POSONLY,
+ 'b': POSONLY,
+ 'c': POSORKW,
+ 'd': POSORKW,
+ 'e': KWONLY,
+ 'f': KWONLY,
+ 'args': VARARGS,
+ 'kwargs': VARKWARGS,
+ },
defs.spam_args_attrs_and_builtins: {
'a': POSONLY,
'b': POSONLY,
@@ -714,6 +734,7 @@ class CodeTest(unittest.TestCase):
defs.spam_returns_arg: {
'x': POSORKW,
},
+ defs.spam_raises: {},
defs.spam_with_inner_not_closure: {
'eggs': CO_FAST_LOCAL,
},
@@ -934,6 +955,20 @@ class CodeTest(unittest.TestCase):
purelocals=5,
globalvars=6,
),
+ defs.spam_full_args: new_var_counts(
+ posonly=2,
+ posorkw=2,
+ kwonly=2,
+ varargs=1,
+ varkwargs=1,
+ ),
+ defs.spam_full_args_with_defaults: new_var_counts(
+ posonly=2,
+ posorkw=2,
+ kwonly=2,
+ varargs=1,
+ varkwargs=1,
+ ),
defs.spam_args_attrs_and_builtins: new_var_counts(
posonly=2,
posorkw=2,
@@ -945,6 +980,9 @@ class CodeTest(unittest.TestCase):
defs.spam_returns_arg: new_var_counts(
posorkw=1,
),
+ defs.spam_raises: new_var_counts(
+ globalvars=1,
+ ),
defs.spam_with_inner_not_closure: new_var_counts(
purelocals=1,
),
@@ -1097,10 +1135,16 @@ class CodeTest(unittest.TestCase):
def test_stateless(self):
self.maxDiff = None
+ STATELESS_FUNCTIONS = [
+ *defs.STATELESS_FUNCTIONS,
+ # stateless with defaults
+ defs.spam_full_args_with_defaults,
+ ]
+
for func in defs.STATELESS_CODE:
with self.subTest((func, '(code)')):
_testinternalcapi.verify_stateless_code(func.__code__)
- for func in defs.STATELESS_FUNCTIONS:
+ for func in STATELESS_FUNCTIONS:
with self.subTest((func, '(func)')):
_testinternalcapi.verify_stateless_code(func)
@@ -1110,7 +1154,7 @@ class CodeTest(unittest.TestCase):
with self.assertRaises(Exception):
_testinternalcapi.verify_stateless_code(func.__code__)
- if func not in defs.STATELESS_FUNCTIONS:
+ if func not in STATELESS_FUNCTIONS:
with self.subTest((func, '(func)')):
with self.assertRaises(Exception):
_testinternalcapi.verify_stateless_code(func)
diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py
index 9aace57633b..60feab225a1 100644
--- a/Lib/test/test_csv.py
+++ b/Lib/test/test_csv.py
@@ -1122,19 +1122,22 @@ class TestDialectValidity(unittest.TestCase):
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"quotechar" must be a 1-character string')
+ '"quotechar" must be a unicode character or None, '
+ 'not a string of length 0')
mydialect.quotechar = "''"
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"quotechar" must be a 1-character string')
+ '"quotechar" must be a unicode character or None, '
+ 'not a string of length 2')
mydialect.quotechar = 4
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"quotechar" must be string or None, not int')
+ '"quotechar" must be a unicode character or None, '
+ 'not int')
def test_delimiter(self):
class mydialect(csv.Dialect):
@@ -1151,31 +1154,32 @@ class TestDialectValidity(unittest.TestCase):
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"delimiter" must be a 1-character string')
+ '"delimiter" must be a unicode character, '
+ 'not a string of length 3')
mydialect.delimiter = ""
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"delimiter" must be a 1-character string')
+ '"delimiter" must be a unicode character, not a string of length 0')
mydialect.delimiter = b","
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"delimiter" must be string, not bytes')
+ '"delimiter" must be a unicode character, not bytes')
mydialect.delimiter = 4
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"delimiter" must be string, not int')
+ '"delimiter" must be a unicode character, not int')
mydialect.delimiter = None
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"delimiter" must be string, not NoneType')
+ '"delimiter" must be a unicode character, not NoneType')
def test_escapechar(self):
class mydialect(csv.Dialect):
@@ -1189,20 +1193,32 @@ class TestDialectValidity(unittest.TestCase):
self.assertEqual(d.escapechar, "\\")
mydialect.escapechar = ""
- with self.assertRaisesRegex(csv.Error, '"escapechar" must be a 1-character string'):
+ with self.assertRaises(csv.Error) as cm:
mydialect()
+ self.assertEqual(str(cm.exception),
+ '"escapechar" must be a unicode character or None, '
+ 'not a string of length 0')
mydialect.escapechar = "**"
- with self.assertRaisesRegex(csv.Error, '"escapechar" must be a 1-character string'):
+ with self.assertRaises(csv.Error) as cm:
mydialect()
+ self.assertEqual(str(cm.exception),
+ '"escapechar" must be a unicode character or None, '
+ 'not a string of length 2')
mydialect.escapechar = b"*"
- with self.assertRaisesRegex(csv.Error, '"escapechar" must be string or None, not bytes'):
+ with self.assertRaises(csv.Error) as cm:
mydialect()
+ self.assertEqual(str(cm.exception),
+ '"escapechar" must be a unicode character or None, '
+ 'not bytes')
mydialect.escapechar = 4
- with self.assertRaisesRegex(csv.Error, '"escapechar" must be string or None, not int'):
+ with self.assertRaises(csv.Error) as cm:
mydialect()
+ self.assertEqual(str(cm.exception),
+ '"escapechar" must be a unicode character or None, '
+ 'not int')
def test_lineterminator(self):
class mydialect(csv.Dialect):
@@ -1223,7 +1239,13 @@ class TestDialectValidity(unittest.TestCase):
with self.assertRaises(csv.Error) as cm:
mydialect()
self.assertEqual(str(cm.exception),
- '"lineterminator" must be a string')
+ '"lineterminator" must be a string, not int')
+
+ mydialect.lineterminator = None
+ with self.assertRaises(csv.Error) as cm:
+ mydialect()
+ self.assertEqual(str(cm.exception),
+ '"lineterminator" must be a string, not NoneType')
def test_invalid_chars(self):
def create_invalid(field_name, value, **kwargs):
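
The assertions above track the reworded validation errors for csv dialect attributes: single-character options now report the offending type or string length instead of the generic "must be a 1-character string" message. A minimal sketch, assuming an interpreter that includes this change; the class mirrors the mydialect helpers used in these tests:

    import csv

    class MyDialect(csv.Dialect):
        delimiter = ":::"             # invalid: must be exactly one character
        quotechar = '"'
        escapechar = "\\"
        doublequote = False
        skipinitialspace = True
        lineterminator = "\r\n"
        quoting = csv.QUOTE_NONE

    try:
        MyDialect()
    except csv.Error as exc:
        print(exc)   # '"delimiter" must be a unicode character, not a string of length 3'
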
diff --git a/Lib/test/test_dbm.py b/Lib/test/test_dbm.py
index a10922a403e..7e8d78b8940 100644
--- a/Lib/test/test_dbm.py
+++ b/Lib/test/test_dbm.py
@@ -135,6 +135,67 @@ class AnyDBMTestCase:
assert(f[key] == b"Python:")
f.close()
+ def test_anydbm_readonly_reorganize(self):
+ self.init_db()
+ with dbm.open(_fname, 'r') as d:
+ # Early stopping.
+ if not hasattr(d, 'reorganize'):
+ self.skipTest("method reorganize not available this dbm submodule")
+
+ self.assertRaises(dbm.error, lambda: d.reorganize())
+
+ def test_anydbm_reorganize_not_changed_content(self):
+ self.init_db()
+ with dbm.open(_fname, 'c') as d:
+ # Early stopping.
+ if not hasattr(d, 'reorganize'):
+ self.skipTest("method reorganize not available this dbm submodule")
+
+ keys_before = sorted(d.keys())
+ values_before = [d[k] for k in keys_before]
+ d.reorganize()
+ keys_after = sorted(d.keys())
+ values_after = [d[k] for k in keys_before]
+ self.assertEqual(keys_before, keys_after)
+ self.assertEqual(values_before, values_after)
+
+ def test_anydbm_reorganize_decreased_size(self):
+
+ def _calculate_db_size(db_path):
+ if os.path.isfile(db_path):
+ return os.path.getsize(db_path)
+ total_size = 0
+ for root, _, filenames in os.walk(db_path):
+ for filename in filenames:
+ file_path = os.path.join(root, filename)
+ total_size += os.path.getsize(file_path)
+ return total_size
+
+ # This test requires relatively large databases to reliably show a difference in size before and after reorganizing.
+ with dbm.open(_fname, 'n') as f:
+ # Early stopping.
+ if not hasattr(f, 'reorganize'):
+ self.skipTest("method reorganize not available this dbm submodule")
+
+ for k in self._dict:
+ f[k.encode('ascii')] = self._dict[k] * 100000
+ db_keys = list(f.keys())
+
+ # Make sure to calculate the size of the database only after the file is closed to ensure file contents are flushed to disk.
+ size_before = _calculate_db_size(os.path.dirname(_fname))
+
+ # Delete some elements from the start of the database.
+ keys_to_delete = db_keys[:len(db_keys) // 2]
+ with dbm.open(_fname, 'c') as f:
+ for k in keys_to_delete:
+ del f[k]
+ f.reorganize()
+
+ # Make sure to calculate the size of the database only after the file is closed to ensure file contents are flushed to disk.
+ size_after = _calculate_db_size(os.path.dirname(_fname))
+
+ self.assertLess(size_after, size_before)
+
def test_open_with_bytes(self):
dbm.open(os.fsencode(_fname), "c").close()
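
The reorganize() tests added above rely on the pattern of deleting entries and then compacting the database, since deletions alone do not necessarily shrink the underlying file(s). A minimal sketch of that pattern, assuming a backend whose handle exposes reorganize(); the file name is hypothetical:

    import dbm

    path = "example-db"
    with dbm.open(path, "n") as db:                   # create a fresh database
        for i in range(1000):
            db[f"key{i}".encode()] = b"x" * 1000

    with dbm.open(path, "c") as db:
        for i in range(500):                          # drop half of the entries
            del db[f"key{i}".encode()]
        if hasattr(db, "reorganize"):                 # not every dbm submodule supports it
            db.reorganize()                           # compact the database in place
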
diff --git a/Lib/test/test_dbm_gnu.py b/Lib/test/test_dbm_gnu.py
index 66268c42a30..e0b988b7b95 100644
--- a/Lib/test/test_dbm_gnu.py
+++ b/Lib/test/test_dbm_gnu.py
@@ -74,12 +74,12 @@ class TestGdbm(unittest.TestCase):
# Test the flag parameter open() by trying all supported flag modes.
all = set(gdbm.open_flags)
# Test standard flags (presumably "crwn").
- modes = all - set('fsu')
+ modes = all - set('fsum')
for mode in sorted(modes): # put "c" mode first
self.g = gdbm.open(filename, mode)
self.g.close()
- # Test additional flags (presumably "fsu").
+ # Test additional flags (presumably "fsum").
flags = all - set('crwn')
for mode in modes:
for flag in flags:
@@ -217,6 +217,29 @@ class TestGdbm(unittest.TestCase):
create_empty_file(os.path.join(d, 'test'))
self.assertRaises(gdbm.error, gdbm.open, filename, 'r')
+ @unittest.skipUnless('m' in gdbm.open_flags, "requires 'm' in open_flags")
+ def test_nommap_no_crash(self):
+ self.g = g = gdbm.open(filename, 'nm')
+ os.truncate(filename, 0)
+
+ g.get(b'a', b'c')
+ g.keys()
+ g.firstkey()
+ g.nextkey(b'a')
+ with self.assertRaises(KeyError):
+ g[b'a']
+ with self.assertRaises(gdbm.error):
+ len(g)
+
+ with self.assertRaises(gdbm.error):
+ g[b'a'] = b'c'
+ with self.assertRaises(gdbm.error):
+ del g[b'a']
+ with self.assertRaises(gdbm.error):
+ g.setdefault(b'a', b'c')
+ with self.assertRaises(gdbm.error):
+ g.reorganize()
+
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py
index c0a1e378583..ef64b878805 100644
--- a/Lib/test/test_decimal.py
+++ b/Lib/test/test_decimal.py
@@ -981,6 +981,7 @@ class FormatTest:
('.0f', '0e-2', '0'),
('.0f', '3.14159265', '3'),
('.1f', '3.14159265', '3.1'),
+ ('.01f', '3.14159265', '3.1'), # leading zero in precision
('.4f', '3.14159265', '3.1416'),
('.6f', '3.14159265', '3.141593'),
('.7f', '3.14159265', '3.1415926'), # round-half-even!
@@ -1066,6 +1067,7 @@ class FormatTest:
('8,', '123456', ' 123,456'),
('08,', '123456', '0,123,456'), # special case: extra 0 needed
('+08,', '123456', '+123,456'), # but not if there's a sign
+ ('008,', '123456', '0,123,456'), # leading zero in width
(' 08,', '123456', ' 123,456'),
('08,', '-123456', '-123,456'),
('+09,', '123456', '+0,123,456'),
diff --git a/Lib/test/test_dis.py b/Lib/test/test_dis.py
index ec930a728aa..355990ed58e 100644
--- a/Lib/test/test_dis.py
+++ b/Lib/test/test_dis.py
@@ -606,7 +606,7 @@ dis_asyncwith = """\
POP_TOP
L1: RESUME 0
-%4d LOAD_FAST_BORROW 0 (c)
+%4d LOAD_FAST 0 (c)
COPY 1
LOAD_SPECIAL 3 (__aexit__)
SWAP 2
diff --git a/Lib/test/test_doctest/test_doctest.py b/Lib/test/test_doctest/test_doctest.py
index c5b247797c3..72763d4a013 100644
--- a/Lib/test/test_doctest/test_doctest.py
+++ b/Lib/test/test_doctest/test_doctest.py
@@ -2269,20 +2269,22 @@ def test_DocTestSuite():
>>> suite = doctest.DocTestSuite(test.test_doctest.sample_doctest)
>>> result = suite.run(unittest.TestResult())
>>> result
- <unittest.result.TestResult run=9 errors=0 failures=4>
+ <unittest.result.TestResult run=9 errors=2 failures=2>
>>> for tst, _ in result.failures:
... print(tst)
- bad (test.test_doctest.sample_doctest.__test__)
- foo (test.test_doctest.sample_doctest)
- test_silly_setup (test.test_doctest.sample_doctest)
- y_is_one (test.test_doctest.sample_doctest)
+ bad (test.test_doctest.sample_doctest.__test__) [0]
+ foo (test.test_doctest.sample_doctest) [0]
+ >>> for tst, _ in result.errors:
+ ... print(tst)
+ test_silly_setup (test.test_doctest.sample_doctest) [1]
+ y_is_one (test.test_doctest.sample_doctest) [0]
We can also supply the module by name:
>>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest')
>>> result = suite.run(unittest.TestResult())
>>> result
- <unittest.result.TestResult run=9 errors=0 failures=4>
+ <unittest.result.TestResult run=9 errors=2 failures=2>
The module need not contain any doctest examples:
@@ -2304,21 +2306,26 @@ def test_DocTestSuite():
>>> result
<unittest.result.TestResult run=6 errors=0 failures=2>
>>> len(result.skipped)
- 2
+ 7
>>> for tst, _ in result.skipped:
... print(tst)
+ double_skip (test.test_doctest.sample_doctest_skip) [0]
+ double_skip (test.test_doctest.sample_doctest_skip) [1]
double_skip (test.test_doctest.sample_doctest_skip)
+ partial_skip_fail (test.test_doctest.sample_doctest_skip) [0]
+ partial_skip_pass (test.test_doctest.sample_doctest_skip) [0]
+ single_skip (test.test_doctest.sample_doctest_skip) [0]
single_skip (test.test_doctest.sample_doctest_skip)
>>> for tst, _ in result.failures:
... print(tst)
- no_skip_fail (test.test_doctest.sample_doctest_skip)
- partial_skip_fail (test.test_doctest.sample_doctest_skip)
+ no_skip_fail (test.test_doctest.sample_doctest_skip) [0]
+ partial_skip_fail (test.test_doctest.sample_doctest_skip) [1]
We can use the current module:
>>> suite = test.test_doctest.sample_doctest.test_suite()
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=4>
+ <unittest.result.TestResult run=9 errors=2 failures=2>
We can also provide a DocTestFinder:
@@ -2326,7 +2333,7 @@ def test_DocTestSuite():
>>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest',
... test_finder=finder)
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=4>
+ <unittest.result.TestResult run=9 errors=2 failures=2>
The DocTestFinder need not return any tests:
@@ -2342,7 +2349,7 @@ def test_DocTestSuite():
>>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest', globs={})
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=5>
+ <unittest.result.TestResult run=9 errors=3 failures=2>
Alternatively, we can provide extra globals. Here we'll make an
error go away by providing an extra global variable:
@@ -2350,7 +2357,7 @@ def test_DocTestSuite():
>>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest',
... extraglobs={'y': 1})
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=3>
+ <unittest.result.TestResult run=9 errors=1 failures=2>
You can pass option flags. Here we'll cause an extra error
by disabling the blank-line feature:
@@ -2358,7 +2365,7 @@ def test_DocTestSuite():
>>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest',
... optionflags=doctest.DONT_ACCEPT_BLANKLINE)
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=5>
+ <unittest.result.TestResult run=9 errors=2 failures=3>
You can supply setUp and tearDown functions:
@@ -2375,7 +2382,7 @@ def test_DocTestSuite():
>>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest',
... setUp=setUp, tearDown=tearDown)
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=3>
+ <unittest.result.TestResult run=9 errors=1 failures=2>
But the tearDown restores sanity:
@@ -2393,7 +2400,7 @@ def test_DocTestSuite():
>>> suite = doctest.DocTestSuite('test.test_doctest.sample_doctest', setUp=setUp)
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=3>
+ <unittest.result.TestResult run=9 errors=1 failures=2>
Here, we didn't need to use a tearDown function because we
modified the test globals, which are a copy of the
@@ -2409,162 +2416,97 @@ def test_DocTestSuite_errors():
>>> suite = doctest.DocTestSuite(mod)
>>> result = suite.run(unittest.TestResult())
>>> result
- <unittest.result.TestResult run=4 errors=0 failures=4>
+ <unittest.result.TestResult run=4 errors=6 failures=3>
>>> print(result.failures[0][1]) # doctest: +ELLIPSIS
Traceback (most recent call last):
- File ...
- raise self.failureException(self.format_failure(new.getvalue()))
- AssertionError: Failed doctest test for test.test_doctest.sample_doctest_errors
- File "...sample_doctest_errors.py", line 0, in sample_doctest_errors
- <BLANKLINE>
- ----------------------------------------------------------------------
- File "...sample_doctest_errors.py", line 5, in test.test_doctest.sample_doctest_errors
- Failed example:
+ File "...sample_doctest_errors.py", line 5, in test.test_doctest.sample_doctest_errors
+ >...>> 2 + 2
+ AssertionError: Failed example:
2 + 2
Expected:
5
Got:
4
- ----------------------------------------------------------------------
- File "...sample_doctest_errors.py", line 7, in test.test_doctest.sample_doctest_errors
- Failed example:
- 1/0
- Exception raised:
- Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- File "<doctest test.test_doctest.sample_doctest_errors[1]>", line 1, in <module>
- 1/0
- ~^~
- ZeroDivisionError: division by zero
- <BLANKLINE>
<BLANKLINE>
>>> print(result.failures[1][1]) # doctest: +ELLIPSIS
Traceback (most recent call last):
- File ...
- raise self.failureException(self.format_failure(new.getvalue()))
- AssertionError: Failed doctest test for test.test_doctest.sample_doctest_errors.__test__.bad
- File "...sample_doctest_errors.py", line unknown line number, in bad
- <BLANKLINE>
- ----------------------------------------------------------------------
- File "...sample_doctest_errors.py", line ?, in test.test_doctest.sample_doctest_errors.__test__.bad
- Failed example:
+ File "...sample_doctest_errors.py", line None, in test.test_doctest.sample_doctest_errors.__test__.bad
+ AssertionError: Failed example:
2 + 2
Expected:
5
Got:
4
- ----------------------------------------------------------------------
- File "...sample_doctest_errors.py", line ?, in test.test_doctest.sample_doctest_errors.__test__.bad
- Failed example:
- 1/0
- Exception raised:
- Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- File "<doctest test.test_doctest.sample_doctest_errors.__test__.bad[1]>", line 1, in <module>
- 1/0
- ~^~
- ZeroDivisionError: division by zero
- <BLANKLINE>
<BLANKLINE>
>>> print(result.failures[2][1]) # doctest: +ELLIPSIS
Traceback (most recent call last):
- File ...
- raise self.failureException(self.format_failure(new.getvalue()))
- AssertionError: Failed doctest test for test.test_doctest.sample_doctest_errors.errors
- File "...sample_doctest_errors.py", line 14, in errors
- <BLANKLINE>
- ----------------------------------------------------------------------
- File "...sample_doctest_errors.py", line 16, in test.test_doctest.sample_doctest_errors.errors
- Failed example:
+ File "...sample_doctest_errors.py", line 16, in test.test_doctest.sample_doctest_errors.errors
+ >...>> 2 + 2
+ AssertionError: Failed example:
2 + 2
Expected:
5
Got:
4
- ----------------------------------------------------------------------
- File "...sample_doctest_errors.py", line 18, in test.test_doctest.sample_doctest_errors.errors
- Failed example:
+ <BLANKLINE>
+ >>> print(result.errors[0][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...sample_doctest_errors.py", line 7, in test.test_doctest.sample_doctest_errors
+ >...>> 1/0
+ File "<doctest test.test_doctest.sample_doctest_errors[1]>", line 1, in <module>
1/0
- Exception raised:
- Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- File "<doctest test.test_doctest.sample_doctest_errors.errors[1]>", line 1, in <module>
- 1/0
- ~^~
- ZeroDivisionError: division by zero
- ----------------------------------------------------------------------
- File "...sample_doctest_errors.py", line 23, in test.test_doctest.sample_doctest_errors.errors
- Failed example:
- f()
- Exception raised:
- Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- File "<doctest test.test_doctest.sample_doctest_errors.errors[3]>", line 1, in <module>
- f()
- ~^^
- File "<doctest test.test_doctest.sample_doctest_errors.errors[2]>", line 2, in f
- 2 + '2'
- ~~^~~~~
- TypeError: ...
- ----------------------------------------------------------------------
- File "...sample_doctest_errors.py", line 25, in test.test_doctest.sample_doctest_errors.errors
- Failed example:
- g()
- Exception raised:
- Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- File "<doctest test.test_doctest.sample_doctest_errors.errors[4]>", line 1, in <module>
- g()
- ~^^
- File "...sample_doctest_errors.py", line 12, in g
- [][0] # line 12
- ~~^^^
- IndexError: list index out of range
+ ~^~
+ ZeroDivisionError: division by zero
<BLANKLINE>
+ >>> print(result.errors[1][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...sample_doctest_errors.py", line None, in test.test_doctest.sample_doctest_errors.__test__.bad
+ File "<doctest test.test_doctest.sample_doctest_errors.__test__.bad[1]>", line 1, in <module>
+ 1/0
+ ~^~
+ ZeroDivisionError: division by zero
<BLANKLINE>
- >>> print(result.failures[3][1]) # doctest: +ELLIPSIS
+ >>> print(result.errors[2][1]) # doctest: +ELLIPSIS
Traceback (most recent call last):
- File ...
- raise self.failureException(self.format_failure(new.getvalue()))
- AssertionError: Failed doctest test for test.test_doctest.sample_doctest_errors.syntax_error
- File "...sample_doctest_errors.py", line 29, in syntax_error
+ File "...sample_doctest_errors.py", line 18, in test.test_doctest.sample_doctest_errors.errors
+ >...>> 1/0
+ File "<doctest test.test_doctest.sample_doctest_errors.errors[1]>", line 1, in <module>
+ 1/0
+ ~^~
+ ZeroDivisionError: division by zero
<BLANKLINE>
- ----------------------------------------------------------------------
- File "...sample_doctest_errors.py", line 31, in test.test_doctest.sample_doctest_errors.syntax_error
- Failed example:
- 2+*3
- Exception raised:
- Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^
- File "<doctest test.test_doctest.sample_doctest_errors.syntax_error[0]>", line 1
- 2+*3
- ^
- SyntaxError: invalid syntax
+ >>> print(result.errors[3][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...sample_doctest_errors.py", line 23, in test.test_doctest.sample_doctest_errors.errors
+ >...>> f()
+ File "<doctest test.test_doctest.sample_doctest_errors.errors[3]>", line 1, in <module>
+ f()
+ ~^^
+ File "<doctest test.test_doctest.sample_doctest_errors.errors[2]>", line 2, in f
+ 2 + '2'
+ ~~^~~~~
+ TypeError: ...
<BLANKLINE>
+ >>> print(result.errors[4][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...sample_doctest_errors.py", line 25, in test.test_doctest.sample_doctest_errors.errors
+ >...>> g()
+ File "<doctest test.test_doctest.sample_doctest_errors.errors[4]>", line 1, in <module>
+ g()
+ ~^^
+ File "...sample_doctest_errors.py", line 12, in g
+ [][0] # line 12
+ ~~^^^
+ IndexError: list index out of range
+ <BLANKLINE>
+ >>> print(result.errors[5][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...sample_doctest_errors.py", line 31, in test.test_doctest.sample_doctest_errors.syntax_error
+ >...>> 2+*3
+ File "<doctest test.test_doctest.sample_doctest_errors.syntax_error[0]>", line 1
+ 2+*3
+ ^
+ SyntaxError: invalid syntax
<BLANKLINE>
"""
@@ -2579,7 +2521,7 @@ def test_DocFileSuite():
... 'test_doctest2.txt',
... 'test_doctest4.txt')
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=3 errors=0 failures=2>
+ <unittest.result.TestResult run=3 errors=2 failures=0>
The test files are looked for in the directory containing the
calling module. A package keyword argument can be provided to
@@ -2591,14 +2533,14 @@ def test_DocFileSuite():
... 'test_doctest4.txt',
... package='test.test_doctest')
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=3 errors=0 failures=2>
+ <unittest.result.TestResult run=3 errors=2 failures=0>
'/' should be used as a path separator. It will be converted
to a native separator at run time:
>>> suite = doctest.DocFileSuite('../test_doctest/test_doctest.txt')
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=1 errors=0 failures=1>
+ <unittest.result.TestResult run=1 errors=1 failures=0>
If DocFileSuite is used from an interactive session, then files
are resolved relative to the directory of sys.argv[0]:
@@ -2624,7 +2566,7 @@ def test_DocFileSuite():
>>> suite = doctest.DocFileSuite(test_file, module_relative=False)
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=1 errors=0 failures=1>
+ <unittest.result.TestResult run=1 errors=1 failures=0>
It is an error to specify `package` when `module_relative=False`:
@@ -2642,12 +2584,15 @@ def test_DocFileSuite():
... 'test_doctest_skip2.txt')
>>> result = suite.run(unittest.TestResult())
>>> result
- <unittest.result.TestResult run=4 errors=0 failures=1>
+ <unittest.result.TestResult run=4 errors=1 failures=0>
>>> len(result.skipped)
- 1
+ 4
>>> for tst, _ in result.skipped: # doctest: +ELLIPSIS
... print('=', tst)
+ = ...test_doctest_skip.txt [0]
+ = ...test_doctest_skip.txt [1]
= ...test_doctest_skip.txt
+ = ...test_doctest_skip2.txt [0]
You can specify initial global variables:
@@ -2656,7 +2601,7 @@ def test_DocFileSuite():
... 'test_doctest4.txt',
... globs={'favorite_color': 'blue'})
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=3 errors=0 failures=1>
+ <unittest.result.TestResult run=3 errors=1 failures=0>
In this case, we supplied a missing favorite color. You can
provide doctest options:
@@ -2667,7 +2612,7 @@ def test_DocFileSuite():
... optionflags=doctest.DONT_ACCEPT_BLANKLINE,
... globs={'favorite_color': 'blue'})
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=3 errors=0 failures=2>
+ <unittest.result.TestResult run=3 errors=1 failures=1>
And, you can provide setUp and tearDown functions:
@@ -2686,7 +2631,7 @@ def test_DocFileSuite():
... 'test_doctest4.txt',
... setUp=setUp, tearDown=tearDown)
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=3 errors=0 failures=1>
+ <unittest.result.TestResult run=3 errors=1 failures=0>
But the tearDown restores sanity:
@@ -2728,7 +2673,7 @@ def test_DocFileSuite():
... 'test_doctest4.txt',
... encoding='utf-8')
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=3 errors=0 failures=2>
+ <unittest.result.TestResult run=3 errors=2 failures=0>
"""
def test_DocFileSuite_errors():
@@ -2738,72 +2683,49 @@ def test_DocFileSuite_errors():
>>> suite = doctest.DocFileSuite('test_doctest_errors.txt')
>>> result = suite.run(unittest.TestResult())
>>> result
- <unittest.result.TestResult run=1 errors=0 failures=1>
+ <unittest.result.TestResult run=1 errors=3 failures=1>
>>> print(result.failures[0][1]) # doctest: +ELLIPSIS
Traceback (most recent call last):
- File ...
- raise self.failureException(self.format_failure(new.getvalue()))
- AssertionError: Failed doctest test for test_doctest_errors.txt
- File "...test_doctest_errors.txt", line 0
- <BLANKLINE>
- ----------------------------------------------------------------------
- File "...test_doctest_errors.txt", line 4, in test_doctest_errors.txt
- Failed example:
+ File "...test_doctest_errors.txt", line 4, in test_doctest_errors.txt
+ >...>> 2 + 2
+ AssertionError: Failed example:
2 + 2
Expected:
5
Got:
4
- ----------------------------------------------------------------------
- File "...test_doctest_errors.txt", line 6, in test_doctest_errors.txt
- Failed example:
+ <BLANKLINE>
+ >>> print(result.errors[0][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...test_doctest_errors.txt", line 6, in test_doctest_errors.txt
+ >...>> 1/0
+ File "<doctest test_doctest_errors.txt[1]>", line 1, in <module>
1/0
- Exception raised:
- Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- File "<doctest test_doctest_errors.txt[1]>", line 1, in <module>
- 1/0
- ~^~
- ZeroDivisionError: division by zero
- ----------------------------------------------------------------------
- File "...test_doctest_errors.txt", line 11, in test_doctest_errors.txt
- Failed example:
+ ~^~
+ ZeroDivisionError: division by zero
+ <BLANKLINE>
+ >>> print(result.errors[1][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...test_doctest_errors.txt", line 11, in test_doctest_errors.txt
+ >...>> f()
+ File "<doctest test_doctest_errors.txt[3]>", line 1, in <module>
f()
- Exception raised:
- Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- File "<doctest test_doctest_errors.txt[3]>", line 1, in <module>
- f()
- ~^^
- File "<doctest test_doctest_errors.txt[2]>", line 2, in f
- 2 + '2'
- ~~^~~~~
- TypeError: ...
- ----------------------------------------------------------------------
- File "...test_doctest_errors.txt", line 13, in test_doctest_errors.txt
- Failed example:
- 2+*3
- Exception raised:
- Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^
- File "<doctest test_doctest_errors.txt[4]>", line 1
- 2+*3
- ^
- SyntaxError: invalid syntax
+ ~^^
+ File "<doctest test_doctest_errors.txt[2]>", line 2, in f
+ 2 + '2'
+ ~~^~~~~
+ TypeError: ...
<BLANKLINE>
+ >>> print(result.errors[2][1]) # doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ File "...test_doctest_errors.txt", line 13, in test_doctest_errors.txt
+ >...>> 2+*3
+ File "<doctest test_doctest_errors.txt[4]>", line 1
+ 2+*3
+ ^
+ SyntaxError: invalid syntax
<BLANKLINE>
+
"""
def test_trailing_space_in_test():
@@ -2874,15 +2796,25 @@ def test_unittest_reportflags():
>>> import unittest
>>> result = suite.run(unittest.TestResult())
>>> result
- <unittest.result.TestResult run=1 errors=0 failures=1>
+ <unittest.result.TestResult run=1 errors=1 failures=1>
>>> print(result.failures[0][1]) # doctest: +ELLIPSIS
- Traceback ...
- Failed example:
- favorite_color
- ...
- Failed example:
+ Traceback (most recent call last):
+ File ...
+ >...>> if 1:
+ AssertionError: Failed example:
if 1:
- ...
+ print('a')
+ print()
+ print('b')
+ Expected:
+ a
+ <BLANKLINE>
+ b
+ Got:
+ a
+ <BLANKLINE>
+ b
+ <BLANKLINE>
Note that we see both failures displayed.
@@ -2891,18 +2823,8 @@ def test_unittest_reportflags():
Now, when we run the test:
- >>> result = suite.run(unittest.TestResult())
- >>> result
- <unittest.result.TestResult run=1 errors=0 failures=1>
- >>> print(result.failures[0][1]) # doctest: +ELLIPSIS
- Traceback ...
- Failed example:
- favorite_color
- Exception raised:
- ...
- NameError: name 'favorite_color' is not defined
- <BLANKLINE>
- <BLANKLINE>
+ >>> suite.run(unittest.TestResult())
+ <unittest.result.TestResult run=1 errors=1 failures=0>
We get only the first failure.
@@ -2912,21 +2834,20 @@ def test_unittest_reportflags():
>>> suite = doctest.DocFileSuite('test_doctest.txt',
... optionflags=doctest.DONT_ACCEPT_BLANKLINE | doctest.REPORT_NDIFF)
- Then the default eporting options are ignored:
+ Then the default reporting options are ignored:
>>> result = suite.run(unittest.TestResult())
>>> result
- <unittest.result.TestResult run=1 errors=0 failures=1>
+ <unittest.result.TestResult run=1 errors=1 failures=1>
*NOTE*: These doctest are intentionally not placed in raw string to depict
the trailing whitespace using `\x20` in the diff below.
>>> print(result.failures[0][1]) # doctest: +ELLIPSIS
Traceback ...
- Failed example:
- favorite_color
- ...
- Failed example:
+ File ...
+ >...>> if 1:
+ AssertionError: Failed example:
if 1:
print('a')
print()
@@ -2937,7 +2858,6 @@ def test_unittest_reportflags():
+\x20
b
<BLANKLINE>
- <BLANKLINE>
Test runners can restore the formatting flags after they run:
@@ -3145,11 +3065,6 @@ Tests for error reporting in the testfile() function.
1/0
Exception raised:
Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<doctest test_doctest_errors.txt[1]>", line 1, in <module>
1/0
~^~
@@ -3160,11 +3075,6 @@ Tests for error reporting in the testfile() function.
f()
Exception raised:
Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<doctest test_doctest_errors.txt[3]>", line 1, in <module>
f()
~^^
@@ -3177,12 +3087,6 @@ Tests for error reporting in the testfile() function.
Failed example:
2+*3
Exception raised:
- Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^
File "<doctest test_doctest_errors.txt[4]>", line 1
2+*3
^
@@ -3343,11 +3247,6 @@ Tests for error reporting in the testmod() function.
1/0
Exception raised:
Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<doctest test.test_doctest.sample_doctest_errors[1]>", line 1, in <module>
1/0
~^~
@@ -3366,11 +3265,6 @@ Tests for error reporting in the testmod() function.
1/0
Exception raised:
Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<doctest test.test_doctest.sample_doctest_errors.__test__.bad[1]>", line 1, in <module>
1/0
~^~
@@ -3389,11 +3283,6 @@ Tests for error reporting in the testmod() function.
1/0
Exception raised:
Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<doctest test.test_doctest.sample_doctest_errors.errors[1]>", line 1, in <module>
1/0
~^~
@@ -3404,11 +3293,6 @@ Tests for error reporting in the testmod() function.
f()
Exception raised:
Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<doctest test.test_doctest.sample_doctest_errors.errors[3]>", line 1, in <module>
f()
~^^
@@ -3422,11 +3306,6 @@ Tests for error reporting in the testmod() function.
g()
Exception raised:
Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<doctest test.test_doctest.sample_doctest_errors.errors[4]>", line 1, in <module>
g()
~^^
@@ -3439,12 +3318,6 @@ Tests for error reporting in the testmod() function.
Failed example:
2+*3
Exception raised:
- Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^
File "<doctest test.test_doctest.sample_doctest_errors.syntax_error[0]>", line 1
2+*3
^
@@ -3490,11 +3363,6 @@ Check doctest with a non-ascii filename:
raise Exception('clé')
Exception raised:
Traceback (most recent call last):
- File ...
- exec(compile(example.source, filename, "single",
- ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
- compileflags, True), test.globs)
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "<doctest foo-bär@baz[0]>", line 1, in <module>
raise Exception('clé')
Exception: clé
@@ -3787,9 +3655,9 @@ def test_run_doctestsuite_multiple_times():
>>> import test.test_doctest.sample_doctest
>>> suite = doctest.DocTestSuite(test.test_doctest.sample_doctest)
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=4>
+ <unittest.result.TestResult run=9 errors=2 failures=2>
>>> suite.run(unittest.TestResult())
- <unittest.result.TestResult run=9 errors=0 failures=4>
+ <unittest.result.TestResult run=9 errors=2 failures=2>
"""
diff --git a/Lib/test/test_email/test__header_value_parser.py b/Lib/test/test_email/test__header_value_parser.py
index fd4ac2c404c..179e236ecdf 100644
--- a/Lib/test/test_email/test__header_value_parser.py
+++ b/Lib/test/test_email/test__header_value_parser.py
@@ -2491,6 +2491,38 @@ class TestParser(TestParserMixin, TestEmailBase):
self.assertEqual(address.all_mailboxes[0].domain, 'example.com')
self.assertEqual(address.all_mailboxes[0].addr_spec, '"example example"@example.com')
+ def test_get_address_with_invalid_domain(self):
+ address = self._test_get_x(parser.get_address,
+ '<T@[',
+ '<T@[]>',
+ '<T@[]>',
+ [errors.InvalidHeaderDefect, # missing trailing '>' on angle-addr
+ errors.InvalidHeaderDefect, # end of input inside domain-literal
+ ],
+ '')
+ self.assertEqual(address.token_type, 'address')
+ self.assertEqual(len(address.mailboxes), 0)
+ self.assertEqual(len(address.all_mailboxes), 1)
+ self.assertEqual(address.all_mailboxes[0].domain, '[]')
+ self.assertEqual(address.all_mailboxes[0].local_part, 'T')
+ self.assertEqual(address.all_mailboxes[0].token_type, 'invalid-mailbox')
+ self.assertEqual(address[0].token_type, 'invalid-mailbox')
+
+ address = self._test_get_x(parser.get_address,
+ '!an??:=m==fr2@[C',
+ '!an??:=m==fr2@[C];',
+ '!an??:=m==fr2@[C];',
+ [errors.InvalidHeaderDefect, # end of header in group
+ errors.InvalidHeaderDefect, # end of input inside domain-literal
+ ],
+ '')
+ self.assertEqual(address.token_type, 'address')
+ self.assertEqual(len(address.mailboxes), 0)
+ self.assertEqual(len(address.all_mailboxes), 1)
+ self.assertEqual(address.all_mailboxes[0].domain, '[C]')
+ self.assertEqual(address.all_mailboxes[0].local_part, '=m==fr2')
+ self.assertEqual(address.all_mailboxes[0].token_type, 'invalid-mailbox')
+ self.assertEqual(address[0].token_type, 'group')
# get_address_list
@@ -2765,6 +2797,19 @@ class TestParser(TestParserMixin, TestEmailBase):
)
self.assertEqual(message_id.token_type, 'message-id')
+ def test_parse_message_id_with_invalid_domain(self):
+ message_id = self._test_parse_x(
+ parser.parse_message_id,
+ "<T@[",
+ "<T@[]>",
+ "<T@[]>",
+ [errors.ObsoleteHeaderDefect] + [errors.InvalidHeaderDefect] * 2,
+ [],
+ )
+ self.assertEqual(message_id.token_type, 'message-id')
+ self.assertEqual(str(message_id.all_defects[-1]),
+ "end of input inside domain-literal")
+
def test_parse_message_id_with_remaining(self):
message_id = self._test_parse_x(
parser.parse_message_id,
diff --git a/Lib/test/test_external_inspection.py b/Lib/test/test_external_inspection.py
index 2b4b63a030b..303af25fc7a 100644
--- a/Lib/test/test_external_inspection.py
+++ b/Lib/test/test_external_inspection.py
@@ -114,17 +114,17 @@ class TestGetStackTrace(unittest.TestCase):
p.wait(timeout=SHORT_TIMEOUT)
thread_expected_stack_trace = [
- ("foo", script_name, 15),
- ("baz", script_name, 12),
- ("bar", script_name, 9),
- ('Thread.run', threading.__file__, ANY)
+ (script_name, 15, "foo"),
+ (script_name, 12, "baz"),
+ (script_name, 9, "bar"),
+ (threading.__file__, ANY, 'Thread.run')
]
# Is possible that there are more threads, so we check that the
# expected stack traces are in the result (looking at you Windows!)
self.assertIn((ANY, thread_expected_stack_trace), stack_trace)
# Check that the main thread stack trace is in the result
- frame = ("<module>", script_name, 19)
+ frame = (script_name, 19, "<module>")
for _, stack in stack_trace:
if frame in stack:
break
@@ -222,47 +222,47 @@ class TestGetStackTrace(unittest.TestCase):
root_task = "Task-1"
expected_stack_trace = [
[
- ("c5", script_name, 10),
- ("c4", script_name, 14),
- ("c3", script_name, 17),
- ("c2", script_name, 20),
+ (script_name, 10, "c5"),
+ (script_name, 14, "c4"),
+ (script_name, 17, "c3"),
+ (script_name, 20, "c2"),
],
"c2_root",
[
[
[
(
- "TaskGroup._aexit",
taskgroups.__file__,
ANY,
+ "TaskGroup._aexit"
),
(
- "TaskGroup.__aexit__",
taskgroups.__file__,
ANY,
+ "TaskGroup.__aexit__"
),
- ("main", script_name, 26),
+ (script_name, 26, "main"),
],
"Task-1",
[],
],
[
- [("c1", script_name, 23)],
+ [(script_name, 23, "c1")],
"sub_main_1",
[
[
[
(
- "TaskGroup._aexit",
taskgroups.__file__,
ANY,
+ "TaskGroup._aexit"
),
(
- "TaskGroup.__aexit__",
taskgroups.__file__,
ANY,
+ "TaskGroup.__aexit__"
),
- ("main", script_name, 26),
+ (script_name, 26, "main"),
],
"Task-1",
[],
@@ -270,22 +270,22 @@ class TestGetStackTrace(unittest.TestCase):
],
],
[
- [("c1", script_name, 23)],
+ [(script_name, 23, "c1")],
"sub_main_2",
[
[
[
(
- "TaskGroup._aexit",
taskgroups.__file__,
ANY,
+ "TaskGroup._aexit"
),
(
- "TaskGroup.__aexit__",
taskgroups.__file__,
ANY,
+ "TaskGroup.__aexit__"
),
- ("main", script_name, 26),
+ (script_name, 26, "main"),
],
"Task-1",
[],
@@ -363,9 +363,9 @@ class TestGetStackTrace(unittest.TestCase):
expected_stack_trace = [
[
- ("gen_nested_call", script_name, 10),
- ("gen", script_name, 16),
- ("main", script_name, 19),
+ (script_name, 10, "gen_nested_call"),
+ (script_name, 16, "gen"),
+ (script_name, 19, "main"),
],
"Task-1",
[],
@@ -439,9 +439,9 @@ class TestGetStackTrace(unittest.TestCase):
stack_trace[2].sort(key=lambda x: x[1])
expected_stack_trace = [
- [("deep", script_name, 11), ("c1", script_name, 15)],
+ [(script_name, 11, "deep"), (script_name, 15, "c1")],
"Task-2",
- [[[("main", script_name, 21)], "Task-1", []]],
+ [[[(script_name, 21, "main")], "Task-1", []]],
]
self.assertEqual(stack_trace, expected_stack_trace)
@@ -515,16 +515,16 @@ class TestGetStackTrace(unittest.TestCase):
stack_trace[2].sort(key=lambda x: x[1])
expected_stack_trace = [
[
- ("deep", script_name, 11),
- ("c1", script_name, 15),
- ("staggered_race.<locals>.run_one_coro", staggered.__file__, ANY),
+ (script_name, 11, "deep"),
+ (script_name, 15, "c1"),
+ (staggered.__file__, ANY, "staggered_race.<locals>.run_one_coro"),
],
"Task-2",
[
[
[
- ("staggered_race", staggered.__file__, ANY),
- ("main", script_name, 21),
+ (staggered.__file__, ANY, "staggered_race"),
+ (script_name, 21, "main"),
],
"Task-1",
[],
@@ -662,16 +662,16 @@ class TestGetStackTrace(unittest.TestCase):
self.assertIn((ANY, "Task-1", []), entries)
main_stack = [
(
- "TaskGroup._aexit",
taskgroups.__file__,
ANY,
+ "TaskGroup._aexit",
),
(
- "TaskGroup.__aexit__",
taskgroups.__file__,
ANY,
+ "TaskGroup.__aexit__",
),
- ("main", script_name, 60),
+ (script_name, 60, "main"),
]
self.assertIn(
(ANY, "server task", [[main_stack, ANY]]),
@@ -686,16 +686,16 @@ class TestGetStackTrace(unittest.TestCase):
[
[
(
- "TaskGroup._aexit",
taskgroups.__file__,
ANY,
+ "TaskGroup._aexit",
),
(
- "TaskGroup.__aexit__",
taskgroups.__file__,
ANY,
+ "TaskGroup.__aexit__",
),
- ("echo_client_spam", script_name, 41),
+ (script_name, 41, "echo_client_spam"),
],
ANY,
]
@@ -741,14 +741,14 @@ class TestGetStackTrace(unittest.TestCase):
stack[:2],
[
(
- "get_stack_trace",
__file__,
get_stack_trace.__code__.co_firstlineno + 2,
+ "get_stack_trace",
),
(
- "TestGetStackTrace.test_self_trace",
__file__,
self.test_self_trace.__code__.co_firstlineno + 6,
+ "TestGetStackTrace.test_self_trace",
),
]
)
diff --git a/Lib/test/test_fractions.py b/Lib/test/test_fractions.py
index 96b3f305194..d1d2739856c 100644
--- a/Lib/test/test_fractions.py
+++ b/Lib/test/test_fractions.py
@@ -1518,6 +1518,8 @@ class FractionTest(unittest.TestCase):
(F(51, 1000), '.1f', '0.1'),
(F(149, 1000), '.1f', '0.1'),
(F(151, 1000), '.1f', '0.2'),
+ (F(22, 7), '.02f', '3.14'), # issue gh-130662
+ (F(22, 7), '005.02f', '03.14'),
]
for fraction, spec, expected in testcases:
with self.subTest(fraction=fraction, spec=spec):
@@ -1616,12 +1618,6 @@ class FractionTest(unittest.TestCase):
'=010%',
'>00.2f',
'>00f',
- # Too many zeros - minimum width should not have leading zeros
- '006f',
- # Leading zeros in precision
- '.010f',
- '.02f',
- '.000f',
# Missing precision
'.e',
'.f',
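
Together with the Decimal cases added earlier, the entries removed here show that a leading zero is now accepted in the precision (and in the width) of a fixed-point format spec (gh-130662, as referenced in the test data). A small illustration, assuming an interpreter that includes this change:

    from decimal import Decimal
    from fractions import Fraction

    print(format(Fraction(22, 7), '.02f'))        # '3.14'  (leading zero in precision)
    print(format(Fraction(22, 7), '005.02f'))     # '03.14' (leading zero in width too)
    print(format(Decimal('3.14159265'), '.01f'))  # '3.1'
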
diff --git a/Lib/test/test_free_threading/test_itertools_batched.py b/Lib/test/test_free_threading/test_itertools.py
index a754b4f9ea9..8360afbf78c 100644
--- a/Lib/test/test_free_threading/test_itertools_batched.py
+++ b/Lib/test/test_free_threading/test_itertools.py
@@ -1,15 +1,15 @@
import unittest
from threading import Thread, Barrier
-from itertools import batched
+from itertools import batched, cycle
from test.support import threading_helper
threading_helper.requires_working_threading(module=True)
-class EnumerateThreading(unittest.TestCase):
+class ItertoolsThreading(unittest.TestCase):
@threading_helper.reap_threads
- def test_threading(self):
+ def test_batched(self):
number_of_threads = 10
number_of_iterations = 20
barrier = Barrier(number_of_threads)
@@ -34,5 +34,31 @@ class EnumerateThreading(unittest.TestCase):
barrier.reset()
+ @threading_helper.reap_threads
+ def test_cycle(self):
+ number_of_threads = 6
+ number_of_iterations = 10
+ number_of_cycles = 400
+
+ barrier = Barrier(number_of_threads)
+ def work(it):
+ barrier.wait()
+ for _ in range(number_of_cycles):
+ _ = next(it)
+
+ data = (1, 2, 3, 4)
+ for it in range(number_of_iterations):
+ cycle_iterator = cycle(data)
+ worker_threads = []
+ for ii in range(number_of_threads):
+ worker_threads.append(
+ Thread(target=work, args=[cycle_iterator]))
+
+ with threading_helper.start_threads(worker_threads):
+ pass
+
+ barrier.reset()
+
+
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py
index c39565144bf..7f5d48b9c63 100644
--- a/Lib/test/test_grammar.py
+++ b/Lib/test/test_grammar.py
@@ -1,7 +1,7 @@
# Python test set -- part 1, grammar.
# This just tests whether the parser accepts them all.
-from test.support import check_syntax_error
+from test.support import check_syntax_error, skip_wasi_stack_overflow
from test.support import import_helper
import annotationlib
import inspect
@@ -249,6 +249,18 @@ the \'lazy\' dog.\n\
compile(s, "<test>", "exec")
self.assertIn("was never closed", str(cm.exception))
+ @skip_wasi_stack_overflow()
+ def test_max_level(self):
+ # Macro defined in Parser/lexer/state.h
+ MAXLEVEL = 200
+
+ result = eval("(" * MAXLEVEL + ")" * MAXLEVEL)
+ self.assertEqual(result, ())
+
+ with self.assertRaises(SyntaxError) as cm:
+ eval("(" * (MAXLEVEL + 1) + ")" * (MAXLEVEL + 1))
+ self.assertStartsWith(str(cm.exception), 'too many nested parentheses')
+
var_annot_global: int # a global annotated is necessary for test_var_annot
diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
index 161c7652d7a..b83ae181718 100644
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -12,6 +12,7 @@ import io
import itertools
import logging
import os
+import re
import sys
import sysconfig
import tempfile
@@ -97,6 +98,14 @@ def read_vectors(hash_name):
yield parts
+DEPRECATED_STRING_PARAMETER = re.escape(
+ "the 'string' keyword parameter is deprecated since "
+ "Python 3.15 and slated for removal in Python 3.19; "
+ "use the 'data' keyword parameter or pass the data "
+ "to hash as a positional argument instead"
+)
+
+
class HashLibTestCase(unittest.TestCase):
supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
'sha224', 'SHA224', 'sha256', 'SHA256',
@@ -140,11 +149,10 @@ class HashLibTestCase(unittest.TestCase):
# of hashlib.new given the algorithm name.
for algorithm, constructors in self.constructors_to_test.items():
constructors.add(getattr(hashlib, algorithm))
- def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm, **kwargs):
- if data is None:
- return hashlib.new(_alg, **kwargs)
- return hashlib.new(_alg, data, **kwargs)
- constructors.add(_test_algorithm_via_hashlib_new)
+ def c(*args, __algorithm_name=algorithm, **kwargs):
+ return hashlib.new(__algorithm_name, *args, **kwargs)
+ c.__name__ = f'do_test_algorithm_via_hashlib_new_{algorithm}'
+ constructors.add(c)
_hashlib = self._conditional_import_module('_hashlib')
self._hashlib = _hashlib
@@ -249,6 +257,81 @@ class HashLibTestCase(unittest.TestCase):
self._hashlib.new("md5", usedforsecurity=False)
self._hashlib.openssl_md5(usedforsecurity=False)
+ @unittest.skipIf(get_fips_mode(), "skip in FIPS mode")
+ def test_clinic_signature(self):
+ for constructor in self.hash_constructors:
+ with self.subTest(constructor.__name__):
+ constructor(b'')
+ constructor(data=b'')
+ with self.assertWarnsRegex(DeprecationWarning,
+ DEPRECATED_STRING_PARAMETER):
+ constructor(string=b'')
+
+ digest_name = constructor(b'').name
+ with self.subTest(digest_name):
+ hashlib.new(digest_name, b'')
+ hashlib.new(digest_name, data=b'')
+ with self.assertWarnsRegex(DeprecationWarning,
+ DEPRECATED_STRING_PARAMETER):
+ hashlib.new(digest_name, string=b'')
+ if self._hashlib:
+ self._hashlib.new(digest_name, b'')
+ self._hashlib.new(digest_name, data=b'')
+ with self.assertWarnsRegex(DeprecationWarning,
+ DEPRECATED_STRING_PARAMETER):
+ self._hashlib.new(digest_name, string=b'')
+
+ @unittest.skipIf(get_fips_mode(), "skip in FIPS mode")
+ def test_clinic_signature_errors(self):
+ nomsg = b''
+ mymsg = b'msg'
+ conflicting_call = re.escape(
+ "'data' and 'string' are mutually exclusive "
+ "and support for 'string' keyword parameter "
+ "is slated for removal in a future version."
+ )
+ duplicated_param = re.escape("given by name ('data') and position")
+ unexpected_param = re.escape("got an unexpected keyword argument '_'")
+ for args, kwds, errmsg in [
+ # Reject duplicated arguments before unknown keyword arguments.
+ ((nomsg,), dict(data=nomsg, _=nomsg), duplicated_param),
+ ((mymsg,), dict(data=nomsg, _=nomsg), duplicated_param),
+ # Reject duplicated arguments before conflicting ones.
+ *itertools.product(
+ [[nomsg], [mymsg]],
+ [dict(data=nomsg), dict(data=nomsg, string=nomsg)],
+ [duplicated_param]
+ ),
+ # Reject unknown keyword arguments before conflicting ones.
+ *itertools.product(
+ [()],
+ [
+ dict(_=None),
+ dict(data=nomsg, _=None),
+ dict(string=nomsg, _=None),
+ dict(string=nomsg, data=nomsg, _=None),
+ ],
+ [unexpected_param]
+ ),
+ ((nomsg,), dict(_=None), unexpected_param),
+ ((mymsg,), dict(_=None), unexpected_param),
+ # Reject conflicting arguments.
+ [(nomsg,), dict(string=nomsg), conflicting_call],
+ [(mymsg,), dict(string=nomsg), conflicting_call],
+ [(), dict(data=nomsg, string=nomsg), conflicting_call],
+ ]:
+ for constructor in self.hash_constructors:
+ digest_name = constructor(b'').name
+ with self.subTest(constructor.__name__, args=args, kwds=kwds):
+ with self.assertRaisesRegex(TypeError, errmsg):
+ constructor(*args, **kwds)
+ with self.subTest(digest_name, args=args, kwds=kwds):
+ with self.assertRaisesRegex(TypeError, errmsg):
+ hashlib.new(digest_name, *args, **kwds)
+ if self._hashlib:
+ with self.assertRaisesRegex(TypeError, errmsg):
+ self._hashlib.new(digest_name, *args, **kwds)
+
def test_unknown_hash(self):
self.assertRaises(ValueError, hashlib.new, 'spam spam spam spam spam')
self.assertRaises(TypeError, hashlib.new, 1)
@@ -718,8 +801,6 @@ class HashLibTestCase(unittest.TestCase):
self.assertRaises(ValueError, constructor, node_offset=-1)
self.assertRaises(OverflowError, constructor, node_offset=max_offset+1)
- self.assertRaises(TypeError, constructor, data=b'')
- self.assertRaises(TypeError, constructor, string=b'')
self.assertRaises(TypeError, constructor, '')
constructor(
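
The new clinic-signature tests above pin down how hash constructors accept their input: positionally or via the data keyword, while the string keyword is still accepted but deprecated (slated for removal per the message in DEPRECATED_STRING_PARAMETER). A minimal sketch of the three spellings, assuming an interpreter that includes this change:

    import hashlib
    import warnings

    h1 = hashlib.new("sha256", b"payload")          # positional: preferred
    h2 = hashlib.new("sha256", data=b"payload")     # 'data' keyword: equivalent
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        h3 = hashlib.new("sha256", string=b"payload")   # deprecated spelling
    assert h1.hexdigest() == h2.hexdigest() == h3.hexdigest()
    print(caught[0].category.__name__)              # DeprecationWarning
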
diff --git a/Lib/test/test_interpreters/test_api.py b/Lib/test/test_interpreters/test_api.py
index 165949167ce..b3c9ef8efba 100644
--- a/Lib/test/test_interpreters/test_api.py
+++ b/Lib/test/test_interpreters/test_api.py
@@ -1,17 +1,22 @@
+import contextlib
import os
import pickle
+import sys
from textwrap import dedent
import threading
import types
import unittest
from test import support
+from test.support import os_helper
+from test.support import script_helper
from test.support import import_helper
# Raise SkipTest if subinterpreters not supported.
_interpreters = import_helper.import_module('_interpreters')
from test.support import Py_GIL_DISABLED
from test.support import interpreters
from test.support import force_not_colorized
+import test._crossinterp_definitions as defs
from test.support.interpreters import (
InterpreterError, InterpreterNotFoundError, ExecutionFailed,
)
@@ -29,6 +34,59 @@ WHENCE_STR_XI = 'cross-interpreter C-API'
WHENCE_STR_STDLIB = '_interpreters module'
+def is_pickleable(obj):
+ try:
+ pickle.dumps(obj)
+ except Exception:
+ return False
+ return True
+
+
+@contextlib.contextmanager
+def defined_in___main__(name, script, *, remove=False):
+ import __main__ as mainmod
+ mainns = vars(mainmod)
+ assert name not in mainns
+ exec(script, mainns, mainns)
+ if remove:
+ yield mainns.pop(name)
+ else:
+ try:
+ yield mainns[name]
+ finally:
+ mainns.pop(name, None)
+
+
+def build_excinfo(exctype, msg=None, formatted=None, errdisplay=None):
+ if isinstance(exctype, type):
+ assert issubclass(exctype, BaseException), exctype
+ exctype = types.SimpleNamespace(
+ __name__=exctype.__name__,
+ __qualname__=exctype.__qualname__,
+ __module__=exctype.__module__,
+ )
+ elif isinstance(exctype, str):
+ module, _, name = exctype.rpartition(exctype)
+ if not module and name in __builtins__:
+ module = 'builtins'
+ exctype = types.SimpleNamespace(
+ __name__=name,
+ __qualname__=exctype,
+ __module__=module or None,
+ )
+ else:
+ assert isinstance(exctype, types.SimpleNamespace)
+ assert msg is None or isinstance(msg, str), msg
+ assert formatted is None or isinstance(formatted, str), formatted
+ assert errdisplay is None or isinstance(errdisplay, str), errdisplay
+ return types.SimpleNamespace(
+ type=exctype,
+ msg=msg,
+ formatted=formatted,
+ errdisplay=errdisplay,
+ )
+
+
class ModuleTests(TestBase):
def test_queue_aliases(self):
@@ -890,24 +948,26 @@ class TestInterpreterExec(TestBase):
# Interpreter.exec() behavior.
-def call_func_noop():
- pass
+call_func_noop = defs.spam_minimal
+call_func_ident = defs.spam_returns_arg
+call_func_failure = defs.spam_raises
def call_func_return_shareable():
return (1, None)
-def call_func_return_not_shareable():
- return [1, 2, 3]
+def call_func_return_stateless_func():
+ return (lambda x: x)
-def call_func_failure():
- raise Exception('spam!')
+def call_func_return_pickleable():
+ return [1, 2, 3]
-def call_func_ident(value):
- return value
+def call_func_return_unpickleable():
+ x = 42
+ return (lambda: x)
def get_call_func_closure(value):
@@ -916,6 +976,11 @@ def get_call_func_closure(value):
return call_func_closure
+def call_func_exec_wrapper(script, ns):
+ res = exec(script, ns, ns)
+ return res, ns, id(ns)
+
+
class Spam:
@staticmethod
@@ -1012,86 +1077,375 @@ class TestInterpreterCall(TestBase):
# - preserves info (e.g. SyntaxError)
# - matching error display
- def test_call(self):
+ @contextlib.contextmanager
+ def assert_fails(self, expected):
+ with self.assertRaises(ExecutionFailed) as cm:
+ yield cm
+ uncaught = cm.exception.excinfo
+ self.assertEqual(uncaught.type.__name__, expected.__name__)
+
+ def assert_fails_not_shareable(self):
+ return self.assert_fails(interpreters.NotShareableError)
+
+ def assert_code_equal(self, code1, code2):
+ if code1 == code2:
+ return
+ self.assertEqual(code1.co_name, code2.co_name)
+ self.assertEqual(code1.co_flags, code2.co_flags)
+ self.assertEqual(code1.co_consts, code2.co_consts)
+ self.assertEqual(code1.co_varnames, code2.co_varnames)
+ self.assertEqual(code1.co_cellvars, code2.co_cellvars)
+ self.assertEqual(code1.co_freevars, code2.co_freevars)
+ self.assertEqual(code1.co_names, code2.co_names)
+ self.assertEqual(
+ _testinternalcapi.get_code_var_counts(code1),
+ _testinternalcapi.get_code_var_counts(code2),
+ )
+ self.assertEqual(code1.co_code, code2.co_code)
+
+ def assert_funcs_equal(self, func1, func2):
+ if func1 == func2:
+ return
+ self.assertIs(type(func1), type(func2))
+ self.assertEqual(func1.__name__, func2.__name__)
+ self.assertEqual(func1.__defaults__, func2.__defaults__)
+ self.assertEqual(func1.__kwdefaults__, func2.__kwdefaults__)
+ self.assertEqual(func1.__closure__, func2.__closure__)
+ self.assert_code_equal(func1.__code__, func2.__code__)
+ self.assertEqual(
+ _testinternalcapi.get_code_var_counts(func1),
+ _testinternalcapi.get_code_var_counts(func2),
+ )
+
+ def assert_exceptions_equal(self, exc1, exc2):
+ assert isinstance(exc1, Exception)
+ assert isinstance(exc2, Exception)
+ if exc1 == exc2:
+ return
+ self.assertIs(type(exc1), type(exc2))
+ self.assertEqual(exc1.args, exc2.args)
+
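+ # Roughly speaking, a "stateless" function has no closure and does not rely
+ # on module globals, so it can be recreated in the other interpreter without
+ # pickling; other return values fall back to pickle.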
+ def test_stateless_funcs(self):
interp = interpreters.create()
- for i, (callable, args, kwargs) in enumerate([
- (call_func_noop, (), {}),
- (Spam.noop, (), {}),
+ func = call_func_noop
+ with self.subTest('no args, no return'):
+ res = interp.call(func)
+ self.assertIsNone(res)
+
+ func = call_func_return_shareable
+ with self.subTest('no args, returns shareable'):
+ res = interp.call(func)
+ self.assertEqual(res, (1, None))
+
+ func = call_func_return_stateless_func
+ expected = (lambda x: x)
+ with self.subTest('no args, returns stateless func'):
+ res = interp.call(func)
+ self.assert_funcs_equal(res, expected)
+
+ func = call_func_return_pickleable
+ with self.subTest('no args, returns pickleable'):
+ res = interp.call(func)
+ self.assertEqual(res, [1, 2, 3])
+
+ func = call_func_return_unpickleable
+ with self.subTest('no args, returns unpickleable'):
+ with self.assertRaises(interpreters.NotShareableError):
+ interp.call(func)
+
+ def test_stateless_func_returns_arg(self):
+ interp = interpreters.create()
+
+ for arg in [
+ None,
+ 10,
+ 'spam!',
+ b'spam!',
+ (1, 2, 'spam!'),
+ memoryview(b'spam!'),
+ ]:
+ with self.subTest(f'shareable {arg!r}'):
+ assert _interpreters.is_shareable(arg)
+ res = interp.call(defs.spam_returns_arg, arg)
+ self.assertEqual(res, arg)
+
+ for arg in defs.STATELESS_FUNCTIONS:
+ with self.subTest(f'stateless func {arg!r}'):
+ res = interp.call(defs.spam_returns_arg, arg)
+ self.assert_funcs_equal(res, arg)
+
+ for arg in defs.TOP_FUNCTIONS:
+ if arg in defs.STATELESS_FUNCTIONS:
+ continue
+ with self.subTest(f'stateful func {arg!r}'):
+ res = interp.call(defs.spam_returns_arg, arg)
+ self.assert_funcs_equal(res, arg)
+ assert is_pickleable(arg)
+
+ for arg in [
+ Ellipsis,
+ NotImplemented,
+ object(),
+ 2**1000,
+ [1, 2, 3],
+ {'a': 1, 'b': 2},
+ types.SimpleNamespace(x=42),
+ # builtin types
+ object,
+ type,
+ Exception,
+ ModuleNotFoundError,
+ # builtin exceptions
+ Exception('uh-oh!'),
+ ModuleNotFoundError('mymodule'),
+ # builtin functions
+ len,
+ sys.exit,
+ # user classes
+ *defs.TOP_CLASSES,
+ *(c(*a) for c, a in defs.TOP_CLASSES.items()
+ if c not in defs.CLASSES_WITHOUT_EQUALITY),
+ ]:
+ with self.subTest(f'pickleable {arg!r}'):
+ res = interp.call(defs.spam_returns_arg, arg)
+ if type(arg) is object:
+ self.assertIs(type(res), object)
+ elif isinstance(arg, BaseException):
+ self.assert_exceptions_equal(res, arg)
+ else:
+ self.assertEqual(res, arg)
+ assert is_pickleable(arg)
+
+ for arg in [
+ types.MappingProxyType({}),
+ *(f for f in defs.NESTED_FUNCTIONS
+ if f not in defs.STATELESS_FUNCTIONS),
+ ]:
+ with self.subTest(f'unpickleable {arg!r}'):
+ assert not _interpreters.is_shareable(arg)
+ assert not is_pickleable(arg)
+ with self.assertRaises(interpreters.NotShareableError):
+ interp.call(defs.spam_returns_arg, arg)
+
+ def test_full_args(self):
+ interp = interpreters.create()
+ expected = (1, 2, 3, 4, 5, 6, ('?',), {'g': 7, 'h': 8})
+ func = defs.spam_full_args
+ res = interp.call(func, 1, 2, 3, 4, '?', e=5, f=6, g=7, h=8)
+ self.assertEqual(res, expected)
+
+ def test_full_defaults(self):
+ # pickleable, but not stateless
+ interp = interpreters.create()
+ expected = (-1, -2, -3, -4, -5, -6, (), {'g': 8, 'h': 9})
+ res = interp.call(defs.spam_full_args_with_defaults, g=8, h=9)
+ self.assertEqual(res, expected)
+
+ def test_modified_arg(self):
+ interp = interpreters.create()
+ script = dedent("""
+ a = 7
+ b = 2
+ c = a ** b
+ """)
+ ns = {}
+ expected = {'a': 7, 'b': 2, 'c': 49}
+ res = interp.call(call_func_exec_wrapper, script, ns)
+ obj, resns, resid = res
+ del resns['__builtins__']
+ self.assertIsNone(obj)
+ self.assertEqual(ns, {})
+ self.assertEqual(resns, expected)
+ self.assertNotEqual(resid, id(ns))
+ self.assertNotEqual(resid, id(resns))
+
+ def test_func_in___main___valid(self):
+ # pickleable, already there
+
+ with os_helper.temp_dir() as tempdir:
+ def new_mod(name, text):
+ script_helper.make_script(tempdir, name, dedent(text))
+
+ def run(text):
+ name = 'myscript'
+ text = dedent(f"""
+ import sys
+ sys.path.insert(0, {tempdir!r})
+
+ """) + dedent(text)
+ filename = script_helper.make_script(tempdir, name, text)
+ res = script_helper.assert_python_ok(filename)
+ return res.out.decode('utf-8').strip()
+
+ # no module indirection
+ with self.subTest('no indirection'):
+ text = run(f"""
+ from test.support import interpreters
+
+ def spam():
+ # This is a global var...
+ return __name__
+
+ if __name__ == '__main__':
+ interp = interpreters.create()
+ res = interp.call(spam)
+ print(res)
+ """)
+ self.assertEqual(text, '<fake __main__>')
+
+ # indirect as func, direct interp
+ new_mod('mymod', f"""
+ def run(interp, func):
+ return interp.call(func)
+ """)
+ with self.subTest('indirect as func, direct interp'):
+ text = run(f"""
+ from test.support import interpreters
+ import mymod
+
+ def spam():
+ # This is a global var...
+ return __name__
+
+ if __name__ == '__main__':
+ interp = interpreters.create()
+ res = mymod.run(interp, spam)
+ print(res)
+ """)
+ self.assertEqual(text, '<fake __main__>')
+
+ # indirect as func, indirect interp
+ new_mod('mymod', f"""
+ from test.support import interpreters
+ def run(func):
+ interp = interpreters.create()
+ return interp.call(func)
+ """)
+ with self.subTest('indirect as func, indirect interp'):
+ text = run(f"""
+ import mymod
+
+ def spam():
+ # This is a global var...
+ return __name__
+
+ if __name__ == '__main__':
+ res = mymod.run(spam)
+ print(res)
+ """)
+ self.assertEqual(text, '<fake __main__>')
+
+ def test_func_in___main___invalid(self):
+ interp = interpreters.create()
+
+ funcname = f'{__name__.replace(".", "_")}_spam_okay'
+ script = dedent(f"""
+ def {funcname}():
+ # This is a global var...
+ return __name__
+ """)
+
+ with self.subTest('pickleable, added dynamically'):
+ with defined_in___main__(funcname, script) as arg:
+ with self.assertRaises(interpreters.NotShareableError):
+ interp.call(defs.spam_returns_arg, arg)
+
+ with self.subTest('lying about __main__'):
+ with defined_in___main__(funcname, script, remove=True) as arg:
+ with self.assertRaises(interpreters.NotShareableError):
+ interp.call(defs.spam_returns_arg, arg)
+
+ def test_raises(self):
+ interp = interpreters.create()
+ with self.assertRaises(ExecutionFailed):
+ interp.call(call_func_failure)
+
+ with self.assert_fails(ValueError):
+ interp.call(call_func_complex, '???', exc=ValueError('spam'))
+
+ def test_call_valid(self):
+ interp = interpreters.create()
+
+ for i, (callable, args, kwargs, expected) in enumerate([
+ (call_func_noop, (), {}, None),
+ (call_func_ident, ('spamspamspam',), {}, 'spamspamspam'),
+ (call_func_return_shareable, (), {}, (1, None)),
+ (call_func_return_pickleable, (), {}, [1, 2, 3]),
+ (Spam.noop, (), {}, None),
+ (Spam.from_values, (), {}, Spam(())),
+ (Spam.from_values, (1, 2, 3), {}, Spam((1, 2, 3))),
+ (Spam, ('???',), {}, Spam('???')),
+ (Spam(101), (), {}, (101, (), {})),
+ (Spam(10101).run, (), {}, (10101, (), {})),
+ (call_func_complex, ('ident', 'spam'), {}, 'spam'),
+ (call_func_complex, ('full-ident', 'spam'), {}, ('spam', (), {})),
+ (call_func_complex, ('full-ident', 'spam', 'ham'), {'eggs': '!!!'},
+ ('spam', ('ham',), {'eggs': '!!!'})),
+ (call_func_complex, ('globals',), {}, __name__),
+ (call_func_complex, ('interpid',), {}, interp.id),
+ (call_func_complex, ('custom', 'spam!'), {}, Spam('spam!')),
]):
with self.subTest(f'success case #{i+1}'):
- res = interp.call(callable)
- self.assertIs(res, None)
+ res = interp.call(callable, *args, **kwargs)
+ self.assertEqual(res, expected)
+
+ def test_call_invalid(self):
+ interp = interpreters.create()
+
+ func = get_call_func_closure
+ with self.subTest(func):
+ with self.assertRaises(interpreters.NotShareableError):
+ interp.call(func, 42)
+
+ func = get_call_func_closure(42)
+ with self.subTest(func):
+ with self.assertRaises(interpreters.NotShareableError):
+ interp.call(func)
+
+ func = call_func_complex
+ op = 'closure'
+ with self.subTest(f'{func} ({op})'):
+ with self.assertRaises(interpreters.NotShareableError):
+ interp.call(func, op, value='~~~')
+
+ op = 'custom-inner'
+ with self.subTest(f'{func} ({op})'):
+ with self.assertRaises(interpreters.NotShareableError):
+ interp.call(func, op, 'eggs!')
+
+ def test_call_in_thread(self):
+ interp = interpreters.create()
for i, (callable, args, kwargs) in enumerate([
- (call_func_ident, ('spamspamspam',), {}),
- (get_call_func_closure, (42,), {}),
- (get_call_func_closure(42), (), {}),
+ (call_func_noop, (), {}),
+ (call_func_return_shareable, (), {}),
+ (call_func_return_pickleable, (), {}),
(Spam.from_values, (), {}),
(Spam.from_values, (1, 2, 3), {}),
- (Spam, ('???'), {}),
(Spam(101), (), {}),
(Spam(10101).run, (), {}),
+ (Spam.noop, (), {}),
(call_func_complex, ('ident', 'spam'), {}),
(call_func_complex, ('full-ident', 'spam'), {}),
(call_func_complex, ('full-ident', 'spam', 'ham'), {'eggs': '!!!'}),
(call_func_complex, ('globals',), {}),
(call_func_complex, ('interpid',), {}),
- (call_func_complex, ('closure',), {'value': '~~~'}),
(call_func_complex, ('custom', 'spam!'), {}),
- (call_func_complex, ('custom-inner', 'eggs!'), {}),
- (call_func_complex, ('???',), {'exc': ValueError('spam')}),
- (call_func_return_shareable, (), {}),
- (call_func_return_not_shareable, (), {}),
- ]):
- with self.subTest(f'invalid case #{i+1}'):
- with self.assertRaises(Exception):
- if args or kwargs:
- raise Exception((args, kwargs))
- interp.call(callable)
-
- with self.assertRaises(ExecutionFailed):
- interp.call(call_func_failure)
-
- def test_call_in_thread(self):
- interp = interpreters.create()
-
- for i, (callable, args, kwargs) in enumerate([
- (call_func_noop, (), {}),
- (Spam.noop, (), {}),
]):
with self.subTest(f'success case #{i+1}'):
with self.captured_thread_exception() as ctx:
- t = interp.call_in_thread(callable)
+ t = interp.call_in_thread(callable, *args, **kwargs)
t.join()
self.assertIsNone(ctx.caught)
for i, (callable, args, kwargs) in enumerate([
- (call_func_ident, ('spamspamspam',), {}),
(get_call_func_closure, (42,), {}),
(get_call_func_closure(42), (), {}),
- (Spam.from_values, (), {}),
- (Spam.from_values, (1, 2, 3), {}),
- (Spam, ('???'), {}),
- (Spam(101), (), {}),
- (Spam(10101).run, (), {}),
- (call_func_complex, ('ident', 'spam'), {}),
- (call_func_complex, ('full-ident', 'spam'), {}),
- (call_func_complex, ('full-ident', 'spam', 'ham'), {'eggs': '!!!'}),
- (call_func_complex, ('globals',), {}),
- (call_func_complex, ('interpid',), {}),
- (call_func_complex, ('closure',), {'value': '~~~'}),
- (call_func_complex, ('custom', 'spam!'), {}),
- (call_func_complex, ('custom-inner', 'eggs!'), {}),
- (call_func_complex, ('???',), {'exc': ValueError('spam')}),
- (call_func_return_shareable, (), {}),
- (call_func_return_not_shareable, (), {}),
]):
with self.subTest(f'invalid case #{i+1}'):
- if args or kwargs:
- continue
with self.captured_thread_exception() as ctx:
- t = interp.call_in_thread(callable)
+ t = interp.call_in_thread(callable, *args, **kwargs)
t.join()
self.assertIsNotNone(ctx.caught)
@@ -1600,18 +1954,14 @@ class LowLevelTests(TestBase):
with results:
exc = _interpreters.exec(interpid, script)
out = results.stdout()
- self.assertEqual(out, '')
- self.assert_ns_equal(exc, types.SimpleNamespace(
- type=types.SimpleNamespace(
- __name__='Exception',
- __qualname__='Exception',
- __module__='builtins',
- ),
- msg='uh-oh!',
+ expected = build_excinfo(
+ Exception, 'uh-oh!',
# We check these in other tests.
formatted=exc.formatted,
errdisplay=exc.errdisplay,
- ))
+ )
+ self.assertEqual(out, '')
+ self.assert_ns_equal(exc, expected)
with self.subTest('from C-API'):
with self.interpreter_from_capi() as interpid:
@@ -1623,25 +1973,50 @@ class LowLevelTests(TestBase):
self.assertEqual(exc.msg, 'it worked!')
def test_call(self):
- with self.subTest('no args'):
- interpid = _interpreters.create()
- with self.assertRaises(ValueError):
- _interpreters.call(interpid, call_func_return_shareable)
+ interpid = _interpreters.create()
+
+ # Here we focus on basic args and return values.
+ # See TestInterpreterCall for full operational coverage,
+ # including supported callables.
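+ # Note that _interpreters.call() returns a (res, exc) pair: exc is None on
+ # success, and res is None when the callable raised.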
+
+ with self.subTest('no args, return None'):
+ func = defs.spam_minimal
+ res, exc = _interpreters.call(interpid, func)
+ self.assertIsNone(exc)
+ self.assertIsNone(res)
+
+ with self.subTest('empty args, return None'):
+ func = defs.spam_minimal
+ res, exc = _interpreters.call(interpid, func, (), {})
+ self.assertIsNone(exc)
+ self.assertIsNone(res)
+
+ with self.subTest('no args, return non-None'):
+ func = defs.script_with_return
+ res, exc = _interpreters.call(interpid, func)
+ self.assertIsNone(exc)
+ self.assertIs(res, True)
+
+ with self.subTest('full args, return non-None'):
+ expected = (1, 2, 3, 4, 5, 6, (7, 8), {'g': 9, 'h': 0})
+ func = defs.spam_full_args
+ args = (1, 2, 3, 4, 7, 8)
+ kwargs = dict(e=5, f=6, g=9, h=0)
+ res, exc = _interpreters.call(interpid, func, args, kwargs)
+ self.assertIsNone(exc)
+ self.assertEqual(res, expected)
with self.subTest('uncaught exception'):
- interpid = _interpreters.create()
- exc = _interpreters.call(interpid, call_func_failure)
- self.assertEqual(exc, types.SimpleNamespace(
- type=types.SimpleNamespace(
- __name__='Exception',
- __qualname__='Exception',
- __module__='builtins',
- ),
- msg='spam!',
+ func = defs.spam_raises
+ res, exc = _interpreters.call(interpid, func)
+ expected = build_excinfo(
+ Exception, 'spam!',
# We check these in other tests.
formatted=exc.formatted,
errdisplay=exc.errdisplay,
- ))
+ )
+ self.assertIsNone(res)
+ self.assertEqual(exc, expected)
@requires_test_modules
def test_set___main___attrs(self):
diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py
index 168e66c5a3f..0c921ffbc25 100644
--- a/Lib/test/test_io.py
+++ b/Lib/test/test_io.py
@@ -1062,6 +1062,37 @@ class IOTest(unittest.TestCase):
# Silence destructor error
R.flush = lambda self: None
+ @threading_helper.requires_working_threading()
+ def test_write_readline_races(self):
+ # gh-134908: Concurrent iteration over a file caused races
+ thread_count = 2
+ write_count = 100
+ read_count = 100
+
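+ # The threads share one file object (and file position); the reader is not
+ # expected to observe any data. The real checks are that no thread raises
+ # and that the final size matches everything the writers wrote.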
+ def writer(file, barrier):
+ barrier.wait()
+ for _ in range(write_count):
+ file.write("x")
+
+ def reader(file, barrier):
+ barrier.wait()
+ for _ in range(read_count):
+ for line in file:
+ self.assertEqual(line, "")
+
+ with self.open(os_helper.TESTFN, "w+") as f:
+ barrier = threading.Barrier(thread_count + 1)
+ reader = threading.Thread(target=reader, args=(f, barrier))
+ writers = [threading.Thread(target=writer, args=(f, barrier))
+ for _ in range(thread_count)]
+ with threading_helper.catch_threading_exception() as cm:
+ with threading_helper.start_threads(writers + [reader]):
+ pass
+ self.assertIsNone(cm.exc_type)
+
+ self.assertEqual(os.stat(os_helper.TESTFN).st_size,
+ write_count * thread_count)
+
class CIOTest(IOTest):
diff --git a/Lib/test/test_json/test_dump.py b/Lib/test/test_json/test_dump.py
index 13b40020781..39470754003 100644
--- a/Lib/test/test_json/test_dump.py
+++ b/Lib/test/test_json/test_dump.py
@@ -22,6 +22,14 @@ class TestDump:
self.assertIn('valid_key', o)
self.assertNotIn(b'invalid_key', o)
+ def test_dump_skipkeys_indent_empty(self):
+ v = {b'invalid_key': False}
+ self.assertEqual(self.json.dumps(v, skipkeys=True, indent=4), '{}')
+
+ def test_skipkeys_indent(self):
+ v = {b'invalid_key': False, 'valid_key': True}
+ self.assertEqual(self.json.dumps(v, skipkeys=True, indent=4), '{\n "valid_key": true\n}')
+
def test_encode_truefalse(self):
self.assertEqual(self.dumps(
{True: False, False: True}, sort_keys=True),
diff --git a/Lib/test/test_math.py b/Lib/test/test_math.py
index d14336f8bac..384ad5c828d 100644
--- a/Lib/test/test_math.py
+++ b/Lib/test/test_math.py
@@ -1973,6 +1973,28 @@ class MathTests(unittest.TestCase):
self.assertFalse(math.isfinite(float("inf")))
self.assertFalse(math.isfinite(float("-inf")))
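+ # isnormal() is true only for finite, nonzero, non-subnormal values;
+ # issubnormal() is true only for nonzero values with magnitude below
+ # FLOAT_MIN (the smallest normal float).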
+ def testIsnormal(self):
+ self.assertTrue(math.isnormal(1.25))
+ self.assertTrue(math.isnormal(-1.0))
+ self.assertFalse(math.isnormal(0.0))
+ self.assertFalse(math.isnormal(-0.0))
+ self.assertFalse(math.isnormal(INF))
+ self.assertFalse(math.isnormal(NINF))
+ self.assertFalse(math.isnormal(NAN))
+ self.assertFalse(math.isnormal(FLOAT_MIN/2))
+ self.assertFalse(math.isnormal(-FLOAT_MIN/2))
+
+ def testIssubnormal(self):
+ self.assertFalse(math.issubnormal(1.25))
+ self.assertFalse(math.issubnormal(-1.0))
+ self.assertFalse(math.issubnormal(0.0))
+ self.assertFalse(math.issubnormal(-0.0))
+ self.assertFalse(math.issubnormal(INF))
+ self.assertFalse(math.issubnormal(NINF))
+ self.assertFalse(math.issubnormal(NAN))
+ self.assertTrue(math.issubnormal(FLOAT_MIN/2))
+ self.assertTrue(math.issubnormal(-FLOAT_MIN/2))
+
def testIsnan(self):
self.assertTrue(math.isnan(float("nan")))
self.assertTrue(math.isnan(float("-nan")))
diff --git a/Lib/test/test_monitoring.py b/Lib/test/test_monitoring.py
index 263e4e6f394..a932ac80117 100644
--- a/Lib/test/test_monitoring.py
+++ b/Lib/test/test_monitoring.py
@@ -2157,6 +2157,21 @@ class TestRegressions(MonitoringTestBase, unittest.TestCase):
sys.monitoring.restart_events()
sys.monitoring.set_events(0, 0)
+ def test_134879(self):
+ # gh-134789
+ # Specialized FOR_ITER not incrementing index
+ def foo():
+ t = 0
+ for i in [1,2,3,4]:
+ t += i
+ self.assertEqual(t, 10)
+
+ sys.monitoring.use_tool_id(0, "test")
+ self.addCleanup(sys.monitoring.free_tool_id, 0)
+ sys.monitoring.set_local_events(0, foo.__code__, E.BRANCH_LEFT | E.BRANCH_RIGHT)
+ foo()
+ sys.monitoring.set_local_events(0, foo.__code__, 0)
+
class TestOptimizer(MonitoringTestBase, unittest.TestCase):
diff --git a/Lib/test/test_ntpath.py b/Lib/test/test_ntpath.py
index c3b0bdaebc2..b891d0734ca 100644
--- a/Lib/test/test_ntpath.py
+++ b/Lib/test/test_ntpath.py
@@ -6,6 +6,7 @@ import subprocess
import sys
import unittest
import warnings
+from ntpath import ALLOW_MISSING
from test.support import TestFailed, cpython_only, os_helper
from test.support.os_helper import FakePath
from test import test_genericpath
@@ -76,6 +77,27 @@ def tester(fn, wantResult):
%(str(fn), str(wantResult), repr(gotResult)))
+def _parameterize(*parameters):
+ """Simplistic decorator to parametrize a test
+
+ Runs the decorated test multiple times in subTest, with a value from
+ 'parameters' passed as an extra positional argument.
+ Calls doCleanups() after each run.
+
+ Not for general use. Intended to avoid indenting for easier backports.
+
+ See https://discuss.python.org/t/91827 for discussing generalizations.
+ """
+ def _parametrize_decorator(func):
+ def _parameterized(self, *args, **kwargs):
+ for parameter in parameters:
+ with self.subTest(parameter):
+ func(self, *args, parameter, **kwargs)
+ self.doCleanups()
+ return _parameterized
+ return _parametrize_decorator
+
+
class NtpathTestCase(unittest.TestCase):
def assertPathEqual(self, path1, path2):
if path1 == path2 or _norm(path1) == _norm(path2):
@@ -474,6 +496,27 @@ class TestNtpath(NtpathTestCase):
tester("ntpath.realpath('.\\.')", expected)
tester("ntpath.realpath('\\'.join(['.'] * 100))", expected)
+ def test_realpath_curdir_strict(self):
+ expected = ntpath.normpath(os.getcwd())
+ tester("ntpath.realpath('.', strict=True)", expected)
+ tester("ntpath.realpath('./.', strict=True)", expected)
+ tester("ntpath.realpath('/'.join(['.'] * 100), strict=True)", expected)
+ tester("ntpath.realpath('.\\.', strict=True)", expected)
+ tester("ntpath.realpath('\\'.join(['.'] * 100), strict=True)", expected)
+
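+ # strict=ALLOW_MISSING tolerates nonexistent path components but still
+ # raises for other failures (symlink loops, permission errors, embedded
+ # nulls), as exercised below.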
+ def test_realpath_curdir_missing_ok(self):
+ expected = ntpath.normpath(os.getcwd())
+ tester("ntpath.realpath('.', strict=ALLOW_MISSING)",
+ expected)
+ tester("ntpath.realpath('./.', strict=ALLOW_MISSING)",
+ expected)
+ tester("ntpath.realpath('/'.join(['.'] * 100), strict=ALLOW_MISSING)",
+ expected)
+ tester("ntpath.realpath('.\\.', strict=ALLOW_MISSING)",
+ expected)
+ tester("ntpath.realpath('\\'.join(['.'] * 100), strict=ALLOW_MISSING)",
+ expected)
+
def test_realpath_pardir(self):
expected = ntpath.normpath(os.getcwd())
tester("ntpath.realpath('..')", ntpath.dirname(expected))
@@ -486,24 +529,59 @@ class TestNtpath(NtpathTestCase):
tester("ntpath.realpath('\\'.join(['..'] * 50))",
ntpath.splitdrive(expected)[0] + '\\')
+ def test_realpath_pardir_strict(self):
+ expected = ntpath.normpath(os.getcwd())
+ tester("ntpath.realpath('..', strict=True)", ntpath.dirname(expected))
+ tester("ntpath.realpath('../..', strict=True)",
+ ntpath.dirname(ntpath.dirname(expected)))
+ tester("ntpath.realpath('/'.join(['..'] * 50), strict=True)",
+ ntpath.splitdrive(expected)[0] + '\\')
+ tester("ntpath.realpath('..\\..', strict=True)",
+ ntpath.dirname(ntpath.dirname(expected)))
+ tester("ntpath.realpath('\\'.join(['..'] * 50), strict=True)",
+ ntpath.splitdrive(expected)[0] + '\\')
+
+ def test_realpath_pardir_missing_ok(self):
+ expected = ntpath.normpath(os.getcwd())
+ tester("ntpath.realpath('..', strict=ALLOW_MISSING)",
+ ntpath.dirname(expected))
+ tester("ntpath.realpath('../..', strict=ALLOW_MISSING)",
+ ntpath.dirname(ntpath.dirname(expected)))
+ tester("ntpath.realpath('/'.join(['..'] * 50), strict=ALLOW_MISSING)",
+ ntpath.splitdrive(expected)[0] + '\\')
+ tester("ntpath.realpath('..\\..', strict=ALLOW_MISSING)",
+ ntpath.dirname(ntpath.dirname(expected)))
+ tester("ntpath.realpath('\\'.join(['..'] * 50), strict=ALLOW_MISSING)",
+ ntpath.splitdrive(expected)[0] + '\\')
+
@os_helper.skip_unless_symlink
@unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
- def test_realpath_basic(self):
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_basic(self, kwargs):
ABSTFN = ntpath.abspath(os_helper.TESTFN)
open(ABSTFN, "wb").close()
self.addCleanup(os_helper.unlink, ABSTFN)
self.addCleanup(os_helper.unlink, ABSTFN + "1")
os.symlink(ABSTFN, ABSTFN + "1")
- self.assertPathEqual(ntpath.realpath(ABSTFN + "1"), ABSTFN)
- self.assertPathEqual(ntpath.realpath(os.fsencode(ABSTFN + "1")),
+ self.assertPathEqual(ntpath.realpath(ABSTFN + "1", **kwargs), ABSTFN)
+ self.assertPathEqual(ntpath.realpath(os.fsencode(ABSTFN + "1"), **kwargs),
os.fsencode(ABSTFN))
# gh-88013: call ntpath.realpath with binary drive name may raise a
# TypeError. The drive should not exist to reproduce the bug.
drives = {f"{c}:\\" for c in string.ascii_uppercase} - set(os.listdrives())
d = drives.pop().encode()
- self.assertEqual(ntpath.realpath(d), d)
+ self.assertEqual(ntpath.realpath(d, strict=False), d)
+
+ # gh-106242: Embedded nulls and non-strict fallback to abspath
+ if kwargs:
+ with self.assertRaises(OSError):
+ ntpath.realpath(os_helper.TESTFN + "\0spam",
+ **kwargs)
+ else:
+ self.assertEqual(ABSTFN + "\0spam",
+ ntpath.realpath(os_helper.TESTFN + "\0spam", **kwargs))
@os_helper.skip_unless_symlink
@unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
@@ -526,51 +604,66 @@ class TestNtpath(NtpathTestCase):
self.assertEqual(realpath(path, strict=False), path)
# gh-106242: Embedded nulls should raise OSError (not ValueError)
self.assertRaises(OSError, realpath, path, strict=True)
+ self.assertRaises(OSError, realpath, path, strict=ALLOW_MISSING)
path = ABSTFNb + b'\x00'
self.assertEqual(realpath(path, strict=False), path)
self.assertRaises(OSError, realpath, path, strict=True)
+ self.assertRaises(OSError, realpath, path, strict=ALLOW_MISSING)
path = ABSTFN + '\\nonexistent\\x\x00'
self.assertEqual(realpath(path, strict=False), path)
self.assertRaises(OSError, realpath, path, strict=True)
+ self.assertRaises(OSError, realpath, path, strict=ALLOW_MISSING)
path = ABSTFNb + b'\\nonexistent\\x\x00'
self.assertEqual(realpath(path, strict=False), path)
self.assertRaises(OSError, realpath, path, strict=True)
+ self.assertRaises(OSError, realpath, path, strict=ALLOW_MISSING)
path = ABSTFN + '\x00\\..'
self.assertEqual(realpath(path, strict=False), os.getcwd())
self.assertEqual(realpath(path, strict=True), os.getcwd())
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), os.getcwd())
path = ABSTFNb + b'\x00\\..'
self.assertEqual(realpath(path, strict=False), os.getcwdb())
self.assertEqual(realpath(path, strict=True), os.getcwdb())
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), os.getcwdb())
path = ABSTFN + '\\nonexistent\\x\x00\\..'
self.assertEqual(realpath(path, strict=False), ABSTFN + '\\nonexistent')
self.assertRaises(OSError, realpath, path, strict=True)
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), ABSTFN + '\\nonexistent')
path = ABSTFNb + b'\\nonexistent\\x\x00\\..'
self.assertEqual(realpath(path, strict=False), ABSTFNb + b'\\nonexistent')
self.assertRaises(OSError, realpath, path, strict=True)
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), ABSTFNb + b'\\nonexistent')
+ @unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_invalid_unicode_paths(self, kwargs):
+ realpath = ntpath.realpath
+ ABSTFN = ntpath.abspath(os_helper.TESTFN)
+ ABSTFNb = os.fsencode(ABSTFN)
path = ABSTFNb + b'\xff'
- self.assertRaises(UnicodeDecodeError, realpath, path, strict=False)
- self.assertRaises(UnicodeDecodeError, realpath, path, strict=True)
+ self.assertRaises(UnicodeDecodeError, realpath, path, **kwargs)
+ self.assertRaises(UnicodeDecodeError, realpath, path, **kwargs)
path = ABSTFNb + b'\\nonexistent\\\xff'
- self.assertRaises(UnicodeDecodeError, realpath, path, strict=False)
- self.assertRaises(UnicodeDecodeError, realpath, path, strict=True)
+ self.assertRaises(UnicodeDecodeError, realpath, path, **kwargs)
+ self.assertRaises(UnicodeDecodeError, realpath, path, **kwargs)
path = ABSTFNb + b'\xff\\..'
- self.assertRaises(UnicodeDecodeError, realpath, path, strict=False)
- self.assertRaises(UnicodeDecodeError, realpath, path, strict=True)
+ self.assertRaises(UnicodeDecodeError, realpath, path, **kwargs)
+ self.assertRaises(UnicodeDecodeError, realpath, path, **kwargs)
path = ABSTFNb + b'\\nonexistent\\\xff\\..'
- self.assertRaises(UnicodeDecodeError, realpath, path, strict=False)
- self.assertRaises(UnicodeDecodeError, realpath, path, strict=True)
+ self.assertRaises(UnicodeDecodeError, realpath, path, **kwargs)
+ self.assertRaises(UnicodeDecodeError, realpath, path, **kwargs)
@os_helper.skip_unless_symlink
@unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
- def test_realpath_relative(self):
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_relative(self, kwargs):
ABSTFN = ntpath.abspath(os_helper.TESTFN)
open(ABSTFN, "wb").close()
self.addCleanup(os_helper.unlink, ABSTFN)
self.addCleanup(os_helper.unlink, ABSTFN + "1")
os.symlink(ABSTFN, ntpath.relpath(ABSTFN + "1"))
- self.assertPathEqual(ntpath.realpath(ABSTFN + "1"), ABSTFN)
+ self.assertPathEqual(ntpath.realpath(ABSTFN + "1", **kwargs), ABSTFN)
@os_helper.skip_unless_symlink
@unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
@@ -722,7 +815,62 @@ class TestNtpath(NtpathTestCase):
@os_helper.skip_unless_symlink
@unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
- def test_realpath_symlink_prefix(self):
+ def test_realpath_symlink_loops_raise(self):
+ # Symlink loops raise OSError in ALLOW_MISSING mode
+ ABSTFN = ntpath.abspath(os_helper.TESTFN)
+ self.addCleanup(os_helper.unlink, ABSTFN)
+ self.addCleanup(os_helper.unlink, ABSTFN + "1")
+ self.addCleanup(os_helper.unlink, ABSTFN + "2")
+ self.addCleanup(os_helper.unlink, ABSTFN + "y")
+ self.addCleanup(os_helper.unlink, ABSTFN + "c")
+ self.addCleanup(os_helper.unlink, ABSTFN + "a")
+ self.addCleanup(os_helper.unlink, ABSTFN + "x")
+
+ os.symlink(ABSTFN, ABSTFN)
+ self.assertRaises(OSError, ntpath.realpath, ABSTFN, strict=ALLOW_MISSING)
+
+ os.symlink(ABSTFN + "1", ABSTFN + "2")
+ os.symlink(ABSTFN + "2", ABSTFN + "1")
+ self.assertRaises(OSError, ntpath.realpath, ABSTFN + "1",
+ strict=ALLOW_MISSING)
+ self.assertRaises(OSError, ntpath.realpath, ABSTFN + "2",
+ strict=ALLOW_MISSING)
+ self.assertRaises(OSError, ntpath.realpath, ABSTFN + "1\\x",
+ strict=ALLOW_MISSING)
+
+ # Windows eliminates '..' components before resolving links;
+ # realpath is not expected to raise if this removes the loop.
+ self.assertPathEqual(ntpath.realpath(ABSTFN + "1\\.."),
+ ntpath.dirname(ABSTFN))
+ self.assertPathEqual(ntpath.realpath(ABSTFN + "1\\..\\x"),
+ ntpath.dirname(ABSTFN) + "\\x")
+
+ os.symlink(ABSTFN + "x", ABSTFN + "y")
+ self.assertPathEqual(ntpath.realpath(ABSTFN + "1\\..\\"
+ + ntpath.basename(ABSTFN) + "y"),
+ ABSTFN + "x")
+ self.assertRaises(
+ OSError, ntpath.realpath,
+ ABSTFN + "1\\..\\" + ntpath.basename(ABSTFN) + "1",
+ strict=ALLOW_MISSING)
+
+ os.symlink(ntpath.basename(ABSTFN) + "a\\b", ABSTFN + "a")
+ self.assertRaises(OSError, ntpath.realpath, ABSTFN + "a",
+ strict=ALLOW_MISSING)
+
+ os.symlink("..\\" + ntpath.basename(ntpath.dirname(ABSTFN))
+ + "\\" + ntpath.basename(ABSTFN) + "c", ABSTFN + "c")
+ self.assertRaises(OSError, ntpath.realpath, ABSTFN + "c",
+ strict=ALLOW_MISSING)
+
+ # Test using relative path as well.
+ self.assertRaises(OSError, ntpath.realpath, ntpath.basename(ABSTFN),
+ strict=ALLOW_MISSING)
+
+ @os_helper.skip_unless_symlink
+ @unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_symlink_prefix(self, kwargs):
ABSTFN = ntpath.abspath(os_helper.TESTFN)
self.addCleanup(os_helper.unlink, ABSTFN + "3")
self.addCleanup(os_helper.unlink, "\\\\?\\" + ABSTFN + "3.")
@@ -737,9 +885,9 @@ class TestNtpath(NtpathTestCase):
f.write(b'1')
os.symlink("\\\\?\\" + ABSTFN + "3.", ABSTFN + "3.link")
- self.assertPathEqual(ntpath.realpath(ABSTFN + "3link"),
+ self.assertPathEqual(ntpath.realpath(ABSTFN + "3link", **kwargs),
ABSTFN + "3")
- self.assertPathEqual(ntpath.realpath(ABSTFN + "3.link"),
+ self.assertPathEqual(ntpath.realpath(ABSTFN + "3.link", **kwargs),
"\\\\?\\" + ABSTFN + "3.")
# Resolved paths should be usable to open target files
@@ -749,14 +897,17 @@ class TestNtpath(NtpathTestCase):
self.assertEqual(f.read(), b'1')
# When the prefix is included, it is not stripped
- self.assertPathEqual(ntpath.realpath("\\\\?\\" + ABSTFN + "3link"),
+ self.assertPathEqual(ntpath.realpath("\\\\?\\" + ABSTFN + "3link", **kwargs),
"\\\\?\\" + ABSTFN + "3")
- self.assertPathEqual(ntpath.realpath("\\\\?\\" + ABSTFN + "3.link"),
+ self.assertPathEqual(ntpath.realpath("\\\\?\\" + ABSTFN + "3.link", **kwargs),
"\\\\?\\" + ABSTFN + "3.")
@unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
def test_realpath_nul(self):
tester("ntpath.realpath('NUL')", r'\\.\NUL')
+ tester("ntpath.realpath('NUL', strict=False)", r'\\.\NUL')
+ tester("ntpath.realpath('NUL', strict=True)", r'\\.\NUL')
+ tester("ntpath.realpath('NUL', strict=ALLOW_MISSING)", r'\\.\NUL')
@unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
@unittest.skipUnless(HAVE_GETSHORTPATHNAME, 'need _getshortpathname')
@@ -780,12 +931,20 @@ class TestNtpath(NtpathTestCase):
self.assertPathEqual(test_file_long, ntpath.realpath(test_file_short))
- with os_helper.change_cwd(test_dir_long):
- self.assertPathEqual(test_file_long, ntpath.realpath("file.txt"))
- with os_helper.change_cwd(test_dir_long.lower()):
- self.assertPathEqual(test_file_long, ntpath.realpath("file.txt"))
- with os_helper.change_cwd(test_dir_short):
- self.assertPathEqual(test_file_long, ntpath.realpath("file.txt"))
+ for kwargs in {}, {'strict': True}, {'strict': ALLOW_MISSING}:
+ with self.subTest(**kwargs):
+ with os_helper.change_cwd(test_dir_long):
+ self.assertPathEqual(
+ test_file_long,
+ ntpath.realpath("file.txt", **kwargs))
+ with os_helper.change_cwd(test_dir_long.lower()):
+ self.assertPathEqual(
+ test_file_long,
+ ntpath.realpath("file.txt", **kwargs))
+ with os_helper.change_cwd(test_dir_short):
+ self.assertPathEqual(
+ test_file_long,
+ ntpath.realpath("file.txt", **kwargs))
@unittest.skipUnless(HAVE_GETFINALPATHNAME, 'need _getfinalpathname')
def test_realpath_permission(self):
@@ -806,12 +965,15 @@ class TestNtpath(NtpathTestCase):
# Automatic generation of short names may be disabled on
# NTFS volumes for the sake of performance.
# They're not supported at all on ReFS and exFAT.
- subprocess.run(
+ p = subprocess.run(
# Try to set the short name manually.
['fsutil.exe', 'file', 'setShortName', test_file, 'LONGFI~1.TXT'],
creationflags=subprocess.DETACHED_PROCESS
)
+ if p.returncode:
+ raise unittest.SkipTest('failed to set short name')
+
try:
self.assertPathEqual(test_file, ntpath.realpath(test_file_short))
except AssertionError:
diff --git a/Lib/test/test_peepholer.py b/Lib/test/test_peepholer.py
index f33de3d420c..ef596630b93 100644
--- a/Lib/test/test_peepholer.py
+++ b/Lib/test/test_peepholer.py
@@ -2614,6 +2614,90 @@ class OptimizeLoadFastTestCase(DirectCfgOptimizerTests):
]
self.cfg_optimization_test(insts, expected, consts=[None])
+ def test_format_simple(self):
+ # FORMAT_SIMPLE will leave its operand on the stack if it's a unicode
+ # object. We treat it conservatively and assume that it always leaves
+ # its operand on the stack.
+ insts = [
+ ("LOAD_FAST", 0, 1),
+ ("FORMAT_SIMPLE", None, 2),
+ ("STORE_FAST", 1, 3),
+ ]
+ self.check(insts, insts)
+
+ insts = [
+ ("LOAD_FAST", 0, 1),
+ ("FORMAT_SIMPLE", None, 2),
+ ("POP_TOP", None, 3),
+ ]
+ expected = [
+ ("LOAD_FAST_BORROW", 0, 1),
+ ("FORMAT_SIMPLE", None, 2),
+ ("POP_TOP", None, 3),
+ ]
+ self.check(insts, expected)
+
+ def test_set_function_attribute(self):
+ # SET_FUNCTION_ATTRIBUTE leaves the function on the stack
+ insts = [
+ ("LOAD_CONST", 0, 1),
+ ("LOAD_FAST", 0, 2),
+ ("SET_FUNCTION_ATTRIBUTE", 2, 3),
+ ("STORE_FAST", 1, 4),
+ ("LOAD_CONST", 0, 5),
+ ("RETURN_VALUE", None, 6)
+ ]
+ self.cfg_optimization_test(insts, insts, consts=[None])
+
+ insts = [
+ ("LOAD_CONST", 0, 1),
+ ("LOAD_FAST", 0, 2),
+ ("SET_FUNCTION_ATTRIBUTE", 2, 3),
+ ("RETURN_VALUE", None, 4)
+ ]
+ expected = [
+ ("LOAD_CONST", 0, 1),
+ ("LOAD_FAST_BORROW", 0, 2),
+ ("SET_FUNCTION_ATTRIBUTE", 2, 3),
+ ("RETURN_VALUE", None, 4)
+ ]
+ self.cfg_optimization_test(insts, expected, consts=[None])
+
+ def test_get_yield_from_iter(self):
+ # GET_YIELD_FROM_ITER may leave its operand on the stack
+ insts = [
+ ("LOAD_FAST", 0, 1),
+ ("GET_YIELD_FROM_ITER", None, 2),
+ ("LOAD_CONST", 0, 3),
+ send := self.Label(),
+ ("SEND", end := self.Label(), 5),
+ ("YIELD_VALUE", 1, 6),
+ ("RESUME", 2, 7),
+ ("JUMP", send, 8),
+ end,
+ ("END_SEND", None, 9),
+ ("LOAD_CONST", 0, 10),
+ ("RETURN_VALUE", None, 11),
+ ]
+ self.cfg_optimization_test(insts, insts, consts=[None])
+
+ def test_push_exc_info(self):
+ insts = [
+ ("LOAD_FAST", 0, 1),
+ ("PUSH_EXC_INFO", None, 2),
+ ]
+ self.check(insts, insts)
+
+ def test_load_special(self):
+ # LOAD_SPECIAL may leave self on the stack
+ insts = [
+ ("LOAD_FAST", 0, 1),
+ ("LOAD_SPECIAL", 0, 2),
+ ("STORE_FAST", 1, 3),
+ ]
+ self.check(insts, insts)
+
+
def test_del_in_finally(self):
# This loads `obj` onto the stack, executes `del obj`, then returns the
# `obj` from the stack. See gh-133371 for more details.
@@ -2630,6 +2714,14 @@ class OptimizeLoadFastTestCase(DirectCfgOptimizerTests):
gc.collect()
self.assertEqual(obj, [42])
+ def test_format_simple_unicode(self):
+ # Repro from gh-134889
+ def f():
+ var = f"{1}"
+ var = f"{var}"
+ return var
+ self.assertEqual(f(), "1")
+
if __name__ == "__main__":
diff --git a/Lib/test/test_posixpath.py b/Lib/test/test_posixpath.py
index f3f9895f529..c45ce6d3ef7 100644
--- a/Lib/test/test_posixpath.py
+++ b/Lib/test/test_posixpath.py
@@ -4,7 +4,8 @@ import posixpath
import random
import sys
import unittest
-from posixpath import realpath, abspath, dirname, basename
+from functools import partial
+from posixpath import realpath, abspath, dirname, basename, ALLOW_MISSING
from test import support
from test import test_genericpath
from test.support import import_helper
@@ -33,6 +34,27 @@ def skip_if_ABSTFN_contains_backslash(test):
msg = "ABSTFN is not a posix path - tests fail"
return [test, unittest.skip(msg)(test)][found_backslash]
+
+def _parameterize(*parameters):
+ """Simplistic decorator to parametrize a test
+
+ Runs the decorated test multiple times in subTest, with a value from
+ 'parameters' passed as an extra positional argument.
+ Does *not* call doCleanups() after each run.
+
+ Not for general use. Intended to avoid indenting for easier backports.
+
+ See https://discuss.python.org/t/91827 for discussing generalizations.
+ """
+ def _parametrize_decorator(func):
+ def _parameterized(self, *args, **kwargs):
+ for parameter in parameters:
+ with self.subTest(parameter):
+ func(self, *args, parameter, **kwargs)
+ return _parameterized
+ return _parametrize_decorator
+
+
class PosixPathTest(unittest.TestCase):
def setUp(self):
@@ -442,32 +464,35 @@ class PosixPathTest(unittest.TestCase):
self.assertEqual(result, expected)
@skip_if_ABSTFN_contains_backslash
- def test_realpath_curdir(self):
- self.assertEqual(realpath('.'), os.getcwd())
- self.assertEqual(realpath('./.'), os.getcwd())
- self.assertEqual(realpath('/'.join(['.'] * 100)), os.getcwd())
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_curdir(self, kwargs):
+ self.assertEqual(realpath('.', **kwargs), os.getcwd())
+ self.assertEqual(realpath('./.', **kwargs), os.getcwd())
+ self.assertEqual(realpath('/'.join(['.'] * 100), **kwargs), os.getcwd())
- self.assertEqual(realpath(b'.'), os.getcwdb())
- self.assertEqual(realpath(b'./.'), os.getcwdb())
- self.assertEqual(realpath(b'/'.join([b'.'] * 100)), os.getcwdb())
+ self.assertEqual(realpath(b'.', **kwargs), os.getcwdb())
+ self.assertEqual(realpath(b'./.', **kwargs), os.getcwdb())
+ self.assertEqual(realpath(b'/'.join([b'.'] * 100), **kwargs), os.getcwdb())
@skip_if_ABSTFN_contains_backslash
- def test_realpath_pardir(self):
- self.assertEqual(realpath('..'), dirname(os.getcwd()))
- self.assertEqual(realpath('../..'), dirname(dirname(os.getcwd())))
- self.assertEqual(realpath('/'.join(['..'] * 100)), '/')
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_pardir(self, kwargs):
+ self.assertEqual(realpath('..', **kwargs), dirname(os.getcwd()))
+ self.assertEqual(realpath('../..', **kwargs), dirname(dirname(os.getcwd())))
+ self.assertEqual(realpath('/'.join(['..'] * 100), **kwargs), '/')
- self.assertEqual(realpath(b'..'), dirname(os.getcwdb()))
- self.assertEqual(realpath(b'../..'), dirname(dirname(os.getcwdb())))
- self.assertEqual(realpath(b'/'.join([b'..'] * 100)), b'/')
+ self.assertEqual(realpath(b'..', **kwargs), dirname(os.getcwdb()))
+ self.assertEqual(realpath(b'../..', **kwargs), dirname(dirname(os.getcwdb())))
+ self.assertEqual(realpath(b'/'.join([b'..'] * 100), **kwargs), b'/')
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_basic(self):
+ @_parameterize({}, {'strict': ALLOW_MISSING})
+ def test_realpath_basic(self, kwargs):
# Basic operation.
try:
os.symlink(ABSTFN+"1", ABSTFN)
- self.assertEqual(realpath(ABSTFN), ABSTFN+"1")
+ self.assertEqual(realpath(ABSTFN, **kwargs), ABSTFN+"1")
finally:
os_helper.unlink(ABSTFN)
@@ -487,90 +512,115 @@ class PosixPathTest(unittest.TestCase):
path = '/\x00'
self.assertRaises(ValueError, realpath, path, strict=False)
self.assertRaises(ValueError, realpath, path, strict=True)
+ self.assertRaises(ValueError, realpath, path, strict=ALLOW_MISSING)
path = b'/\x00'
self.assertRaises(ValueError, realpath, path, strict=False)
self.assertRaises(ValueError, realpath, path, strict=True)
+ self.assertRaises(ValueError, realpath, path, strict=ALLOW_MISSING)
path = '/nonexistent/x\x00'
self.assertRaises(ValueError, realpath, path, strict=False)
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
+ self.assertRaises(ValueError, realpath, path, strict=ALLOW_MISSING)
path = b'/nonexistent/x\x00'
self.assertRaises(ValueError, realpath, path, strict=False)
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
+ self.assertRaises(ValueError, realpath, path, strict=ALLOW_MISSING)
path = '/\x00/..'
self.assertRaises(ValueError, realpath, path, strict=False)
self.assertRaises(ValueError, realpath, path, strict=True)
+ self.assertRaises(ValueError, realpath, path, strict=ALLOW_MISSING)
path = b'/\x00/..'
self.assertRaises(ValueError, realpath, path, strict=False)
self.assertRaises(ValueError, realpath, path, strict=True)
+ self.assertRaises(ValueError, realpath, path, strict=ALLOW_MISSING)
+
path = '/nonexistent/x\x00/..'
self.assertRaises(ValueError, realpath, path, strict=False)
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
+ self.assertRaises(ValueError, realpath, path, strict=ALLOW_MISSING)
path = b'/nonexistent/x\x00/..'
self.assertRaises(ValueError, realpath, path, strict=False)
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
+ self.assertRaises(ValueError, realpath, path, strict=ALLOW_MISSING)
path = '/\udfff'
if sys.platform == 'win32':
self.assertEqual(realpath(path, strict=False), path)
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), path)
else:
self.assertRaises(UnicodeEncodeError, realpath, path, strict=False)
self.assertRaises(UnicodeEncodeError, realpath, path, strict=True)
+ self.assertRaises(UnicodeEncodeError, realpath, path, strict=ALLOW_MISSING)
path = '/nonexistent/\udfff'
if sys.platform == 'win32':
self.assertEqual(realpath(path, strict=False), path)
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), path)
else:
self.assertRaises(UnicodeEncodeError, realpath, path, strict=False)
+ self.assertRaises(UnicodeEncodeError, realpath, path, strict=ALLOW_MISSING)
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
path = '/\udfff/..'
if sys.platform == 'win32':
self.assertEqual(realpath(path, strict=False), '/')
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), '/')
else:
self.assertRaises(UnicodeEncodeError, realpath, path, strict=False)
self.assertRaises(UnicodeEncodeError, realpath, path, strict=True)
+ self.assertRaises(UnicodeEncodeError, realpath, path, strict=ALLOW_MISSING)
path = '/nonexistent/\udfff/..'
if sys.platform == 'win32':
self.assertEqual(realpath(path, strict=False), '/nonexistent')
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), '/nonexistent')
else:
self.assertRaises(UnicodeEncodeError, realpath, path, strict=False)
+ self.assertRaises(UnicodeEncodeError, realpath, path, strict=ALLOW_MISSING)
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
path = b'/\xff'
if sys.platform == 'win32':
self.assertRaises(UnicodeDecodeError, realpath, path, strict=False)
self.assertRaises(UnicodeDecodeError, realpath, path, strict=True)
+ self.assertRaises(UnicodeDecodeError, realpath, path, strict=ALLOW_MISSING)
else:
self.assertEqual(realpath(path, strict=False), path)
if support.is_wasi:
self.assertRaises(OSError, realpath, path, strict=True)
+ self.assertRaises(OSError, realpath, path, strict=ALLOW_MISSING)
else:
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
+ self.assertEqual(realpath(path, strict=ALLOW_MISSING), path)
path = b'/nonexistent/\xff'
if sys.platform == 'win32':
self.assertRaises(UnicodeDecodeError, realpath, path, strict=False)
+ self.assertRaises(UnicodeDecodeError, realpath, path, strict=ALLOW_MISSING)
else:
self.assertEqual(realpath(path, strict=False), path)
if support.is_wasi:
self.assertRaises(OSError, realpath, path, strict=True)
+ self.assertRaises(OSError, realpath, path, strict=ALLOW_MISSING)
else:
self.assertRaises(FileNotFoundError, realpath, path, strict=True)
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_relative(self):
+ @_parameterize({}, {'strict': ALLOW_MISSING})
+ def test_realpath_relative(self, kwargs):
try:
os.symlink(posixpath.relpath(ABSTFN+"1"), ABSTFN)
- self.assertEqual(realpath(ABSTFN), ABSTFN+"1")
+ self.assertEqual(realpath(ABSTFN, **kwargs), ABSTFN+"1")
finally:
os_helper.unlink(ABSTFN)
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_missing_pardir(self):
+ @_parameterize({}, {'strict': ALLOW_MISSING})
+ def test_realpath_missing_pardir(self, kwargs):
try:
os.symlink(TESTFN + "1", TESTFN)
- self.assertEqual(realpath("nonexistent/../" + TESTFN), ABSTFN + "1")
+ self.assertEqual(
+ realpath("nonexistent/../" + TESTFN, **kwargs), ABSTFN + "1")
finally:
os_helper.unlink(TESTFN)
@@ -617,37 +667,38 @@ class PosixPathTest(unittest.TestCase):
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_symlink_loops_strict(self):
+ @_parameterize({'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_symlink_loops_strict(self, kwargs):
# Bug #43757, raise OSError if we get into an infinite symlink loop in
- # strict mode.
+ # the strict modes.
try:
os.symlink(ABSTFN, ABSTFN)
- self.assertRaises(OSError, realpath, ABSTFN, strict=True)
+ self.assertRaises(OSError, realpath, ABSTFN, **kwargs)
os.symlink(ABSTFN+"1", ABSTFN+"2")
os.symlink(ABSTFN+"2", ABSTFN+"1")
- self.assertRaises(OSError, realpath, ABSTFN+"1", strict=True)
- self.assertRaises(OSError, realpath, ABSTFN+"2", strict=True)
+ self.assertRaises(OSError, realpath, ABSTFN+"1", **kwargs)
+ self.assertRaises(OSError, realpath, ABSTFN+"2", **kwargs)
- self.assertRaises(OSError, realpath, ABSTFN+"1/x", strict=True)
- self.assertRaises(OSError, realpath, ABSTFN+"1/..", strict=True)
- self.assertRaises(OSError, realpath, ABSTFN+"1/../x", strict=True)
+ self.assertRaises(OSError, realpath, ABSTFN+"1/x", **kwargs)
+ self.assertRaises(OSError, realpath, ABSTFN+"1/..", **kwargs)
+ self.assertRaises(OSError, realpath, ABSTFN+"1/../x", **kwargs)
os.symlink(ABSTFN+"x", ABSTFN+"y")
self.assertRaises(OSError, realpath,
- ABSTFN+"1/../" + basename(ABSTFN) + "y", strict=True)
+ ABSTFN+"1/../" + basename(ABSTFN) + "y", **kwargs)
self.assertRaises(OSError, realpath,
- ABSTFN+"1/../" + basename(ABSTFN) + "1", strict=True)
+ ABSTFN+"1/../" + basename(ABSTFN) + "1", **kwargs)
os.symlink(basename(ABSTFN) + "a/b", ABSTFN+"a")
- self.assertRaises(OSError, realpath, ABSTFN+"a", strict=True)
+ self.assertRaises(OSError, realpath, ABSTFN+"a", **kwargs)
os.symlink("../" + basename(dirname(ABSTFN)) + "/" +
basename(ABSTFN) + "c", ABSTFN+"c")
- self.assertRaises(OSError, realpath, ABSTFN+"c", strict=True)
+ self.assertRaises(OSError, realpath, ABSTFN+"c", **kwargs)
# Test using relative path as well.
with os_helper.change_cwd(dirname(ABSTFN)):
- self.assertRaises(OSError, realpath, basename(ABSTFN), strict=True)
+ self.assertRaises(OSError, realpath, basename(ABSTFN), **kwargs)
finally:
os_helper.unlink(ABSTFN)
os_helper.unlink(ABSTFN+"1")
@@ -658,13 +709,14 @@ class PosixPathTest(unittest.TestCase):
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_repeated_indirect_symlinks(self):
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_repeated_indirect_symlinks(self, kwargs):
# Issue #6975.
try:
os.mkdir(ABSTFN)
os.symlink('../' + basename(ABSTFN), ABSTFN + '/self')
os.symlink('self/self/self', ABSTFN + '/link')
- self.assertEqual(realpath(ABSTFN + '/link'), ABSTFN)
+ self.assertEqual(realpath(ABSTFN + '/link', **kwargs), ABSTFN)
finally:
os_helper.unlink(ABSTFN + '/self')
os_helper.unlink(ABSTFN + '/link')
@@ -672,14 +724,15 @@ class PosixPathTest(unittest.TestCase):
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_deep_recursion(self):
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_deep_recursion(self, kwargs):
depth = 10
try:
os.mkdir(ABSTFN)
for i in range(depth):
os.symlink('/'.join(['%d' % i] * 10), ABSTFN + '/%d' % (i + 1))
os.symlink('.', ABSTFN + '/0')
- self.assertEqual(realpath(ABSTFN + '/%d' % depth), ABSTFN)
+ self.assertEqual(realpath(ABSTFN + '/%d' % depth, **kwargs), ABSTFN)
# Test using relative path as well.
with os_helper.change_cwd(ABSTFN):
@@ -691,7 +744,8 @@ class PosixPathTest(unittest.TestCase):
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_resolve_parents(self):
+ @_parameterize({}, {'strict': ALLOW_MISSING})
+ def test_realpath_resolve_parents(self, kwargs):
# We also need to resolve any symlinks in the parents of a relative
# path passed to realpath. E.g.: current working directory is
# /usr/doc with 'doc' being a symlink to /usr/share/doc. We call
@@ -702,7 +756,8 @@ class PosixPathTest(unittest.TestCase):
os.symlink(ABSTFN + "/y", ABSTFN + "/k")
with os_helper.change_cwd(ABSTFN + "/k"):
- self.assertEqual(realpath("a"), ABSTFN + "/y/a")
+ self.assertEqual(realpath("a", **kwargs),
+ ABSTFN + "/y/a")
finally:
os_helper.unlink(ABSTFN + "/k")
os_helper.rmdir(ABSTFN + "/y")
@@ -710,7 +765,8 @@ class PosixPathTest(unittest.TestCase):
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_resolve_before_normalizing(self):
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_resolve_before_normalizing(self, kwargs):
# Bug #990669: Symbolic links should be resolved before we
# normalize the path. E.g.: if we have directories 'a', 'k' and 'y'
# in the following hierarchy:
@@ -725,10 +781,10 @@ class PosixPathTest(unittest.TestCase):
os.symlink(ABSTFN + "/k/y", ABSTFN + "/link-y")
# Absolute path.
- self.assertEqual(realpath(ABSTFN + "/link-y/.."), ABSTFN + "/k")
+ self.assertEqual(realpath(ABSTFN + "/link-y/..", **kwargs), ABSTFN + "/k")
# Relative path.
with os_helper.change_cwd(dirname(ABSTFN)):
- self.assertEqual(realpath(basename(ABSTFN) + "/link-y/.."),
+ self.assertEqual(realpath(basename(ABSTFN) + "/link-y/..", **kwargs),
ABSTFN + "/k")
finally:
os_helper.unlink(ABSTFN + "/link-y")
@@ -738,7 +794,8 @@ class PosixPathTest(unittest.TestCase):
@os_helper.skip_unless_symlink
@skip_if_ABSTFN_contains_backslash
- def test_realpath_resolve_first(self):
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_resolve_first(self, kwargs):
# Bug #1213894: The first component of the path, if not absolute,
# must be resolved too.
@@ -748,8 +805,8 @@ class PosixPathTest(unittest.TestCase):
os.symlink(ABSTFN, ABSTFN + "link")
with os_helper.change_cwd(dirname(ABSTFN)):
base = basename(ABSTFN)
- self.assertEqual(realpath(base + "link"), ABSTFN)
- self.assertEqual(realpath(base + "link/k"), ABSTFN + "/k")
+ self.assertEqual(realpath(base + "link", **kwargs), ABSTFN)
+ self.assertEqual(realpath(base + "link/k", **kwargs), ABSTFN + "/k")
finally:
os_helper.unlink(ABSTFN + "link")
os_helper.rmdir(ABSTFN + "/k")
@@ -767,12 +824,67 @@ class PosixPathTest(unittest.TestCase):
self.assertEqual(realpath(ABSTFN + '/foo'), ABSTFN + '/foo')
self.assertEqual(realpath(ABSTFN + '/../foo'), dirname(ABSTFN) + '/foo')
self.assertEqual(realpath(ABSTFN + '/foo/..'), ABSTFN)
- with self.assertRaises(PermissionError):
- realpath(ABSTFN, strict=True)
finally:
os.chmod(ABSTFN, 0o755, follow_symlinks=False)
os_helper.unlink(ABSTFN)
+ @os_helper.skip_unless_symlink
+ @skip_if_ABSTFN_contains_backslash
+ @unittest.skipIf(os.chmod not in os.supports_follow_symlinks, "Can't set symlink permissions")
+ @unittest.skipIf(sys.platform != "darwin", "only macOS requires read permission to readlink()")
+ @_parameterize({'strict': True}, {'strict': ALLOW_MISSING})
+ def test_realpath_unreadable_symlink_strict(self, kwargs):
+ try:
+ os.symlink(ABSTFN+"1", ABSTFN)
+ os.chmod(ABSTFN, 0o000, follow_symlinks=False)
+ with self.assertRaises(PermissionError):
+ realpath(ABSTFN, **kwargs)
+ with self.assertRaises(PermissionError):
+ realpath(ABSTFN + '/foo', **kwargs)
+ with self.assertRaises(PermissionError):
+ realpath(ABSTFN + '/../foo', **kwargs)
+ with self.assertRaises(PermissionError):
+ realpath(ABSTFN + '/foo/..', **kwargs)
+ finally:
+ os.chmod(ABSTFN, 0o755, follow_symlinks=False)
+ os.unlink(ABSTFN)
+
+ @skip_if_ABSTFN_contains_backslash
+ @os_helper.skip_unless_symlink
+ def test_realpath_unreadable_directory(self):
+ try:
+ os.mkdir(ABSTFN)
+ os.mkdir(ABSTFN + '/k')
+ os.chmod(ABSTFN, 0o000)
+ self.assertEqual(realpath(ABSTFN, strict=False), ABSTFN)
+ self.assertEqual(realpath(ABSTFN, strict=True), ABSTFN)
+ self.assertEqual(realpath(ABSTFN, strict=ALLOW_MISSING), ABSTFN)
+
+ try:
+ os.stat(ABSTFN)
+ except PermissionError:
+ pass
+ else:
+ self.skipTest('Cannot block permissions')
+
+ self.assertEqual(realpath(ABSTFN + '/k', strict=False),
+ ABSTFN + '/k')
+ self.assertRaises(PermissionError, realpath, ABSTFN + '/k',
+ strict=True)
+ self.assertRaises(PermissionError, realpath, ABSTFN + '/k',
+ strict=ALLOW_MISSING)
+
+ self.assertEqual(realpath(ABSTFN + '/missing', strict=False),
+ ABSTFN + '/missing')
+ self.assertRaises(PermissionError, realpath, ABSTFN + '/missing',
+ strict=True)
+ self.assertRaises(PermissionError, realpath, ABSTFN + '/missing',
+ strict=ALLOW_MISSING)
+ finally:
+ os.chmod(ABSTFN, 0o755)
+ os_helper.rmdir(ABSTFN + '/k')
+ os_helper.rmdir(ABSTFN)
+
@skip_if_ABSTFN_contains_backslash
def test_realpath_nonterminal_file(self):
try:
@@ -780,14 +892,27 @@ class PosixPathTest(unittest.TestCase):
f.write('test_posixpath wuz ere')
self.assertEqual(realpath(ABSTFN, strict=False), ABSTFN)
self.assertEqual(realpath(ABSTFN, strict=True), ABSTFN)
+ self.assertEqual(realpath(ABSTFN, strict=ALLOW_MISSING), ABSTFN)
+
self.assertEqual(realpath(ABSTFN + "/", strict=False), ABSTFN)
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/.", strict=False), ABSTFN)
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/..", strict=False), dirname(ABSTFN))
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/subdir", strict=False), ABSTFN + "/subdir")
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir",
+ strict=ALLOW_MISSING)
finally:
os_helper.unlink(ABSTFN)
@@ -800,14 +925,27 @@ class PosixPathTest(unittest.TestCase):
os.symlink(ABSTFN + "1", ABSTFN)
self.assertEqual(realpath(ABSTFN, strict=False), ABSTFN + "1")
self.assertEqual(realpath(ABSTFN, strict=True), ABSTFN + "1")
+ self.assertEqual(realpath(ABSTFN, strict=ALLOW_MISSING), ABSTFN + "1")
+
self.assertEqual(realpath(ABSTFN + "/", strict=False), ABSTFN + "1")
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/.", strict=False), ABSTFN + "1")
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/..", strict=False), dirname(ABSTFN))
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/subdir", strict=False), ABSTFN + "1/subdir")
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir",
+ strict=ALLOW_MISSING)
finally:
os_helper.unlink(ABSTFN)
os_helper.unlink(ABSTFN + "1")
@@ -822,14 +960,27 @@ class PosixPathTest(unittest.TestCase):
os.symlink(ABSTFN + "1", ABSTFN)
self.assertEqual(realpath(ABSTFN, strict=False), ABSTFN + "2")
self.assertEqual(realpath(ABSTFN, strict=True), ABSTFN + "2")
+ self.assertEqual(realpath(ABSTFN, strict=ALLOW_MISSING), ABSTFN + "2")
+
self.assertEqual(realpath(ABSTFN + "/", strict=False), ABSTFN + "2")
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/.", strict=False), ABSTFN + "2")
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/.",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/..", strict=False), dirname(ABSTFN))
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/..",
+ strict=ALLOW_MISSING)
+
self.assertEqual(realpath(ABSTFN + "/subdir", strict=False), ABSTFN + "2/subdir")
self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir", strict=True)
+ self.assertRaises(NotADirectoryError, realpath, ABSTFN + "/subdir",
+ strict=ALLOW_MISSING)
finally:
os_helper.unlink(ABSTFN)
os_helper.unlink(ABSTFN + "1")
@@ -1017,9 +1168,12 @@ class PathLikeTests(unittest.TestCase):
def test_path_abspath(self):
self.assertPathEqual(self.path.abspath)
- def test_path_realpath(self):
+ @_parameterize({}, {'strict': True}, {'strict': ALLOW_MISSING})
+ def test_path_realpath(self, kwargs):
self.assertPathEqual(self.path.realpath)
+ self.assertPathEqual(partial(self.path.realpath, **kwargs))
+
def test_path_relpath(self):
self.assertPathEqual(self.path.relpath)
diff --git a/Lib/test/test_pyrepl/test_pyrepl.py b/Lib/test/test_pyrepl/test_pyrepl.py
index aa3a592766d..98bae7dd703 100644
--- a/Lib/test/test_pyrepl/test_pyrepl.py
+++ b/Lib/test/test_pyrepl/test_pyrepl.py
@@ -1672,6 +1672,17 @@ class TestMain(ReplTestCase):
self.assertEqual(exit_code, 0)
self.assertNotIn("TypeError", output)
+ @force_not_colorized
+ def test_non_string_suggestion_candidates(self):
+ commands = ("import runpy\n"
+ "runpy._run_module_code('blech', {0: '', 'bluch': ''}, '')\n"
+ "exit()\n")
+
+ output, exit_code = self.run_repl(commands)
+ self.assertEqual(exit_code, 0)
+ self.assertNotIn("all elements in 'candidates' must be strings", output)
+ self.assertIn("bluch", output)
+
def test_readline_history_file(self):
# skip, if readline module is not available
readline = import_module('readline')
diff --git a/Lib/test/test_queue.py b/Lib/test/test_queue.py
index 7f4fe357034..c855fb8fe2b 100644
--- a/Lib/test/test_queue.py
+++ b/Lib/test/test_queue.py
@@ -6,7 +6,7 @@ import threading
import time
import unittest
import weakref
-from test.support import gc_collect
+from test.support import gc_collect, bigmemtest
from test.support import import_helper
from test.support import threading_helper
@@ -963,33 +963,33 @@ class BaseSimpleQueueTest:
# One producer, one consumer => results appended in well-defined order
self.assertEqual(results, inputs)
- def test_many_threads(self):
+ @bigmemtest(size=50, memuse=100*2**20, dry_run=False)
+ def test_many_threads(self, size):
# Test multiple concurrent put() and get()
- N = 50
q = self.q
inputs = list(range(10000))
- results = self.run_threads(N, q, inputs, self.feed, self.consume)
+ results = self.run_threads(size, q, inputs, self.feed, self.consume)
# Multiple consumers without synchronization append the
# results in random order
self.assertEqual(sorted(results), inputs)
- def test_many_threads_nonblock(self):
+ @bigmemtest(size=50, memuse=100*2**20, dry_run=False)
+ def test_many_threads_nonblock(self, size):
# Test multiple concurrent put() and get(block=False)
- N = 50
q = self.q
inputs = list(range(10000))
- results = self.run_threads(N, q, inputs,
+ results = self.run_threads(size, q, inputs,
self.feed, self.consume_nonblock)
self.assertEqual(sorted(results), inputs)
- def test_many_threads_timeout(self):
+ @bigmemtest(size=50, memuse=100*2**20, dry_run=False)
+ def test_many_threads_timeout(self, size):
# Test multiple concurrent put() and get(timeout=...)
- N = 50
q = self.q
inputs = list(range(1000))
- results = self.run_threads(N, q, inputs,
+ results = self.run_threads(size, q, inputs,
self.feed, self.consume_timeout)
self.assertEqual(sorted(results), inputs)
diff --git a/Lib/test/test_random.py b/Lib/test/test_random.py
index bd76d636e4f..54910cd8054 100644
--- a/Lib/test/test_random.py
+++ b/Lib/test/test_random.py
@@ -392,6 +392,8 @@ class TestBasicOps:
self.assertRaises(TypeError, self.gen.getrandbits)
self.assertRaises(TypeError, self.gen.getrandbits, 1, 2)
self.assertRaises(ValueError, self.gen.getrandbits, -1)
+ self.assertRaises(OverflowError, self.gen.getrandbits, 1<<1000)
+ self.assertRaises(ValueError, self.gen.getrandbits, -1<<1000)
self.assertRaises(TypeError, self.gen.getrandbits, 10.1)
def test_pickling(self):
@@ -435,6 +437,8 @@ class TestBasicOps:
self.assertRaises(TypeError, self.gen.randbytes)
self.assertRaises(TypeError, self.gen.randbytes, 1, 2)
self.assertRaises(ValueError, self.gen.randbytes, -1)
+ self.assertRaises(OverflowError, self.gen.randbytes, 1<<1000)
+ self.assertRaises((ValueError, OverflowError), self.gen.randbytes, -1<<1000)
self.assertRaises(TypeError, self.gen.randbytes, 1.0)
def test_mu_sigma_default_args(self):
@@ -806,6 +810,22 @@ class MersenneTwister_TestBasicOps(TestBasicOps, unittest.TestCase):
self.assertEqual(self.gen.getrandbits(100),
97904845777343510404718956115)
+ def test_getrandbits_2G_bits(self):
+ size = 2**31
+ self.gen.seed(1234567)
+ x = self.gen.getrandbits(size)
+ self.assertEqual(x.bit_length(), size)
+ self.assertEqual(x & (2**100-1), 890186470919986886340158459475)
+ self.assertEqual(x >> (size-100), 1226514312032729439655761284440)
+
+ @support.bigmemtest(size=2**32, memuse=1/8+2/15, dry_run=False)
+ def test_getrandbits_4G_bits(self, size):
+ self.gen.seed(1234568)
+ x = self.gen.getrandbits(size)
+ self.assertEqual(x.bit_length(), size)
+ self.assertEqual(x & (2**100-1), 287241425661104632871036099814)
+ self.assertEqual(x >> (size-100), 739728759900339699429794460738)
+
def test_randrange_uses_getrandbits(self):
# Verify use of getrandbits by randrange
# Use same seed as in the cross-platform repeatability test
@@ -962,6 +982,14 @@ class MersenneTwister_TestBasicOps(TestBasicOps, unittest.TestCase):
self.assertEqual(self.gen.randbytes(n),
gen2.getrandbits(n * 8).to_bytes(n, 'little'))
+ @support.bigmemtest(size=2**29, memuse=1+16/15, dry_run=False)
+ def test_randbytes_256M(self, size):
+ self.gen.seed(2849427419)
+ x = self.gen.randbytes(size)
+ self.assertEqual(len(x), size)
+ self.assertEqual(x[:12].hex(), 'f6fd9ae63855ab91ea238b4f')
+ self.assertEqual(x[-12:].hex(), '0e7af69a84ee99bf4a11becc')
+
def test_sample_counts_equivalence(self):
# Test the documented strong equivalence to a sample with repeated elements.
# We run this test on random.Random() which makes deterministic selections
diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py
index 8f4fc09442e..a43d2678ebd 100644
--- a/Lib/test/test_regrtest.py
+++ b/Lib/test/test_regrtest.py
@@ -768,13 +768,16 @@ class BaseTestCase(unittest.TestCase):
self.fail(msg)
return proc
- def run_python(self, args, **kw):
+ def run_python(self, args, isolated=True, **kw):
extraargs = []
if 'uops' in sys._xoptions:
# Pass -X uops along
extraargs.extend(['-X', 'uops'])
- args = [sys.executable, *extraargs, '-X', 'faulthandler', '-I', *args]
- proc = self.run_command(args, **kw)
+ cmd = [sys.executable, *extraargs, '-X', 'faulthandler']
+ if isolated:
+ cmd.append('-I')
+ cmd.extend(args)
+ proc = self.run_command(cmd, **kw)
return proc.stdout
@@ -831,8 +834,8 @@ class ProgramsTestCase(BaseTestCase):
self.check_executed_tests(output, self.tests,
randomize=True, stats=len(self.tests))
- def run_tests(self, args, env=None):
- output = self.run_python(args, env=env)
+ def run_tests(self, args, env=None, isolated=True):
+ output = self.run_python(args, env=env, isolated=isolated)
self.check_output(output)
def test_script_regrtest(self):
@@ -2067,7 +2070,7 @@ class ArgsTestCase(BaseTestCase):
self.check_executed_tests(output, [testname],
failed=[testname],
parallel=True,
- stats=TestStats(1, 1, 0))
+ stats=TestStats(1, 2, 1))
def _check_random_seed(self, run_workers: bool):
# gh-109276: When -r/--randomize is used, random.seed() is called
@@ -2276,7 +2279,6 @@ class ArgsTestCase(BaseTestCase):
def test_xml(self):
code = textwrap.dedent(r"""
import unittest
- from test import support
class VerboseTests(unittest.TestCase):
def test_failed(self):
@@ -2311,6 +2313,39 @@ class ArgsTestCase(BaseTestCase):
for out in testcase.iter('system-out'):
self.assertEqual(out.text, r"abc \x1b def")
+ def test_nonascii(self):
+ code = textwrap.dedent(r"""
+ import unittest
+
+ class NonASCIITests(unittest.TestCase):
+ def test_docstring(self):
+ '''docstring:\u20ac'''
+
+ def test_subtest(self):
+ with self.subTest(param='subtest:\u20ac'):
+ pass
+
+ def test_skip(self):
+ self.skipTest('skipped:\u20ac')
+ """)
+ testname = self.create_test(code=code)
+
+ env = dict(os.environ)
+ env['PYTHONIOENCODING'] = 'ascii'
+
+ def check(output):
+ self.check_executed_tests(output, testname, stats=TestStats(3, 0, 1))
+ self.assertIn(r'docstring:\u20ac', output)
+ self.assertIn(r'skipped:\u20ac', output)
+
+ # Run sequentially
+ output = self.run_tests('-v', testname, env=env, isolated=False)
+ check(output)
+
+ # Run in parallel
+ output = self.run_tests('-j1', '-v', testname, env=env, isolated=False)
+ check(output)
+
class TestUtils(unittest.TestCase):
def test_format_duration(self):
diff --git a/Lib/test/test_ssl.py b/Lib/test/test_ssl.py
index 2767a53d53c..f123f6ece40 100644
--- a/Lib/test/test_ssl.py
+++ b/Lib/test/test_ssl.py
@@ -31,6 +31,7 @@ import weakref
import platform
import sysconfig
import functools
+from contextlib import nullcontext
try:
import ctypes
except ImportError:
@@ -2843,6 +2844,7 @@ class ThreadedTests(unittest.TestCase):
# See GH-124984: OpenSSL is not thread safe.
threads = []
+ warnings_filters = sys.flags.context_aware_warnings
global USE_SAME_TEST_CONTEXT
USE_SAME_TEST_CONTEXT = True
try:
@@ -2851,7 +2853,10 @@ class ThreadedTests(unittest.TestCase):
self.test_alpn_protocols,
self.test_getpeercert,
self.test_crl_check,
- self.test_check_hostname_idn,
+ functools.partial(
+ self.test_check_hostname_idn,
+ warnings_filters=warnings_filters,
+ ),
self.test_wrong_cert_tls12,
self.test_wrong_cert_tls13,
):
@@ -3097,7 +3102,7 @@ class ThreadedTests(unittest.TestCase):
cipher = s.cipher()[0].split('-')
self.assertTrue(cipher[:2], ('ECDHE', 'ECDSA'))
- def test_check_hostname_idn(self):
+ def test_check_hostname_idn(self, warnings_filters=True):
if support.verbose:
sys.stdout.write("\n")
@@ -3152,16 +3157,30 @@ class ThreadedTests(unittest.TestCase):
server_hostname="python.example.org") as s:
with self.assertRaises(ssl.CertificateError):
s.connect((HOST, server.port))
- with ThreadedEchoServer(context=server_context, chatty=True) as server:
- with warnings_helper.check_no_resource_warning(self):
- with self.assertRaises(UnicodeError):
- context.wrap_socket(socket.socket(),
- server_hostname='.pythontest.net')
- with ThreadedEchoServer(context=server_context, chatty=True) as server:
- with warnings_helper.check_no_resource_warning(self):
- with self.assertRaises(UnicodeDecodeError):
- context.wrap_socket(socket.socket(),
- server_hostname=b'k\xf6nig.idn.pythontest.net')
+ with (
+ ThreadedEchoServer(context=server_context, chatty=True) as server,
+ (
+ warnings_helper.check_no_resource_warning(self)
+ if warnings_filters
+ else nullcontext()
+ ),
+ self.assertRaises(UnicodeError),
+ ):
+ context.wrap_socket(socket.socket(), server_hostname='.pythontest.net')
+
+ with (
+ ThreadedEchoServer(context=server_context, chatty=True) as server,
+ (
+ warnings_helper.check_no_resource_warning(self)
+ if warnings_filters
+ else nullcontext()
+ ),
+ self.assertRaises(UnicodeDecodeError),
+ ):
+ context.wrap_socket(
+ socket.socket(),
+ server_hostname=b'k\xf6nig.idn.pythontest.net',
+ )
def test_wrong_cert_tls12(self):
"""Connecting when the server rejects the client's certificate
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index 795d1ecbb59..bf415894903 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -729,7 +729,7 @@ class SysModuleTest(unittest.TestCase):
info = sys.thread_info
self.assertEqual(len(info), 3)
self.assertIn(info.name, ('nt', 'pthread', 'pthread-stubs', 'solaris', None))
- self.assertIn(info.lock, ('semaphore', 'mutex+cond', None))
+ self.assertIn(info.lock, ('pymutex', None))
if sys.platform.startswith(("linux", "android", "freebsd")):
self.assertEqual(info.name, "pthread")
elif sys.platform == "win32":
@@ -1135,23 +1135,12 @@ class SysModuleTest(unittest.TestCase):
b = sys.getallocatedblocks()
self.assertLessEqual(b, a)
try:
- # While we could imagine a Python session where the number of
- # multiple buffer objects would exceed the sharing of references,
- # it is unlikely to happen in a normal test run.
- #
- # In free-threaded builds each code object owns an array of
- # pointers to copies of the bytecode. When the number of
- # code objects is a large fraction of the total number of
- # references, this can cause the total number of allocated
- # blocks to exceed the total number of references.
- #
- # For some reason, iOS seems to trigger the "unlikely to happen"
- # case reliably under CI conditions. It's not clear why; but as
- # this test is checking the behavior of getallocatedblock()
- # under garbage collection, we can skip this pre-condition check
- # for now. See GH-130384.
- if not support.Py_GIL_DISABLED and not support.is_apple_mobile:
- self.assertLess(a, sys.gettotalrefcount())
+ # The reported blocks will include immortalized strings, but the
+ # total ref count will not. This will sanity check that among all
+ # other objects (those eligible for garbage collection) there
+ # are more references being tracked than allocated blocks.
+ interned_immortal = sys.getunicodeinternedsize(_only_immortal=True)
+ self.assertLess(a - interned_immortal, sys.gettotalrefcount())
except AttributeError:
# gettotalrefcount() not available
pass
@@ -1299,6 +1288,7 @@ class SysModuleTest(unittest.TestCase):
for name in sys.stdlib_module_names:
self.assertIsInstance(name, str)
+ @unittest.skipUnless(hasattr(sys, '_stdlib_dir'), 'need sys._stdlib_dir')
def test_stdlib_dir(self):
os = import_helper.import_fresh_module('os')
marker = getattr(os, '__file__', None)
diff --git a/Lib/test/test_tarfile.py b/Lib/test/test_tarfile.py
index cf218a2bf14..7055e1ed147 100644
--- a/Lib/test/test_tarfile.py
+++ b/Lib/test/test_tarfile.py
@@ -2715,6 +2715,31 @@ class MiscTest(unittest.TestCase):
str(excinfo.exception),
)
+ @unittest.skipUnless(os_helper.can_symlink(), 'requires symlink support')
+ @unittest.skipUnless(hasattr(os, 'chmod'), "missing os.chmod")
+ @unittest.mock.patch('os.chmod')
+ def test_deferred_directory_attributes_update(self, mock_chmod):
+ # Regression test for gh-127987: setting attributes on arbitrary files
+ tempdir = os.path.join(TEMPDIR, 'test127987')
+ def mock_chmod_side_effect(path, mode, **kwargs):
+ target_path = os.path.realpath(path)
+ if os.path.commonpath([target_path, tempdir]) != tempdir:
+ raise Exception("should not try to chmod anything outside the destination", target_path)
+ mock_chmod.side_effect = mock_chmod_side_effect
+
+ outside_tree_dir = os.path.join(TEMPDIR, 'outside_tree_dir')
+ with ArchiveMaker() as arc:
+ arc.add('x', symlink_to='.')
+ arc.add('x', type=tarfile.DIRTYPE, mode='?rwsrwsrwt')
+ arc.add('x', symlink_to=outside_tree_dir)
+
+ os.makedirs(outside_tree_dir)
+ try:
+ arc.open().extractall(path=tempdir, filter='tar')
+ finally:
+ os_helper.rmtree(outside_tree_dir)
+ os_helper.rmtree(tempdir)
+
class CommandLineTest(unittest.TestCase):
@@ -3275,6 +3300,10 @@ class NoneInfoExtractTests(ReadTest):
got_paths = set(
p.relative_to(directory)
for p in pathlib.Path(directory).glob('**/*'))
+ if self.extraction_filter in (None, 'data'):
+ # The 'data' filter is expected to reject special files
+ for path in 'ustar/fifotype', 'ustar/blktype', 'ustar/chrtype':
+ got_paths.discard(pathlib.Path(path))
self.assertEqual(self.control_paths, got_paths)
@contextmanager
@@ -3504,12 +3533,28 @@ class ArchiveMaker:
self.bio = None
def add(self, name, *, type=None, symlink_to=None, hardlink_to=None,
- mode=None, size=None, **kwargs):
- """Add a member to the test archive. Call within `with`."""
+ mode=None, size=None, content=None, **kwargs):
+ """Add a member to the test archive. Call within `with`.
+
+ Provides many shortcuts:
+ - default `type` is based on symlink_to, hardlink_to, and trailing `/`
+ in name (which is stripped)
+ - size & content defaults are based on each other
+ - content can be str or bytes
+ - mode should be textual ('-rwxrwxrwx')
+
+ (add more! this is unstable internal test-only API)
+ """
name = str(name)
tarinfo = tarfile.TarInfo(name).replace(**kwargs)
+ if content is not None:
+ if isinstance(content, str):
+ content = content.encode()
+ size = len(content)
if size is not None:
tarinfo.size = size
+ if content is None:
+ content = bytes(tarinfo.size)
if mode:
tarinfo.mode = _filemode_to_int(mode)
if symlink_to is not None:
@@ -3523,7 +3568,7 @@ class ArchiveMaker:
if type is not None:
tarinfo.type = type
if tarinfo.isreg():
- fileobj = io.BytesIO(bytes(tarinfo.size))
+ fileobj = io.BytesIO(content)
else:
fileobj = None
self.tar_w.addfile(tarinfo, fileobj)
@@ -3557,7 +3602,7 @@ class TestExtractionFilters(unittest.TestCase):
destdir = outerdir / 'dest'
@contextmanager
- def check_context(self, tar, filter):
+ def check_context(self, tar, filter, *, check_flag=True):
"""Extracts `tar` to `self.destdir` and allows checking the result
If an error occurs, it must be checked using `expect_exception`
@@ -3566,27 +3611,40 @@ class TestExtractionFilters(unittest.TestCase):
except the destination directory itself and parent directories of
other files.
When checking directories, do so before their contents.
+
+ A file called 'flag' is made in outerdir (i.e. outside destdir)
+ before extraction; it should not be altered nor should its contents
+ be read/copied.
"""
with os_helper.temp_dir(self.outerdir):
+ flag_path = self.outerdir / 'flag'
+ flag_path.write_text('capture me')
try:
tar.extractall(self.destdir, filter=filter)
except Exception as exc:
self.raised_exception = exc
+ self.reraise_exception = True
self.expected_paths = set()
else:
self.raised_exception = None
+ self.reraise_exception = False
self.expected_paths = set(self.outerdir.glob('**/*'))
self.expected_paths.discard(self.destdir)
+ self.expected_paths.discard(flag_path)
try:
- yield
+ yield self
finally:
tar.close()
- if self.raised_exception:
+ if self.reraise_exception:
raise self.raised_exception
self.assertEqual(self.expected_paths, set())
+ if check_flag:
+ self.assertEqual(flag_path.read_text(), 'capture me')
+ else:
+ assert filter == 'fully_trusted'
def expect_file(self, name, type=None, symlink_to=None, mode=None,
- size=None):
+ size=None, content=None):
"""Check a single file. See check_context."""
if self.raised_exception:
raise self.raised_exception
@@ -3605,26 +3663,45 @@ class TestExtractionFilters(unittest.TestCase):
# The symlink might be the same (textually) as what we expect,
# but some systems change the link to an equivalent path, so
# we fall back to samefile().
- if expected != got:
- self.assertTrue(got.samefile(expected))
+ try:
+ if expected != got:
+ self.assertTrue(got.samefile(expected))
+ except Exception as e:
+ # attach a note, so it's shown even if `samefile` fails
+ e.add_note(f'{expected=}, {got=}')
+ raise
elif type == tarfile.REGTYPE or type is None:
self.assertTrue(path.is_file())
elif type == tarfile.DIRTYPE:
self.assertTrue(path.is_dir())
elif type == tarfile.FIFOTYPE:
self.assertTrue(path.is_fifo())
+ elif type == tarfile.SYMTYPE:
+ self.assertTrue(path.is_symlink())
else:
raise NotImplementedError(type)
if size is not None:
self.assertEqual(path.stat().st_size, size)
+ if content is not None:
+ self.assertEqual(path.read_text(), content)
for parent in path.parents:
self.expected_paths.discard(parent)
+ def expect_any_tree(self, name):
+ """Check a directory; forget about its contents."""
+ tree_path = (self.destdir / name).resolve()
+ self.expect_file(tree_path, type=tarfile.DIRTYPE)
+ self.expected_paths = {
+ p for p in self.expected_paths
+ if tree_path not in p.parents
+ }
+
def expect_exception(self, exc_type, message_re='.'):
with self.assertRaisesRegex(exc_type, message_re):
if self.raised_exception is not None:
raise self.raised_exception
- self.raised_exception = None
+ self.reraise_exception = False
+ return self.raised_exception
def test_benign_file(self):
with ArchiveMaker() as arc:
@@ -3710,6 +3787,80 @@ class TestExtractionFilters(unittest.TestCase):
self.expect_file('parent/evil')
@symlink_test
+ @os_helper.skip_unless_symlink
+ def test_realpath_limit_attack(self):
+ # (CVE-2025-4517)
+
+ with ArchiveMaker() as arc:
+ # populate the symlinks and dirs that expand in os.path.realpath()
+ # The component length is chosen so that in common cases, the unexpanded
+ # path fits in PATH_MAX, but it overflows when the final symlink
+ # path fits in PATH_MAX, but it overflows when the final symlink is expanded
+ steps = "abcdefghijklmnop"
+ if sys.platform == 'win32':
+ component = 'd' * 25
+ elif 'PC_PATH_MAX' in os.pathconf_names:
+ max_path_len = os.pathconf(self.outerdir.parent, "PC_PATH_MAX")
+ path_sep_len = 1
+ dest_len = len(str(self.destdir)) + path_sep_len
+ component_len = (max_path_len - dest_len) // (len(steps) + path_sep_len)
+ component = 'd' * component_len
+ else:
+ raise NotImplementedError(f"Need to guess component length for {sys.platform}")
+ path = ""
+ step_path = ""
+ for i in steps:
+ arc.add(os.path.join(path, component), type=tarfile.DIRTYPE,
+ mode='drwxrwxrwx')
+ arc.add(os.path.join(path, i), symlink_to=component)
+ path = os.path.join(path, component)
+ step_path = os.path.join(step_path, i)
+ # create the final symlink that exceeds PATH_MAX and simply points
+ # to the top dir.
+ # this link will never be expanded by
+ # os.path.realpath(strict=False), nor anything after it.
+ linkpath = os.path.join(*steps, "l"*254)
+ parent_segments = [".."] * len(steps)
+ arc.add(linkpath, symlink_to=os.path.join(*parent_segments))
+ # make a symlink outside to keep the tar command happy
+ arc.add("escape", symlink_to=os.path.join(linkpath, ".."))
+ # use the symlinks above, that are not checked, to create a hardlink
+ # to a file outside of the destination path
+ arc.add("flaglink", hardlink_to=os.path.join("escape", "flag"))
+ # now that we have the hardlink we can overwrite the file
+ arc.add("flaglink", content='overwrite')
+ # we can also create new files as well!
+ arc.add("escape/newfile", content='new')
+
+ with (self.subTest('fully_trusted'),
+ self.check_context(arc.open(), filter='fully_trusted',
+ check_flag=False)):
+ if sys.platform == 'win32':
+ self.expect_exception((FileNotFoundError, FileExistsError))
+ elif self.raised_exception:
+ # Cannot symlink/hardlink: tarfile falls back to getmember()
+ self.expect_exception(KeyError)
+ # Otherwise, this branch should never be reached.
+ else:
+ self.expect_any_tree(component)
+ self.expect_file('flaglink', content='overwrite')
+ self.expect_file('../newfile', content='new')
+ self.expect_file('escape', type=tarfile.SYMTYPE)
+ self.expect_file('a', symlink_to=component)
+
+ for filter in 'tar', 'data':
+ with self.subTest(filter), self.check_context(arc.open(), filter=filter):
+ exc = self.expect_exception((OSError, KeyError))
+ if isinstance(exc, OSError):
+ if sys.platform == 'win32':
+ # 3: ERROR_PATH_NOT_FOUND
+ # 5: ERROR_ACCESS_DENIED
+ # 206: ERROR_FILENAME_EXCED_RANGE
+ self.assertIn(exc.winerror, (3, 5, 206))
+ else:
+ self.assertEqual(exc.errno, errno.ENAMETOOLONG)
+
+ @symlink_test
def test_parent_symlink2(self):
# Test interplaying symlinks
# Inspired by 'dirsymlink2b' in jwilk/traversal-archives
@@ -3931,8 +4082,8 @@ class TestExtractionFilters(unittest.TestCase):
arc.add('symlink2', symlink_to=os.path.join(
'linkdir', 'hardlink2'))
arc.add('targetdir/target', size=3)
- arc.add('linkdir/hardlink', hardlink_to='targetdir/target')
- arc.add('linkdir/hardlink2', hardlink_to='linkdir/symlink')
+ arc.add('linkdir/hardlink', hardlink_to=os.path.join('targetdir', 'target'))
+ arc.add('linkdir/hardlink2', hardlink_to=os.path.join('linkdir', 'symlink'))
for filter in 'tar', 'data', 'fully_trusted':
with self.check_context(arc.open(), filter):
@@ -3948,6 +4099,129 @@ class TestExtractionFilters(unittest.TestCase):
self.expect_file('linkdir/symlink', size=3)
self.expect_file('symlink2', size=3)
+ @symlink_test
+ def test_sneaky_hardlink_fallback(self):
+ # (CVE-2025-4330)
+ # Test that when hardlink extraction falls back to extracting members
+ # from the archive, the extracted member is (re-)filtered.
+ with ArchiveMaker() as arc:
+ # Create a directory structure so the c/escape symlink stays
+ # inside the path
+ arc.add("a/t/dummy")
+ # Create b/ directory
+ arc.add("b/")
+ # Point "c" to the bottom of the tree in "a"
+ arc.add("c", symlink_to=os.path.join("a", "t"))
+ # link to a non-existent location under "a"
+ arc.add("c/escape", symlink_to=os.path.join("..", "..",
+ "link_here"))
+ # Move "c" to point to "b" ("c/escape" no longer exists)
+ arc.add("c", symlink_to="b")
+ # Attempt to create a hard link to "c/escape". Since it doesn't
+ # exist, it will attempt to extract "c/escape" but at "boom".
+ arc.add("boom", hardlink_to=os.path.join("c", "escape"))
+
+ with self.check_context(arc.open(), 'data'):
+ if not os_helper.can_symlink():
+ # When 'c/escape' is extracted, 'c' is a regular
+ # directory, and 'c/escape' *would* point outside
+ # the destination if symlinks were allowed.
+ self.expect_exception(
+ tarfile.LinkOutsideDestinationError)
+ elif sys.platform == "win32":
+ # On Windows, 'c/escape' points outside the destination
+ self.expect_exception(tarfile.LinkOutsideDestinationError)
+ else:
+ e = self.expect_exception(
+ tarfile.LinkFallbackError,
+ "link 'boom' would be extracted as a copy of "
+ + "'c/escape', which was rejected")
+ self.assertIsInstance(e.__cause__,
+ tarfile.LinkOutsideDestinationError)
+ for filter in 'tar', 'fully_trusted':
+ with self.subTest(filter), self.check_context(arc.open(), filter):
+ if not os_helper.can_symlink():
+ self.expect_file("a/t/dummy")
+ self.expect_file("b/")
+ self.expect_file("c/")
+ else:
+ self.expect_file("a/t/dummy")
+ self.expect_file("b/")
+ self.expect_file("a/t/escape", symlink_to='../../link_here')
+ self.expect_file("boom", symlink_to='../../link_here')
+ self.expect_file("c", symlink_to='b')
+
+ @symlink_test
+ def test_exfiltration_via_symlink(self):
+ # (CVE-2025-4138)
+ # Test changing symlinks that result in a symlink pointing outside
+ # the extraction directory, unless prevented by 'data' filter's
+ # normalization.
+ with ArchiveMaker() as arc:
+ arc.add("escape", symlink_to=os.path.join('link', 'link', '..', '..', 'link-here'))
+ arc.add("link", symlink_to='./')
+
+ for filter in 'tar', 'data', 'fully_trusted':
+ with self.check_context(arc.open(), filter):
+ if os_helper.can_symlink():
+ self.expect_file("link", symlink_to='./')
+ if filter == 'data':
+ self.expect_file("escape", symlink_to='link-here')
+ else:
+ self.expect_file("escape",
+ symlink_to='link/link/../../link-here')
+ else:
+ # Nothing is extracted.
+ pass
+
+ @symlink_test
+ def test_chmod_outside_dir(self):
+ # (CVE-2024-12718)
+ # Test that members used for delayed updates of directory metadata
+ # are (re-)filtered.
+ with ArchiveMaker() as arc:
+ # "pwn" is a veeeery innocent symlink:
+ arc.add("a/pwn", symlink_to='.')
+ # But now "pwn" is also a directory, so it's scheduled to have its
+ # metadata updated later:
+ arc.add("a/pwn/", mode='drwxrwxrwx')
+ # Oops, "pwn" is not so innocent any more:
+ arc.add("a/pwn", symlink_to='x/../')
+ # Newly created symlink points to the dest dir,
+ # so it's OK for the "data" filter.
+ arc.add('a/x', symlink_to=('../'))
+ # But now "pwn" points outside the dest dir
+
+ for filter in 'tar', 'data', 'fully_trusted':
+ with self.check_context(arc.open(), filter) as cc:
+ if not os_helper.can_symlink():
+ self.expect_file("a/pwn/")
+ elif filter == 'data':
+ self.expect_file("a/x", symlink_to='../')
+ self.expect_file("a/pwn", symlink_to='.')
+ else:
+ self.expect_file("a/x", symlink_to='../')
+ self.expect_file("a/pwn", symlink_to='x/../')
+ if sys.platform != "win32":
+ st_mode = cc.outerdir.stat().st_mode
+ self.assertNotEqual(st_mode & 0o777, 0o777)
+
+ def test_link_fallback_normalizes(self):
+ # Make sure hardlink fallbacks work for non-normalized paths for all
+ # filters
+ with ArchiveMaker() as arc:
+ arc.add("dir/")
+ arc.add("dir/../afile")
+ arc.add("link1", hardlink_to='dir/../afile')
+ arc.add("link2", hardlink_to='dir/../dir/../afile')
+
+ for filter in 'tar', 'data', 'fully_trusted':
+ with self.check_context(arc.open(), filter) as cc:
+ self.expect_file("dir/")
+ self.expect_file("afile")
+ self.expect_file("link1")
+ self.expect_file("link2")
+
def test_modes(self):
# Test how file modes are extracted
# (Note that the modes are ignored on platforms without working chmod)
@@ -4072,7 +4346,7 @@ class TestExtractionFilters(unittest.TestCase):
# The 'tar' filter returns TarInfo objects with the same name/type.
# (It can also fail for particularly "evil" input, but we don't have
# that in the test archive.)
- with tarfile.TarFile.open(tarname) as tar:
+ with tarfile.TarFile.open(tarname, encoding="iso8859-1") as tar:
for tarinfo in tar.getmembers():
try:
filtered = tarfile.tar_filter(tarinfo, '')
@@ -4084,7 +4358,7 @@ class TestExtractionFilters(unittest.TestCase):
def test_data_filter(self):
# The 'data' filter either raises, or returns TarInfo with the same
# name/type.
- with tarfile.TarFile.open(tarname) as tar:
+ with tarfile.TarFile.open(tarname, encoding="iso8859-1") as tar:
for tarinfo in tar.getmembers():
try:
filtered = tarfile.data_filter(tarinfo, '')
@@ -4242,13 +4516,13 @@ class TestExtractionFilters(unittest.TestCase):
# If errorlevel is 0, errors affected by errorlevel are ignored
with self.check_context(arc.open(errorlevel=0), extracterror_filter):
- self.expect_file('file')
+ pass
with self.check_context(arc.open(errorlevel=0), filtererror_filter):
- self.expect_file('file')
+ pass
with self.check_context(arc.open(errorlevel=0), oserror_filter):
- self.expect_file('file')
+ pass
with self.check_context(arc.open(errorlevel=0), tarerror_filter):
self.expect_exception(tarfile.TarError)
@@ -4259,7 +4533,7 @@ class TestExtractionFilters(unittest.TestCase):
# If 1, all fatal errors are raised
with self.check_context(arc.open(errorlevel=1), extracterror_filter):
- self.expect_file('file')
+ pass
with self.check_context(arc.open(errorlevel=1), filtererror_filter):
self.expect_exception(tarfile.FilterError)
diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py
index b9be87f357f..74b979d0096 100644
--- a/Lib/test/test_traceback.py
+++ b/Lib/test/test_traceback.py
@@ -4188,6 +4188,15 @@ class SuggestionFormattingTestBase:
self.assertNotIn("blech", actual)
self.assertNotIn("oh no!", actual)
+ def test_attribute_error_with_non_string_candidates(self):
+ class T:
+ bluch = 1
+
+ instance = T()
+ instance.__dict__[0] = 1
+ actual = self.get_suggestion(instance, 'blich')
+ self.assertIn("bluch", actual)
+
def test_attribute_error_with_bad_name(self):
def raise_attribute_error_with_bad_name():
raise AttributeError(name=12, obj=23)
@@ -4223,8 +4232,8 @@ class SuggestionFormattingTestBase:
return mod_name
- def get_import_from_suggestion(self, mod_dict, name):
- modname = self.make_module(mod_dict)
+ def get_import_from_suggestion(self, code, name):
+ modname = self.make_module(code)
def callable():
try:
@@ -4301,6 +4310,13 @@ class SuggestionFormattingTestBase:
self.assertIn("'_bluch'", self.get_import_from_suggestion(code, '_luch'))
self.assertNotIn("'_bluch'", self.get_import_from_suggestion(code, 'bluch'))
+ def test_import_from_suggestions_non_string(self):
+ modWithNonStringAttr = textwrap.dedent("""\
+ globals()[0] = 1
+ bluch = 1
+ """)
+ self.assertIn("'bluch'", self.get_import_from_suggestion(modWithNonStringAttr, 'blech'))
+
def test_import_from_suggestions_do_not_trigger_for_long_attributes(self):
code = "blech = None"
@@ -4397,6 +4413,15 @@ class SuggestionFormattingTestBase:
actual = self.get_suggestion(func)
self.assertIn("'ZeroDivisionError'?", actual)
+ def test_name_error_suggestions_with_non_string_candidates(self):
+ def func():
+ abc = 1
+ custom_globals = globals().copy()
+ custom_globals[0] = 1
+ print(eval("abv", custom_globals, locals()))
+ actual = self.get_suggestion(func)
+ self.assertIn("abc", actual)
+
def test_name_error_suggestions_do_not_trigger_for_long_names(self):
def func():
somethingverywronghehehehehehe = None
diff --git a/Lib/test/test_zstd.py b/Lib/test/test_zstd.py
index 014634e450e..d4c28aed38e 100644
--- a/Lib/test/test_zstd.py
+++ b/Lib/test/test_zstd.py
@@ -293,11 +293,11 @@ class CompressorTestCase(unittest.TestCase):
# zstd lib doesn't support MT compression
if not SUPPORT_MULTITHREADING:
- with self.assertRaises(ZstdError):
+ with self.assertRaises(ValueError):
ZstdCompressor(options={CompressionParameter.nb_workers:4})
- with self.assertRaises(ZstdError):
+ with self.assertRaises(ValueError):
ZstdCompressor(options={CompressionParameter.job_size:4})
- with self.assertRaises(ZstdError):
+ with self.assertRaises(ValueError):
ZstdCompressor(options={CompressionParameter.overlap_log:4})
# out of bounds error msg
@@ -395,6 +395,115 @@ class CompressorTestCase(unittest.TestCase):
c = ZstdCompressor()
self.assertNotEqual(c.compress(b'', c.FLUSH_FRAME), b'')
+ def test_set_pledged_input_size(self):
+ DAT = DECOMPRESSED_100_PLUS_32KB
+ CHUNK_SIZE = len(DAT) // 3
+
+ # wrong value
+ c = ZstdCompressor()
+ with self.assertRaisesRegex(ValueError,
+ r'should be a positive int less than \d+'):
+ c.set_pledged_input_size(-300)
+ # overflow
+ with self.assertRaisesRegex(ValueError,
+ r'should be a positive int less than \d+'):
+ c.set_pledged_input_size(2**64)
+ # ZSTD_CONTENTSIZE_ERROR is invalid
+ with self.assertRaisesRegex(ValueError,
+ r'should be a positive int less than \d+'):
+ c.set_pledged_input_size(2**64-2)
+ # ZSTD_CONTENTSIZE_UNKNOWN should use None
+ with self.assertRaisesRegex(ValueError,
+ r'should be a positive int less than \d+'):
+ c.set_pledged_input_size(2**64-1)
+
+ # check valid values are settable
+ c.set_pledged_input_size(2**63)
+ c.set_pledged_input_size(2**64-3)
+
+ # check that zero means empty frame
+ c = ZstdCompressor(level=1)
+ c.set_pledged_input_size(0)
+ c.compress(b'')
+ dat = c.flush()
+ ret = get_frame_info(dat)
+ self.assertEqual(ret.decompressed_size, 0)
+
+
+ # wrong mode
+ c = ZstdCompressor(level=1)
+ c.compress(b'123456')
+ self.assertEqual(c.last_mode, c.CONTINUE)
+ with self.assertRaisesRegex(ValueError,
+ r'last_mode == FLUSH_FRAME'):
+ c.set_pledged_input_size(300)
+
+ # None value
+ c = ZstdCompressor(level=1)
+ c.set_pledged_input_size(None)
+ dat = c.compress(DAT) + c.flush()
+
+ ret = get_frame_info(dat)
+ self.assertEqual(ret.decompressed_size, None)
+
+ # correct value
+ c = ZstdCompressor(level=1)
+ c.set_pledged_input_size(len(DAT))
+
+ chunks = []
+ posi = 0
+ while posi < len(DAT):
+ dat = c.compress(DAT[posi:posi+CHUNK_SIZE])
+ posi += CHUNK_SIZE
+ chunks.append(dat)
+
+ dat = c.flush()
+ chunks.append(dat)
+ chunks = b''.join(chunks)
+
+ ret = get_frame_info(chunks)
+ self.assertEqual(ret.decompressed_size, len(DAT))
+ self.assertEqual(decompress(chunks), DAT)
+
+ c.set_pledged_input_size(len(DAT)) # the second frame
+ dat = c.compress(DAT) + c.flush()
+
+ ret = get_frame_info(dat)
+ self.assertEqual(ret.decompressed_size, len(DAT))
+ self.assertEqual(decompress(dat), DAT)
+
+ # not enough data
+ c = ZstdCompressor(level=1)
+ c.set_pledged_input_size(len(DAT)+1)
+
+ for start in range(0, len(DAT), CHUNK_SIZE):
+ end = min(start+CHUNK_SIZE, len(DAT))
+ _dat = c.compress(DAT[start:end])
+
+ with self.assertRaises(ZstdError):
+ c.flush()
+
+ # too much data
+ c = ZstdCompressor(level=1)
+ c.set_pledged_input_size(len(DAT))
+
+ for start in range(0, len(DAT), CHUNK_SIZE):
+ end = min(start+CHUNK_SIZE, len(DAT))
+ _dat = c.compress(DAT[start:end])
+
+ with self.assertRaises(ZstdError):
+ c.compress(b'extra', ZstdCompressor.FLUSH_FRAME)
+
+ # content size not set if content_size_flag == 0
+ c = ZstdCompressor(options={CompressionParameter.content_size_flag: 0})
+ c.set_pledged_input_size(10)
+ dat1 = c.compress(b"hello")
+ dat2 = c.compress(b"world")
+ dat3 = c.flush()
+ frame_data = get_frame_info(dat1 + dat2 + dat3)
+ self.assertIsNone(frame_data.decompressed_size)
+
+
class DecompressorTestCase(unittest.TestCase):
def test_simple_decompress_bad_args(self):
@@ -1138,27 +1247,41 @@ class ZstdDictTestCase(unittest.TestCase):
ZstdDecompressor(zd)
# wrong type
- with self.assertRaisesRegex(TypeError, r'should be ZstdDict object'):
- ZstdCompressor(zstd_dict=(zd, b'123'))
- with self.assertRaisesRegex(TypeError, r'should be ZstdDict object'):
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
+ ZstdCompressor(zstd_dict=[zd, 1])
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
+ ZstdCompressor(zstd_dict=(zd, 1.0))
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
+ ZstdCompressor(zstd_dict=(zd,))
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
ZstdCompressor(zstd_dict=(zd, 1, 2))
- with self.assertRaisesRegex(TypeError, r'should be ZstdDict object'):
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
ZstdCompressor(zstd_dict=(zd, -1))
- with self.assertRaisesRegex(TypeError, r'should be ZstdDict object'):
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
ZstdCompressor(zstd_dict=(zd, 3))
-
- with self.assertRaisesRegex(TypeError, r'should be ZstdDict object'):
- ZstdDecompressor(zstd_dict=(zd, b'123'))
- with self.assertRaisesRegex(TypeError, r'should be ZstdDict object'):
+ with self.assertRaises(OverflowError):
+ ZstdCompressor(zstd_dict=(zd, 2**1000))
+ with self.assertRaises(OverflowError):
+ ZstdCompressor(zstd_dict=(zd, -2**1000))
+
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
+ ZstdDecompressor(zstd_dict=[zd, 1])
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
+ ZstdDecompressor(zstd_dict=(zd, 1.0))
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
+ ZstdDecompressor((zd,))
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
ZstdDecompressor((zd, 1, 2))
- with self.assertRaisesRegex(TypeError, r'should be ZstdDict object'):
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
ZstdDecompressor((zd, -1))
- with self.assertRaisesRegex(TypeError, r'should be ZstdDict object'):
+ with self.assertRaisesRegex(TypeError, r'should be a ZstdDict object'):
ZstdDecompressor((zd, 3))
+ with self.assertRaises(OverflowError):
+ ZstdDecompressor((zd, 2**1000))
+ with self.assertRaises(OverflowError):
+ ZstdDecompressor((zd, -2**1000))
def test_train_dict(self):
-
-
TRAINED_DICT = train_dict(SAMPLES, DICT_SIZE1)
ZstdDict(TRAINED_DICT.dict_content, is_raw=False)
@@ -1240,17 +1363,36 @@ class ZstdDictTestCase(unittest.TestCase):
with self.assertRaises(TypeError):
_zstd.train_dict({}, (), 100)
with self.assertRaises(TypeError):
+ _zstd.train_dict(bytearray(), (), 100)
+ with self.assertRaises(TypeError):
_zstd.train_dict(b'', 99, 100)
with self.assertRaises(TypeError):
+ _zstd.train_dict(b'', [], 100)
+ with self.assertRaises(TypeError):
_zstd.train_dict(b'', (), 100.1)
+ with self.assertRaises(TypeError):
+ _zstd.train_dict(b'', (99.1,), 100)
+ with self.assertRaises(ValueError):
+ _zstd.train_dict(b'abc', (4, -1), 100)
+ with self.assertRaises(ValueError):
+ _zstd.train_dict(b'abc', (2,), 100)
+ with self.assertRaises(ValueError):
+ _zstd.train_dict(b'', (99,), 100)
# size > size_t
with self.assertRaises(ValueError):
- _zstd.train_dict(b'', (2**64+1,), 100)
+ _zstd.train_dict(b'', (2**1000,), 100)
+ with self.assertRaises(ValueError):
+ _zstd.train_dict(b'', (-2**1000,), 100)
# dict_size <= 0
with self.assertRaises(ValueError):
_zstd.train_dict(b'', (), 0)
+ with self.assertRaises(ValueError):
+ _zstd.train_dict(b'', (), -1)
+
+ with self.assertRaises(ZstdError):
+ _zstd.train_dict(b'', (), 1)
def test_finalize_dict_c(self):
with self.assertRaises(TypeError):
@@ -1260,21 +1402,50 @@ class ZstdDictTestCase(unittest.TestCase):
with self.assertRaises(TypeError):
_zstd.finalize_dict({}, b'', (), 100, 5)
with self.assertRaises(TypeError):
+ _zstd.finalize_dict(bytearray(TRAINED_DICT.dict_content), b'', (), 100, 5)
+ with self.assertRaises(TypeError):
_zstd.finalize_dict(TRAINED_DICT.dict_content, {}, (), 100, 5)
with self.assertRaises(TypeError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, bytearray(), (), 100, 5)
+ with self.assertRaises(TypeError):
_zstd.finalize_dict(TRAINED_DICT.dict_content, b'', 99, 100, 5)
with self.assertRaises(TypeError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', [], 100, 5)
+ with self.assertRaises(TypeError):
_zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), 100.1, 5)
with self.assertRaises(TypeError):
_zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), 100, 5.1)
+ with self.assertRaises(ValueError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'abc', (4, -1), 100, 5)
+ with self.assertRaises(ValueError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'abc', (2,), 100, 5)
+ with self.assertRaises(ValueError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (99,), 100, 5)
+
# size > size_t
with self.assertRaises(ValueError):
- _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (2**64+1,), 100, 5)
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (2**1000,), 100, 5)
+ with self.assertRaises(ValueError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (-2**1000,), 100, 5)
# dict_size <= 0
with self.assertRaises(ValueError):
_zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), 0, 5)
+ with self.assertRaises(ValueError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), -1, 5)
+ with self.assertRaises(OverflowError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), 2**1000, 5)
+ with self.assertRaises(OverflowError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), -2**1000, 5)
+
+ with self.assertRaises(OverflowError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), 100, 2**1000)
+ with self.assertRaises(OverflowError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), 100, -2**1000)
+
+ with self.assertRaises(ZstdError):
+ _zstd.finalize_dict(TRAINED_DICT.dict_content, b'', (), 100, 5)
def test_train_buffer_protocol_samples(self):
def _nbytes(dat):
diff --git a/Lib/traceback.py b/Lib/traceback.py
index 17b082eced6..a1f175dbbaa 100644
--- a/Lib/traceback.py
+++ b/Lib/traceback.py
@@ -1595,7 +1595,11 @@ def _compute_suggestion_error(exc_value, tb, wrong_name):
if isinstance(exc_value, AttributeError):
obj = exc_value.obj
try:
- d = dir(obj)
+ try:
+ d = dir(obj)
+ except TypeError: # Attributes are unsortable, e.g. int and str
+ d = list(obj.__class__.__dict__.keys()) + list(obj.__dict__.keys())
+ d = sorted([x for x in d if isinstance(x, str)])
hide_underscored = (wrong_name[:1] != '_')
if hide_underscored and tb is not None:
while tb.tb_next is not None:
@@ -1610,7 +1614,11 @@ def _compute_suggestion_error(exc_value, tb, wrong_name):
elif isinstance(exc_value, ImportError):
try:
mod = __import__(exc_value.name)
- d = dir(mod)
+ try:
+ d = dir(mod)
+ except TypeError: # Attributes are unsortable, e.g. int and str
+ d = list(mod.__dict__.keys())
+ d = sorted([x for x in d if isinstance(x, str)])
if wrong_name[:1] != '_':
d = [x for x in d if x[:1] != '_']
except Exception:
@@ -1628,6 +1636,7 @@ def _compute_suggestion_error(exc_value, tb, wrong_name):
+ list(frame.f_globals)
+ list(frame.f_builtins)
)
+ d = [x for x in d if isinstance(x, str)]
# Check first if we are in a method and the instance
# has the wrong name as attribute
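A minimal sketch (not part of the patch) of the situation the fallback above handles: an instance whose __dict__ contains a non-string key makes dir() raise TypeError because the collected names cannot be sorted, so the suggestion code collects the raw dictionary keys instead and keeps only the strings.

    class T:
        bluch = 1

    instance = T()
    instance.__dict__[0] = 1                 # non-string attribute key

    try:
        names = dir(instance)                # sorting int and str keys raises TypeError
    except TypeError:
        names = list(type(instance).__dict__) + list(instance.__dict__)
        names = sorted(x for x in names if isinstance(x, str))

    print('bluch' in names)                  # True: a suggestion can still be offered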
diff --git a/Misc/ACKS b/Misc/ACKS
index 571142e7e49..2435943f1bb 100644
--- a/Misc/ACKS
+++ b/Misc/ACKS
@@ -1365,6 +1365,7 @@ Milan Oberkirch
Pascal Oberndoerfer
Géry Ogam
Seonkyo Ok
+Andrea Oliveri
Jeffrey Ollie
Adam Olsen
Bryan Olson
diff --git a/Misc/NEWS.d/next/Build/2025-05-30-11-02-30.gh-issue-134923.gBkRg4.rst b/Misc/NEWS.d/next/Build/2025-05-30-11-02-30.gh-issue-134923.gBkRg4.rst
new file mode 100644
index 00000000000..a742a6add8a
--- /dev/null
+++ b/Misc/NEWS.d/next/Build/2025-05-30-11-02-30.gh-issue-134923.gBkRg4.rst
@@ -0,0 +1,3 @@
+Windows builds with profile-guided optimization enabled now use
+``/GENPROFILE`` and ``/USEPROFILE`` instead of deprecated ``/LTCG:``
+options.
diff --git a/Misc/NEWS.d/next/C_API/2025-05-30-11-33-17.gh-issue-134745.GN-zk2.rst b/Misc/NEWS.d/next/C_API/2025-05-30-11-33-17.gh-issue-134745.GN-zk2.rst
new file mode 100644
index 00000000000..a85d2e90576
--- /dev/null
+++ b/Misc/NEWS.d/next/C_API/2025-05-30-11-33-17.gh-issue-134745.GN-zk2.rst
@@ -0,0 +1,3 @@
+Change :c:func:`!PyThread_allocate_lock` implementation to ``PyMutex``.
+On Windows, :c:func:`!PyThread_acquire_lock_timed` now supports the *intr_flag*
+parameter: it can be interrupted. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/C_API/2025-06-02-13-19-22.gh-issue-134989.sDDyBN.rst b/Misc/NEWS.d/next/C_API/2025-06-02-13-19-22.gh-issue-134989.sDDyBN.rst
new file mode 100644
index 00000000000..e49f7651065
--- /dev/null
+++ b/Misc/NEWS.d/next/C_API/2025-06-02-13-19-22.gh-issue-134989.sDDyBN.rst
@@ -0,0 +1,2 @@
+Implement :c:func:`PyObject_DelAttr` and :c:func:`PyObject_DelAttrString` as
+macros in the limited C API 3.12 and older. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/C_API/2025-06-05-11-06-07.gh-issue-134989.74p4ud.rst b/Misc/NEWS.d/next/C_API/2025-06-05-11-06-07.gh-issue-134989.74p4ud.rst
new file mode 100644
index 00000000000..844e9a66664
--- /dev/null
+++ b/Misc/NEWS.d/next/C_API/2025-06-05-11-06-07.gh-issue-134989.74p4ud.rst
@@ -0,0 +1,3 @@
+Fix ``Py_RETURN_NONE``, ``Py_RETURN_TRUE`` and ``Py_RETURN_FALSE`` macros in
+the limited C API 3.11 and older: don't treat ``Py_None``, ``Py_True`` and
+``Py_False`` as immortal. Patch by Victor Stinner.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-05-28-23-58-50.gh-issue-117852.BO9g7z.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-28-23-58-50.gh-issue-117852.BO9g7z.rst
new file mode 100644
index 00000000000..fc71cd21a36
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-28-23-58-50.gh-issue-117852.BO9g7z.rst
@@ -0,0 +1 @@
+Fix argument checking of :meth:`~agen.athrow`.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-05-30-15-56-19.gh-issue-134908.3a7PxM.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-30-15-56-19.gh-issue-134908.3a7PxM.rst
new file mode 100644
index 00000000000..3178f0aaf88
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-30-15-56-19.gh-issue-134908.3a7PxM.rst
@@ -0,0 +1 @@
+Fix crash when iterating over lines in a text file on the :term:`free threaded <free threading>` build.
diff --git a/Misc/NEWS.d/next/Core_and_Builtins/2025-05-30-18-09-54.gh-issue-134889.Ic9UM-.rst b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-30-18-09-54.gh-issue-134889.Ic9UM-.rst
new file mode 100644
index 00000000000..3b86134bf16
--- /dev/null
+++ b/Misc/NEWS.d/next/Core_and_Builtins/2025-05-30-18-09-54.gh-issue-134889.Ic9UM-.rst
@@ -0,0 +1,2 @@
+Fix handling of a few opcodes that leave operands on the stack when
+optimizing ``LOAD_FAST``.
diff --git a/Misc/NEWS.d/next/Library/2025-03-09-03-13-41.gh-issue-130999.tBRBVB.rst b/Misc/NEWS.d/next/Library/2025-03-09-03-13-41.gh-issue-130999.tBRBVB.rst
new file mode 100644
index 00000000000..157522f9aab
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-03-09-03-13-41.gh-issue-130999.tBRBVB.rst
@@ -0,0 +1,2 @@
+Do not exit the new REPL, and still offer suggestions, when an error occurs
+and the suggestion candidates include non-string names.
diff --git a/Misc/NEWS.d/next/Library/2025-03-13-20-48-58.gh-issue-123471.cM4w4f.rst b/Misc/NEWS.d/next/Library/2025-03-13-20-48-58.gh-issue-123471.cM4w4f.rst
new file mode 100644
index 00000000000..cfc783900de
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-03-13-20-48-58.gh-issue-123471.cM4w4f.rst
@@ -0,0 +1 @@
+Make concurrent iterations over :class:`itertools.cycle` safe under free-threading.
diff --git a/Misc/NEWS.d/next/Library/2025-04-07-06-41-54.gh-issue-131884.ym9BJN.rst b/Misc/NEWS.d/next/Library/2025-04-07-06-41-54.gh-issue-131884.ym9BJN.rst
new file mode 100644
index 00000000000..d9e2eae02dc
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-04-07-06-41-54.gh-issue-131884.ym9BJN.rst
@@ -0,0 +1 @@
+Fix formatting issues in :func:`json.dump` when both *indent* and *skipkeys* are used.
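A hedged illustration of the case the entry above refers to: dumping a dict that contains a non-serializable key with both *indent* and *skipkeys* should yield well-formed JSON with the offending entry simply dropped.

    import json

    data = {'kept': 1, (1, 2): 'dropped', 'also_kept': 2}
    text = json.dumps(data, indent=2, skipkeys=True)
    print(text)        # valid, indented JSON containing only the string keys
    json.loads(text)   # round-trips without error after the fix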
diff --git a/Misc/NEWS.d/next/Library/2025-04-21-01-03-15.gh-issue-127081.WXRliX.rst b/Misc/NEWS.d/next/Library/2025-04-21-01-03-15.gh-issue-127081.WXRliX.rst
new file mode 100644
index 00000000000..63fed60ced0
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-04-21-01-03-15.gh-issue-127081.WXRliX.rst
@@ -0,0 +1,2 @@
+Fix libc thread safety issues with :mod:`os` by replacing ``getlogin`` with
+the re-entrant ``getlogin_r``.
diff --git a/Misc/NEWS.d/next/Library/2025-04-25-16-06-53.gh-issue-132908.wV5rja.rst b/Misc/NEWS.d/next/Library/2025-04-25-16-06-53.gh-issue-132908.wV5rja.rst
new file mode 100644
index 00000000000..e33b061bb9b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-04-25-16-06-53.gh-issue-132908.wV5rja.rst
@@ -0,0 +1,2 @@
+Add :func:`math.isnormal` and :func:`math.issubnormal` functions. Patch by
+Sergey B Kirpichev.
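A brief usage sketch of the two new predicates; the classification follows the usual IEEE 754 categories (zero, subnormal, normal, infinite, NaN).

    import math

    print(math.isnormal(1.0))          # True: ordinary finite value
    print(math.isnormal(5e-324))       # False: smallest positive subnormal double
    print(math.issubnormal(5e-324))    # True
    print(math.issubnormal(0.0))       # False: zero is neither normal nor subnormal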
diff --git a/Misc/NEWS.d/next/Library/2025-05-01-10-56-44.gh-issue-132813.rKurvp.rst b/Misc/NEWS.d/next/Library/2025-05-01-10-56-44.gh-issue-132813.rKurvp.rst
new file mode 100644
index 00000000000..55608528a45
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-01-10-56-44.gh-issue-132813.rKurvp.rst
@@ -0,0 +1,2 @@
+Improve error messages for incorrect types and values of :class:`csv.Dialect`
+attributes.
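A sketch of how the improved messages surface from Python code; the exact wording is taken from the Modules/_csv.c changes further down and may differ slightly.

    import csv

    try:
        csv.reader([], delimiter=0)    # delimiter must be a one-character string
    except TypeError as exc:
        print(exc)   # e.g.: "delimiter" must be a unicode character, not int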
diff --git a/Misc/NEWS.d/next/Library/2025-05-08-13-43-19.gh-issue-133489.9eGS1Z.rst b/Misc/NEWS.d/next/Library/2025-05-08-13-43-19.gh-issue-133489.9eGS1Z.rst
new file mode 100644
index 00000000000..0c07beb7693
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-08-13-43-19.gh-issue-133489.9eGS1Z.rst
@@ -0,0 +1,2 @@
+:func:`random.getrandbits` can now generate more than 2\ :sup:`31` bits.
+:func:`random.randbytes` can now generate more than 256 MiB.
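A small sketch mirroring the new tests; requests of this size allocate hundreds of megabytes, so this is illustrative rather than something to run casually.

    import random

    rng = random.Random()
    x = rng.getrandbits(2**31)         # 2**31 bits is now accepted
    print(x.bit_length() <= 2**31)     # True

    buf = rng.randbytes(2**28)         # 256 MiB, previously out of reach
    print(len(buf) == 2**28)           # True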
diff --git a/Misc/NEWS.d/next/Library/2025-05-15-00-27-09.gh-issue-134004.e8k4-R.rst b/Misc/NEWS.d/next/Library/2025-05-15-00-27-09.gh-issue-134004.e8k4-R.rst
new file mode 100644
index 00000000000..a9a56d9239b
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-15-00-27-09.gh-issue-134004.e8k4-R.rst
@@ -0,0 +1,2 @@
+:mod:`shelve` as well as the underlying :mod:`!dbm.dumb` and :mod:`!dbm.sqlite3` now have
+:meth:`!reorganize` methods to recover unused free space previously occupied by deleted entries.
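A hedged usage sketch of the new method, assuming the chosen dbm backend is one that gained ``reorganize()`` (``dbm.dumb`` or ``dbm.sqlite3``).

    import shelve

    with shelve.open('inventory.db') as db:   # backend selected by dbm defaults
        db['bulk'] = list(range(100_000))
        del db['bulk']
        db.reorganize()                       # reclaim space left by the deleted entry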
diff --git a/Misc/NEWS.d/next/Library/2025-05-18-23-46-21.gh-issue-134152.30HwbX.rst b/Misc/NEWS.d/next/Library/2025-05-18-23-46-21.gh-issue-134152.30HwbX.rst
new file mode 100644
index 00000000000..911a4a59ea6
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-18-23-46-21.gh-issue-134152.30HwbX.rst
@@ -0,0 +1 @@
+:mod:`email`: Fix parsing of email message ID with invalid domain.
diff --git a/Misc/NEWS.d/next/Library/2025-05-26-14-04-39.gh-issue-134696.P04xUa.rst b/Misc/NEWS.d/next/Library/2025-05-26-14-04-39.gh-issue-134696.P04xUa.rst
new file mode 100644
index 00000000000..282eb088b89
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-26-14-04-39.gh-issue-134696.P04xUa.rst
@@ -0,0 +1,5 @@
+Built-in HACL* and OpenSSL implementations of hash function constructors
+now correctly accept the same *documented* named arguments. For instance,
+:func:`~hashlib.md5` could previously be invoked as ``md5(data=data)``
+or ``md5(string=string)`` depending on the underlying implementation
+but these calls were not compatible. Patch by Bénédikt Tran.
diff --git a/Misc/NEWS.d/next/Library/2025-05-28-20-49-29.gh-issue-134857.dVYXVO.rst b/Misc/NEWS.d/next/Library/2025-05-28-20-49-29.gh-issue-134857.dVYXVO.rst
new file mode 100644
index 00000000000..92e38c0bb5a
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-28-20-49-29.gh-issue-134857.dVYXVO.rst
@@ -0,0 +1,3 @@
+Improve error reporting for :mod:`doctest`\ s run with :mod:`unittest`. Remove
+:mod:`!doctest` module frames from tracebacks and the redundant newline
+character from failure messages.
diff --git a/Misc/NEWS.d/next/Library/2025-05-29-06-53-40.gh-issue-134885.-_L22o.rst b/Misc/NEWS.d/next/Library/2025-05-29-06-53-40.gh-issue-134885.-_L22o.rst
new file mode 100644
index 00000000000..4b05d42c109
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-29-06-53-40.gh-issue-134885.-_L22o.rst
@@ -0,0 +1,2 @@
+Fix possible crash in the :mod:`compression.zstd` module related to setting
+parameter types. Patch by Jelle Zijlstra.
diff --git a/Misc/NEWS.d/next/Library/2025-05-29-17-39-13.gh-issue-108885.MegCRA.rst b/Misc/NEWS.d/next/Library/2025-05-29-17-39-13.gh-issue-108885.MegCRA.rst
new file mode 100644
index 00000000000..e37cf121f5f
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-29-17-39-13.gh-issue-108885.MegCRA.rst
@@ -0,0 +1,3 @@
+Run each example as a subtest in unit tests synthesized by
+:func:`doctest.DocFileSuite` and :func:`doctest.DocTestSuite`.
+Add the :meth:`doctest.DocTestRunner.report_skip` method.
diff --git a/Misc/NEWS.d/next/Library/2025-05-30-13-07-29.gh-issue-134718.9Qvhxn.rst b/Misc/NEWS.d/next/Library/2025-05-30-13-07-29.gh-issue-134718.9Qvhxn.rst
new file mode 100644
index 00000000000..922ab168fdd
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-30-13-07-29.gh-issue-134718.9Qvhxn.rst
@@ -0,0 +1,2 @@
+:func:`ast.dump` now only omits ``None`` and ``[]`` values if they are
+default values.
diff --git a/Misc/NEWS.d/next/Library/2025-05-30-18-13-48.gh-issue-134718.5FEspx.rst b/Misc/NEWS.d/next/Library/2025-05-30-18-13-48.gh-issue-134718.5FEspx.rst
new file mode 100644
index 00000000000..06c1d5583be
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-30-18-13-48.gh-issue-134718.5FEspx.rst
@@ -0,0 +1 @@
+By default, omit optional ``Load()`` values in :func:`ast.dump`.
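A quick illustration of the combined effect of the two ``ast.dump`` entries above (output is indicative; field defaults follow the AST definition).

    import ast

    tree = ast.parse('x', mode='eval')
    print(ast.dump(tree))
    # e.g. Expression(body=Name(id='x')) -- the default Load() context and other
    # fields equal to their defaults (None, []) are omitted from the dump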
diff --git a/Misc/NEWS.d/next/Library/2025-05-31-12-08-12.gh-issue-134970.lgSaxq.rst b/Misc/NEWS.d/next/Library/2025-05-31-12-08-12.gh-issue-134970.lgSaxq.rst
new file mode 100644
index 00000000000..20f53569ef4
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-31-12-08-12.gh-issue-134970.lgSaxq.rst
@@ -0,0 +1,3 @@
+Fix the "unknown action" exception in
+:meth:`argparse.ArgumentParser.add_argument_group` to correctly replace the
+action class.
diff --git a/Misc/NEWS.d/next/Library/2025-05-31-15-49-46.gh-issue-134978.mXXuvW.rst b/Misc/NEWS.d/next/Library/2025-05-31-15-49-46.gh-issue-134978.mXXuvW.rst
new file mode 100644
index 00000000000..e75ce1622d6
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-05-31-15-49-46.gh-issue-134978.mXXuvW.rst
@@ -0,0 +1,7 @@
+:mod:`hashlib`: Supporting the ``string`` keyword parameter in hash function
+constructors such as :func:`~hashlib.new` or the direct hash-named constructors
+such as :func:`~hashlib.md5` and :func:`~hashlib.sha256` is now deprecated and
+slated for removal in Python 3.19.
+Prefer passing the initial data as a positional argument for maximum backwards
+compatibility.
+Patch by Bénédikt Tran.
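A sketch of the preferred spelling versus the deprecated keyword; after this change the keyword form is deprecated (and may emit a DeprecationWarning) but still works.

    import hashlib

    payload = b'abc'
    h1 = hashlib.md5(payload)                # preferred: initial data passed positionally
    h2 = hashlib.md5(string=payload)         # deprecated; slated for removal in 3.19
    print(h1.hexdigest() == h2.hexdigest())  # True: both hash the same bytes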
diff --git a/Misc/NEWS.d/next/Library/2025-06-01-15-13-07.gh-issue-66234.Jw7OdC.rst b/Misc/NEWS.d/next/Library/2025-06-01-15-13-07.gh-issue-66234.Jw7OdC.rst
new file mode 100644
index 00000000000..1defb9a72e0
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-01-15-13-07.gh-issue-66234.Jw7OdC.rst
@@ -0,0 +1,3 @@
+Add the ``'m'`` flag for :func:`dbm.gnu.open`, which disables the use of
+:manpage:`mmap(2)`. This may harm performance, but improves crash
+tolerance.
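A hedged example of the new flag; it is appended to the usual mode letter ('c' here creates the database if needed) and requires a platform where :mod:`dbm.gnu` is available.

    import dbm.gnu

    db = dbm.gnu.open('example.gdbm', 'cm')   # 'm': do not use mmap(2) for access
    db[b'key'] = b'value'
    db.close()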
diff --git a/Misc/NEWS.d/next/Library/2025-06-02-14-28-30.gh-issue-130662.EIgIR8.rst b/Misc/NEWS.d/next/Library/2025-06-02-14-28-30.gh-issue-130662.EIgIR8.rst
new file mode 100644
index 00000000000..e07200f9a3f
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-02-14-28-30.gh-issue-130662.EIgIR8.rst
@@ -0,0 +1,3 @@
+Accept leading zeros in precision and width fields for
+:class:`~fractions.Fraction` formatting, for example ``format(Fraction(1,
+3), '.016f')``.
diff --git a/Misc/NEWS.d/next/Library/2025-06-02-14-36-28.gh-issue-130662.Gpr2GB.rst b/Misc/NEWS.d/next/Library/2025-06-02-14-36-28.gh-issue-130662.Gpr2GB.rst
new file mode 100644
index 00000000000..d97d937376a
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-06-02-14-36-28.gh-issue-130662.Gpr2GB.rst
@@ -0,0 +1,3 @@
+Accept leading zeros in precision and width fields for
+:class:`~decimal.Decimal` formatting, for example ``format(Decimal(1.25),
+'.016f')``.
diff --git a/Misc/NEWS.d/next/Security/2025-06-02-11-32-23.gh-issue-135034.RLGjbp.rst b/Misc/NEWS.d/next/Security/2025-06-02-11-32-23.gh-issue-135034.RLGjbp.rst
new file mode 100644
index 00000000000..08a0087e203
--- /dev/null
+++ b/Misc/NEWS.d/next/Security/2025-06-02-11-32-23.gh-issue-135034.RLGjbp.rst
@@ -0,0 +1,6 @@
+Fixes multiple issues that allowed ``tarfile`` extraction filters
+(``filter="data"`` and ``filter="tar"``) to be bypassed using crafted
+symlinks and hard links.
+
+Addresses :cve:`2024-12718`, :cve:`2025-4138`, :cve:`2025-4330`, and :cve:`2025-4517`.
+
diff --git a/Misc/NEWS.d/next/Windows/2025-06-03-18-26-54.gh-issue-135099.Q9usKm.rst b/Misc/NEWS.d/next/Windows/2025-06-03-18-26-54.gh-issue-135099.Q9usKm.rst
new file mode 100644
index 00000000000..36e70b1c0d8
--- /dev/null
+++ b/Misc/NEWS.d/next/Windows/2025-06-03-18-26-54.gh-issue-135099.Q9usKm.rst
@@ -0,0 +1,2 @@
+Fix a crash that could occur on Windows when a background thread waits on a
+:c:type:`PyMutex` while the main thread is shutting down the interpreter.
diff --git a/Modules/_csv.c b/Modules/_csv.c
index e5ae853590b..2e04136e0ac 100644
--- a/Modules/_csv.c
+++ b/Modules/_csv.c
@@ -237,7 +237,7 @@ _set_int(const char *name, int *target, PyObject *src, int dflt)
int value;
if (!PyLong_CheckExact(src)) {
PyErr_Format(PyExc_TypeError,
- "\"%s\" must be an integer", name);
+ "\"%s\" must be an integer, not %T", name, src);
return -1;
}
value = PyLong_AsInt(src);
@@ -255,27 +255,29 @@ _set_char_or_none(const char *name, Py_UCS4 *target, PyObject *src, Py_UCS4 dflt
if (src == NULL) {
*target = dflt;
}
- else {
+ else if (src == Py_None) {
*target = NOT_SET;
- if (src != Py_None) {
- if (!PyUnicode_Check(src)) {
- PyErr_Format(PyExc_TypeError,
- "\"%s\" must be string or None, not %.200s", name,
- Py_TYPE(src)->tp_name);
- return -1;
- }
- Py_ssize_t len = PyUnicode_GetLength(src);
- if (len < 0) {
- return -1;
- }
- if (len != 1) {
- PyErr_Format(PyExc_TypeError,
- "\"%s\" must be a 1-character string",
- name);
- return -1;
- }
- *target = PyUnicode_READ_CHAR(src, 0);
+ }
+ else {
+ // similar to PyArg_Parse("C?")
+ if (!PyUnicode_Check(src)) {
+ PyErr_Format(PyExc_TypeError,
+ "\"%s\" must be a unicode character or None, not %T",
+ name, src);
+ return -1;
+ }
+ Py_ssize_t len = PyUnicode_GetLength(src);
+ if (len < 0) {
+ return -1;
}
+ if (len != 1) {
+ PyErr_Format(PyExc_TypeError,
+ "\"%s\" must be a unicode character or None, "
+ "not a string of length %zd",
+ name, len);
+ return -1;
+ }
+ *target = PyUnicode_READ_CHAR(src, 0);
}
return 0;
}
@@ -287,11 +289,12 @@ _set_char(const char *name, Py_UCS4 *target, PyObject *src, Py_UCS4 dflt)
*target = dflt;
}
else {
+ // similar to PyArg_Parse("C")
if (!PyUnicode_Check(src)) {
PyErr_Format(PyExc_TypeError,
- "\"%s\" must be string, not %.200s", name,
- Py_TYPE(src)->tp_name);
- return -1;
+ "\"%s\" must be a unicode character, not %T",
+ name, src);
+ return -1;
}
Py_ssize_t len = PyUnicode_GetLength(src);
if (len < 0) {
@@ -299,8 +302,9 @@ _set_char(const char *name, Py_UCS4 *target, PyObject *src, Py_UCS4 dflt)
}
if (len != 1) {
PyErr_Format(PyExc_TypeError,
- "\"%s\" must be a 1-character string",
- name);
+ "\"%s\" must be a unicode character, "
+ "not a string of length %zd",
+ name, len);
return -1;
}
*target = PyUnicode_READ_CHAR(src, 0);
@@ -314,16 +318,12 @@ _set_str(const char *name, PyObject **target, PyObject *src, const char *dflt)
if (src == NULL)
*target = PyUnicode_DecodeASCII(dflt, strlen(dflt), NULL);
else {
- if (src == Py_None)
- *target = NULL;
- else if (!PyUnicode_Check(src)) {
+ if (!PyUnicode_Check(src)) {
PyErr_Format(PyExc_TypeError,
- "\"%s\" must be a string", name);
+ "\"%s\" must be a string, not %T", name, src);
return -1;
}
- else {
- Py_XSETREF(*target, Py_NewRef(src));
- }
+ Py_XSETREF(*target, Py_NewRef(src));
}
return 0;
}
@@ -533,11 +533,6 @@ dialect_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
/* validate options */
if (dialect_check_quoting(self->quoting))
goto err;
- if (self->delimiter == NOT_SET) {
- PyErr_SetString(PyExc_TypeError,
- "\"delimiter\" must be a 1-character string");
- goto err;
- }
if (quotechar == Py_None && quoting == NULL)
self->quoting = QUOTE_NONE;
if (self->quoting != QUOTE_NONE && self->quotechar == NOT_SET) {
@@ -545,10 +540,6 @@ dialect_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
"quotechar must be set if quoting enabled");
goto err;
}
- if (self->lineterminator == NULL) {
- PyErr_SetString(PyExc_TypeError, "lineterminator must be set");
- goto err;
- }
if (dialect_check_char("delimiter", self->delimiter, self, true) ||
dialect_check_char("escapechar", self->escapechar, self,
!self->skipinitialspace) ||
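A hedged sketch of the improved error messages produced by these checks (wording taken from the strings above)::

    import csv, io

    try:
        csv.writer(io.StringIO(), delimiter=";;")
    except TypeError as exc:
        print(exc)   # "delimiter" must be a unicode character, not a string of length 2

    try:
        csv.writer(io.StringIO(), delimiter=1)
    except TypeError as exc:
        print(exc)   # "delimiter" must be a unicode character, not int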
diff --git a/Modules/_gdbmmodule.c b/Modules/_gdbmmodule.c
index 9c402e20e51..6a4939512b2 100644
--- a/Modules/_gdbmmodule.c
+++ b/Modules/_gdbmmodule.c
@@ -814,6 +814,11 @@ dbmopen_impl(PyObject *module, PyObject *filename, const char *flags,
iflags |= GDBM_NOLOCK;
break;
#endif
+#ifdef GDBM_NOMMAP
+ case 'm':
+ iflags |= GDBM_NOMMAP;
+ break;
+#endif
default:
PyErr_Format(state->gdbm_error,
"Flag '%c' is not supported.", (unsigned char)*flags);
@@ -847,6 +852,9 @@ static const char gdbmmodule_open_flags[] = "rwcn"
#ifdef GDBM_NOLOCK
"u"
#endif
+#ifdef GDBM_NOMMAP
+ "m"
+#endif
;
static PyMethodDef _gdbm_module_methods[] = {
diff --git a/Modules/_hashopenssl.c b/Modules/_hashopenssl.c
index dab0bb9b67f..331275076d7 100644
--- a/Modules/_hashopenssl.c
+++ b/Modules/_hashopenssl.c
@@ -1039,6 +1039,14 @@ exit:
return (PyObject *)self;
}
+#define CALL_HASHLIB_NEW(MODULE, NAME, DATA, STRING, USEDFORSECURITY) \
+ do { \
+ PyObject *data_obj; \
+ if (_Py_hashlib_data_argument(&data_obj, DATA, STRING) < 0) { \
+ return NULL; \
+ } \
+ return _hashlib_HASH(MODULE, NAME, data_obj, USEDFORSECURITY); \
+ } while (0)
/* The module-level function: new() */
@@ -1046,9 +1054,10 @@ exit:
_hashlib.new as _hashlib_HASH_new
name: str
- string as data_obj: object(c_default="NULL") = b''
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Return a new hash object using the named algorithm.
@@ -1059,131 +1068,137 @@ The MD5 and SHA1 algorithms are always supported.
[clinic start generated code]*/
static PyObject *
-_hashlib_HASH_new_impl(PyObject *module, const char *name,
- PyObject *data_obj, int usedforsecurity)
-/*[clinic end generated code: output=30c6e7b9a5a4dce3 input=28848db5ccd0a9b5]*/
+_hashlib_HASH_new_impl(PyObject *module, const char *name, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=b905aaf9840c1bbd input=c34af6c6e696d44e]*/
{
- return _hashlib_HASH(module, name, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, name, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_md5
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a md5 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_md5_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=87b0186440a44f8c input=990e36d5e689b16e]*/
+_hashlib_openssl_md5_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=ca8cf184d90f7432 input=e7c0adbd6a867db1]*/
{
- return _hashlib_HASH(module, Py_hash_md5, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_md5, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_sha1
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha1 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha1_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=6813024cf690670d input=948f2f4b6deabc10]*/
+_hashlib_openssl_sha1_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=1736fb7b310d64be input=f7e5bb1711e952d8]*/
{
- return _hashlib_HASH(module, Py_hash_sha1, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha1, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_sha224
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha224 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha224_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=a2dfe7cc4eb14ebb input=f9272821fadca505]*/
+_hashlib_openssl_sha224_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=0d6ff57be5e5c140 input=3820fff7ed3a53b8]*/
{
- return _hashlib_HASH(module, Py_hash_sha224, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha224, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_sha256
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha256 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha256_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=1f874a34870f0a68 input=549fad9d2930d4c5]*/
+_hashlib_openssl_sha256_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=412ea7111555b6e7 input=9a2f115cf1f7e0eb]*/
{
- return _hashlib_HASH(module, Py_hash_sha256, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha256, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_sha384
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha384 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha384_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=58529eff9ca457b2 input=48601a6e3bf14ad7]*/
+_hashlib_openssl_sha384_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=2e0dc395b59ed726 input=1ea48f6f01e77cfb]*/
{
- return _hashlib_HASH(module, Py_hash_sha384, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha384, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_sha512
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha512 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha512_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=2c744c9e4a40d5f6 input=c5c46a2a817aa98f]*/
+_hashlib_openssl_sha512_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=4bdd760388dbfc0f input=3cf56903e07d1f5c]*/
{
- return _hashlib_HASH(module, Py_hash_sha512, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha512, data, string, usedforsecurity);
}
@@ -1192,77 +1207,81 @@ _hashlib_openssl_sha512_impl(PyObject *module, PyObject *data_obj,
/*[clinic input]
_hashlib.openssl_sha3_224
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha3-224 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha3_224_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=144641c1d144b974 input=e3a01b2888916157]*/
+_hashlib_openssl_sha3_224_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=6d8dc2a924f3ba35 input=7f14f16a9f6a3158]*/
{
- return _hashlib_HASH(module, Py_hash_sha3_224, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha3_224, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_sha3_256
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha3-256 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha3_256_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=c61f1ab772d06668 input=e2908126c1b6deed]*/
+_hashlib_openssl_sha3_256_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=9e520f537b3a4622 input=7987150939d5e352]*/
{
- return _hashlib_HASH(module, Py_hash_sha3_256, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha3_256, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_sha3_384
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha3-384 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha3_384_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=f68e4846858cf0ee input=ec0edf5c792f8252]*/
+_hashlib_openssl_sha3_384_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=d239ba0463fd6138 input=fc943401f67e3b81]*/
{
- return _hashlib_HASH(module, Py_hash_sha3_384, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha3_384, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_sha3_512
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a sha3-512 hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_sha3_512_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=2eede478c159354a input=64e2cc0c094d56f4]*/
+_hashlib_openssl_sha3_512_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=17662f21038c2278 input=6601ddd2c6c1516d]*/
{
- return _hashlib_HASH(module, Py_hash_sha3_512, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_sha3_512, data, string, usedforsecurity);
}
#endif /* PY_OPENSSL_HAS_SHA3 */
@@ -1270,42 +1289,46 @@ _hashlib_openssl_sha3_512_impl(PyObject *module, PyObject *data_obj,
/*[clinic input]
_hashlib.openssl_shake_128
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a shake-128 variable hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_shake_128_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=bc49cdd8ada1fa97 input=6c9d67440eb33ec8]*/
+_hashlib_openssl_shake_128_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=4e6afed8d18980ad input=373c3f1c93d87b37]*/
{
- return _hashlib_HASH(module, Py_hash_shake_128, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_shake_128, data, string, usedforsecurity);
}
/*[clinic input]
_hashlib.openssl_shake_256
- string as data_obj: object(py_default="b''") = NULL
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Returns a shake-256 variable hash object; optionally initialized with a string
[clinic start generated code]*/
static PyObject *
-_hashlib_openssl_shake_256_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity)
-/*[clinic end generated code: output=358d213be8852df7 input=479cbe9fefd4a9f8]*/
+_hashlib_openssl_shake_256_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string)
+/*[clinic end generated code: output=62481bce4a77d16c input=101c139ea2ddfcbf]*/
{
- return _hashlib_HASH(module, Py_hash_shake_256, data_obj, usedforsecurity);
+ CALL_HASHLIB_NEW(module, Py_hash_shake_256, data, string, usedforsecurity);
}
#endif /* PY_OPENSSL_HAS_SHAKE */
+#undef CALL_HASHLIB_NEW
+
/*[clinic input]
_hashlib.pbkdf2_hmac as pbkdf2_hmac
diff --git a/Modules/_interpchannelsmodule.c b/Modules/_interpchannelsmodule.c
index bfd805bf5e4..ea2e5f99dfa 100644
--- a/Modules/_interpchannelsmodule.c
+++ b/Modules/_interpchannelsmodule.c
@@ -254,10 +254,10 @@ _get_current_module_state(void)
{
PyObject *mod = _get_current_module();
if (mod == NULL) {
- // XXX import it?
- PyErr_SetString(PyExc_RuntimeError,
- MODULE_NAME_STR " module not imported yet");
- return NULL;
+ mod = PyImport_ImportModule(MODULE_NAME_STR);
+ if (mod == NULL) {
+ return NULL;
+ }
}
module_state *state = get_module_state(mod);
Py_DECREF(mod);
diff --git a/Modules/_interpqueuesmodule.c b/Modules/_interpqueuesmodule.c
index ffc52c8ee74..71d8fd8716c 100644
--- a/Modules/_interpqueuesmodule.c
+++ b/Modules/_interpqueuesmodule.c
@@ -1356,10 +1356,10 @@ _queueobj_from_xid(_PyXIData_t *data)
PyObject *mod = _get_current_module();
if (mod == NULL) {
- // XXX import it?
- PyErr_SetString(PyExc_RuntimeError,
- MODULE_NAME_STR " module not imported yet");
- return NULL;
+ mod = PyImport_ImportModule(MODULE_NAME_STR);
+ if (mod == NULL) {
+ return NULL;
+ }
}
PyTypeObject *cls = get_external_queue_type(mod);
diff --git a/Modules/_interpretersmodule.c b/Modules/_interpretersmodule.c
index 376517ab923..037e9544543 100644
--- a/Modules/_interpretersmodule.c
+++ b/Modules/_interpretersmodule.c
@@ -72,6 +72,32 @@ is_running_main(PyInterpreterState *interp)
}
+static inline int
+is_notshareable_raised(PyThreadState *tstate)
+{
+ PyObject *exctype = _PyXIData_GetNotShareableErrorType(tstate);
+ return _PyErr_ExceptionMatches(tstate, exctype);
+}
+
+static void
+unwrap_not_shareable(PyThreadState *tstate)
+{
+ if (!is_notshareable_raised(tstate)) {
+ return;
+ }
+ PyObject *exc = _PyErr_GetRaisedException(tstate);
+ PyObject *cause = PyException_GetCause(exc);
+ if (cause != NULL) {
+ Py_DECREF(exc);
+ exc = cause;
+ }
+ else {
+ assert(PyException_GetContext(exc) == NULL);
+ }
+ _PyErr_SetRaisedException(tstate, exc);
+}
+
+
/* Cross-interpreter Buffer Views *******************************************/
/* When a memoryview object is "shared" between interpreters,
@@ -320,10 +346,10 @@ _get_current_module_state(void)
{
PyObject *mod = _get_current_module();
if (mod == NULL) {
- // XXX import it?
- PyErr_SetString(PyExc_RuntimeError,
- MODULE_NAME_STR " module not imported yet");
- return NULL;
+ mod = PyImport_ImportModule(MODULE_NAME_STR);
+ if (mod == NULL) {
+ return NULL;
+ }
}
module_state *state = get_module_state(mod);
Py_DECREF(mod);
@@ -422,76 +448,265 @@ config_from_object(PyObject *configobj, PyInterpreterConfig *config)
}
+struct interp_call {
+ _PyXIData_t *func;
+ _PyXIData_t *args;
+ _PyXIData_t *kwargs;
+ struct {
+ _PyXIData_t func;
+ _PyXIData_t args;
+ _PyXIData_t kwargs;
+ } _preallocated;
+};
+
+static void
+_interp_call_clear(struct interp_call *call)
+{
+ if (call->func != NULL) {
+ _PyXIData_Clear(NULL, call->func);
+ }
+ if (call->args != NULL) {
+ _PyXIData_Clear(NULL, call->args);
+ }
+ if (call->kwargs != NULL) {
+ _PyXIData_Clear(NULL, call->kwargs);
+ }
+ *call = (struct interp_call){0};
+}
+
+static int
+_interp_call_pack(PyThreadState *tstate, struct interp_call *call,
+ PyObject *func, PyObject *args, PyObject *kwargs)
+{
+ xidata_fallback_t fallback = _PyXIDATA_FULL_FALLBACK;
+ assert(call->func == NULL);
+ assert(call->args == NULL);
+ assert(call->kwargs == NULL);
+ // Handle the func.
+ if (!PyCallable_Check(func)) {
+ _PyErr_Format(tstate, PyExc_TypeError,
+ "expected a callable, got %R", func);
+ return -1;
+ }
+ if (_PyFunction_GetXIData(tstate, func, &call->_preallocated.func) < 0) {
+ PyObject *exc = _PyErr_GetRaisedException(tstate);
+ if (_PyPickle_GetXIData(tstate, func, &call->_preallocated.func) < 0) {
+ _PyErr_SetRaisedException(tstate, exc);
+ return -1;
+ }
+ Py_DECREF(exc);
+ }
+ call->func = &call->_preallocated.func;
+ // Handle the args.
+ if (args == NULL || args == Py_None) {
+ // Leave it empty.
+ }
+ else {
+ assert(PyTuple_Check(args));
+ if (PyTuple_GET_SIZE(args) > 0) {
+ if (_PyObject_GetXIData(
+ tstate, args, fallback, &call->_preallocated.args) < 0)
+ {
+ _interp_call_clear(call);
+ return -1;
+ }
+ call->args = &call->_preallocated.args;
+ }
+ }
+ // Handle the kwargs.
+ if (kwargs == NULL || kwargs == Py_None) {
+ // Leave it empty.
+ }
+ else {
+ assert(PyDict_Check(kwargs));
+ if (PyDict_GET_SIZE(kwargs) > 0) {
+ if (_PyObject_GetXIData(
+ tstate, kwargs, fallback, &call->_preallocated.kwargs) < 0)
+ {
+ _interp_call_clear(call);
+ return -1;
+ }
+ call->kwargs = &call->_preallocated.kwargs;
+ }
+ }
+ return 0;
+}
+
+static int
+_interp_call_unpack(struct interp_call *call,
+ PyObject **p_func, PyObject **p_args, PyObject **p_kwargs)
+{
+ // Unpack the func.
+ PyObject *func = _PyXIData_NewObject(call->func);
+ if (func == NULL) {
+ return -1;
+ }
+ // Unpack the args.
+ PyObject *args;
+ if (call->args == NULL) {
+ args = PyTuple_New(0);
+ if (args == NULL) {
+ Py_DECREF(func);
+ return -1;
+ }
+ }
+ else {
+ args = _PyXIData_NewObject(call->args);
+ if (args == NULL) {
+ Py_DECREF(func);
+ return -1;
+ }
+ assert(PyTuple_Check(args));
+ }
+ // Unpack the kwargs.
+ PyObject *kwargs = NULL;
+ if (call->kwargs != NULL) {
+ kwargs = _PyXIData_NewObject(call->kwargs);
+ if (kwargs == NULL) {
+ Py_DECREF(func);
+ Py_DECREF(args);
+ return -1;
+ }
+ assert(PyDict_Check(kwargs));
+ }
+ *p_func = func;
+ *p_args = args;
+ *p_kwargs = kwargs;
+ return 0;
+}
+
static int
-_run_script(_PyXIData_t *script, PyObject *ns)
+_make_call(struct interp_call *call,
+ PyObject **p_result, _PyXI_errcode *p_errcode)
+{
+ assert(call != NULL && call->func != NULL);
+ PyThreadState *tstate = _PyThreadState_GET();
+
+ // Get the func and args.
+ PyObject *func = NULL, *args = NULL, *kwargs = NULL;
+ if (_interp_call_unpack(call, &func, &args, &kwargs) < 0) {
+ assert(func == NULL);
+ assert(args == NULL);
+ assert(kwargs == NULL);
+ *p_errcode = is_notshareable_raised(tstate)
+ ? _PyXI_ERR_NOT_SHAREABLE
+ : _PyXI_ERR_OTHER;
+ return -1;
+ }
+ *p_errcode = _PyXI_ERR_NO_ERROR;
+
+ // Make the call.
+ PyObject *resobj = PyObject_Call(func, args, kwargs);
+ Py_DECREF(func);
+ Py_XDECREF(args);
+ Py_XDECREF(kwargs);
+ if (resobj == NULL) {
+ return -1;
+ }
+ *p_result = resobj;
+ return 0;
+}
+
+static int
+_run_script(_PyXIData_t *script, PyObject *ns, _PyXI_errcode *p_errcode)
{
PyObject *code = _PyXIData_NewObject(script);
if (code == NULL) {
+ *p_errcode = _PyXI_ERR_NOT_SHAREABLE;
return -1;
}
PyObject *result = PyEval_EvalCode(code, ns, ns);
Py_DECREF(code);
if (result == NULL) {
+ *p_errcode = _PyXI_ERR_UNCAUGHT_EXCEPTION;
return -1;
}
+ assert(result == Py_None);
Py_DECREF(result); // We throw away the result.
return 0;
}
+struct run_result {
+ PyObject *result;
+ PyObject *excinfo;
+};
+
+static void
+_run_result_clear(struct run_result *runres)
+{
+ Py_CLEAR(runres->result);
+ Py_CLEAR(runres->excinfo);
+}
+
static int
-_exec_in_interpreter(PyThreadState *tstate, PyInterpreterState *interp,
- _PyXIData_t *script, PyObject *shareables,
- PyObject **p_excinfo)
+_run_in_interpreter(PyThreadState *tstate, PyInterpreterState *interp,
+ _PyXIData_t *script, struct interp_call *call,
+ PyObject *shareables, struct run_result *runres)
{
assert(!_PyErr_Occurred(tstate));
_PyXI_session *session = _PyXI_NewSession();
if (session == NULL) {
return -1;
}
+ _PyXI_session_result result = {0};
// Prep and switch interpreters.
- if (_PyXI_Enter(session, interp, shareables) < 0) {
- if (_PyErr_Occurred(tstate)) {
- // If an error occured at this step, it means that interp
- // was not prepared and switched.
- _PyXI_FreeSession(session);
- return -1;
- }
- // Now, apply the error from another interpreter:
- PyObject *excinfo = _PyXI_ApplyCapturedException(session);
- if (excinfo != NULL) {
- *p_excinfo = excinfo;
- }
- assert(PyErr_Occurred());
+ if (_PyXI_Enter(session, interp, shareables, &result) < 0) {
+        // If an error occurred at this step, it means that interp
+ // was not prepared and switched.
_PyXI_FreeSession(session);
+ assert(result.excinfo == NULL);
return -1;
}
- // Run the script.
+ // Run in the interpreter.
int res = -1;
- PyObject *mainns = _PyXI_GetMainNamespace(session);
- if (mainns == NULL) {
- goto finally;
+ _PyXI_errcode errcode = _PyXI_ERR_NO_ERROR;
+ if (script != NULL) {
+ assert(call == NULL);
+ PyObject *mainns = _PyXI_GetMainNamespace(session, &errcode);
+ if (mainns == NULL) {
+ goto finally;
+ }
+ res = _run_script(script, mainns, &errcode);
}
- res = _run_script(script, mainns);
+ else {
+ assert(call != NULL);
+ PyObject *resobj;
+ res = _make_call(call, &resobj, &errcode);
+ if (res == 0) {
+ res = _PyXI_Preserve(session, "resobj", resobj, &errcode);
+ Py_DECREF(resobj);
+ if (res < 0) {
+ goto finally;
+ }
+ }
+ }
+ int exitres;
finally:
// Clean up and switch back.
- _PyXI_Exit(session);
+ exitres = _PyXI_Exit(session, errcode, &result);
+ assert(res == 0 || exitres != 0);
+ _PyXI_FreeSession(session);
- // Propagate any exception out to the caller.
- assert(!PyErr_Occurred());
- if (res < 0) {
- PyObject *excinfo = _PyXI_ApplyCapturedException(session);
- if (excinfo != NULL) {
- *p_excinfo = excinfo;
- }
+ res = exitres;
+ if (_PyErr_Occurred(tstate)) {
+ assert(res < 0);
+ }
+ else if (res < 0) {
+ assert(result.excinfo != NULL);
+ runres->excinfo = Py_NewRef(result.excinfo);
+ res = -1;
}
else {
- assert(!_PyXI_HasCapturedException(session));
+ assert(result.excinfo == NULL);
+ runres->result = _PyXI_GetPreserved(&result, "resobj");
+ if (_PyErr_Occurred(tstate)) {
+ res = -1;
+ }
}
-
- _PyXI_FreeSession(session);
+ _PyXI_ClearResult(&result);
return res;
}
@@ -842,21 +1057,23 @@ interp_set___main___attrs(PyObject *self, PyObject *args, PyObject *kwargs)
}
// Prep and switch interpreters, including apply the updates.
- if (_PyXI_Enter(session, interp, updates) < 0) {
- if (!PyErr_Occurred()) {
- _PyXI_ApplyCapturedException(session);
- assert(PyErr_Occurred());
- }
- else {
- assert(!_PyXI_HasCapturedException(session));
- }
+ if (_PyXI_Enter(session, interp, updates, NULL) < 0) {
_PyXI_FreeSession(session);
return NULL;
}
// Clean up and switch back.
- _PyXI_Exit(session);
+ assert(!PyErr_Occurred());
+ int res = _PyXI_Exit(session, _PyXI_ERR_NO_ERROR, NULL);
_PyXI_FreeSession(session);
+ assert(res == 0);
+ if (res < 0) {
+ // unreachable
+ if (!PyErr_Occurred()) {
+ PyErr_SetString(PyExc_RuntimeError, "unresolved error");
+ }
+ return NULL;
+ }
Py_RETURN_NONE;
}
@@ -867,23 +1084,16 @@ PyDoc_STRVAR(set___main___attrs_doc,
Bind the given attributes in the interpreter's __main__ module.");
-static void
-unwrap_not_shareable(PyThreadState *tstate)
+static PyObject *
+_handle_script_error(struct run_result *runres)
{
- PyObject *exctype = _PyXIData_GetNotShareableErrorType(tstate);
- if (!_PyErr_ExceptionMatches(tstate, exctype)) {
- return;
- }
- PyObject *exc = _PyErr_GetRaisedException(tstate);
- PyObject *cause = PyException_GetCause(exc);
- if (cause != NULL) {
- Py_DECREF(exc);
- exc = cause;
- }
- else {
- assert(PyException_GetContext(exc) == NULL);
+ assert(runres->result == NULL);
+ if (runres->excinfo == NULL) {
+ assert(PyErr_Occurred());
+ return NULL;
}
- _PyErr_SetRaisedException(tstate, exc);
+ assert(!PyErr_Occurred());
+ return runres->excinfo;
}
static PyObject *
@@ -918,13 +1128,14 @@ interp_exec(PyObject *self, PyObject *args, PyObject *kwds)
return NULL;
}
- PyObject *excinfo = NULL;
- int res = _exec_in_interpreter(tstate, interp, &xidata, shared, &excinfo);
+ struct run_result runres = {0};
+ int res = _run_in_interpreter(
+ tstate, interp, &xidata, NULL, shared, &runres);
_PyXIData_Release(&xidata);
if (res < 0) {
- assert((excinfo == NULL) != (PyErr_Occurred() == NULL));
- return excinfo;
+ return _handle_script_error(&runres);
}
+ assert(runres.result == NULL);
Py_RETURN_NONE;
#undef FUNCNAME
}
@@ -981,13 +1192,14 @@ interp_run_string(PyObject *self, PyObject *args, PyObject *kwds)
return NULL;
}
- PyObject *excinfo = NULL;
- int res = _exec_in_interpreter(tstate, interp, &xidata, shared, &excinfo);
+ struct run_result runres = {0};
+ int res = _run_in_interpreter(
+ tstate, interp, &xidata, NULL, shared, &runres);
_PyXIData_Release(&xidata);
if (res < 0) {
- assert((excinfo == NULL) != (PyErr_Occurred() == NULL));
- return excinfo;
+ return _handle_script_error(&runres);
}
+ assert(runres.result == NULL);
Py_RETURN_NONE;
#undef FUNCNAME
}
@@ -1043,13 +1255,14 @@ interp_run_func(PyObject *self, PyObject *args, PyObject *kwds)
return NULL;
}
- PyObject *excinfo = NULL;
- int res = _exec_in_interpreter(tstate, interp, &xidata, shared, &excinfo);
+ struct run_result runres = {0};
+ int res = _run_in_interpreter(
+ tstate, interp, &xidata, NULL, shared, &runres);
_PyXIData_Release(&xidata);
if (res < 0) {
- assert((excinfo == NULL) != (PyErr_Occurred() == NULL));
- return excinfo;
+ return _handle_script_error(&runres);
}
+ assert(runres.result == NULL);
Py_RETURN_NONE;
#undef FUNCNAME
}
@@ -1069,15 +1282,18 @@ interp_call(PyObject *self, PyObject *args, PyObject *kwds)
#define FUNCNAME MODULE_NAME_STR ".call"
PyThreadState *tstate = _PyThreadState_GET();
static char *kwlist[] = {"id", "callable", "args", "kwargs",
- "restrict", NULL};
+ "preserve_exc", "restrict", NULL};
PyObject *id, *callable;
PyObject *args_obj = NULL;
PyObject *kwargs_obj = NULL;
+ int preserve_exc = 0;
int restricted = 0;
if (!PyArg_ParseTupleAndKeywords(args, kwds,
- "OO|OO$p:" FUNCNAME, kwlist,
- &id, &callable, &args_obj, &kwargs_obj,
- &restricted))
+ "OO|O!O!$pp:" FUNCNAME, kwlist,
+ &id, &callable,
+ &PyTuple_Type, &args_obj,
+ &PyDict_Type, &kwargs_obj,
+ &preserve_exc, &restricted))
{
return NULL;
}
@@ -1089,29 +1305,29 @@ interp_call(PyObject *self, PyObject *args, PyObject *kwds)
return NULL;
}
- if (args_obj != NULL) {
- _PyErr_SetString(tstate, PyExc_ValueError, "got unexpected args");
- return NULL;
- }
- if (kwargs_obj != NULL) {
- _PyErr_SetString(tstate, PyExc_ValueError, "got unexpected kwargs");
+ struct interp_call call = {0};
+ if (_interp_call_pack(tstate, &call, callable, args_obj, kwargs_obj) < 0) {
return NULL;
}
- _PyXIData_t xidata = {0};
- if (_PyCode_GetPureScriptXIData(tstate, callable, &xidata) < 0) {
- unwrap_not_shareable(tstate);
- return NULL;
+ PyObject *res_and_exc = NULL;
+ struct run_result runres = {0};
+ if (_run_in_interpreter(tstate, interp, NULL, &call, NULL, &runres) < 0) {
+ if (runres.excinfo == NULL) {
+ assert(_PyErr_Occurred(tstate));
+ goto finally;
+ }
+ assert(!_PyErr_Occurred(tstate));
}
+ assert(runres.result == NULL || runres.excinfo == NULL);
+ res_and_exc = Py_BuildValue("OO",
+ (runres.result ? runres.result : Py_None),
+ (runres.excinfo ? runres.excinfo : Py_None));
- PyObject *excinfo = NULL;
- int res = _exec_in_interpreter(tstate, interp, &xidata, NULL, &excinfo);
- _PyXIData_Release(&xidata);
- if (res < 0) {
- assert((excinfo == NULL) != (PyErr_Occurred() == NULL));
- return excinfo;
- }
- Py_RETURN_NONE;
+finally:
+ _interp_call_clear(&call);
+ _run_result_clear(&runres);
+ return res_and_exc;
#undef FUNCNAME
}
@@ -1119,13 +1335,7 @@ PyDoc_STRVAR(call_doc,
"call(id, callable, args=None, kwargs=None, *, restrict=False)\n\
\n\
Call the provided object in the identified interpreter.\n\
-Pass the given args and kwargs, if possible.\n\
-\n\
-\"callable\" may be a plain function with no free vars that takes\n\
-no arguments.\n\
-\n\
-The function's code object is used and all its state\n\
-is ignored, including its __globals__ dict.");
+Pass the given args and kwargs, if possible.");
static PyObject *
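Taken together, these changes let the private ``_interpreters.call()`` accept positional and keyword arguments and return a ``(result, excinfo)`` pair. A heavily hedged sketch of the low-level API as it appears in this diff (the module is private and its surface may differ)::

    import _interpreters

    interp_id = _interpreters.create()
    res, excinfo = _interpreters.call(interp_id, eval, ("2 + 2",))
    # On success: res == 4 and excinfo is None; on failure, res is None
    # and excinfo describes the exception raised in the other interpreter.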
diff --git a/Modules/_io/textio.c b/Modules/_io/textio.c
index 86328e46a7b..3808ecdceb9 100644
--- a/Modules/_io/textio.c
+++ b/Modules/_io/textio.c
@@ -1578,6 +1578,8 @@ _io_TextIOWrapper_detach_impl(textio *self)
static int
_textiowrapper_writeflush(textio *self)
{
+ _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(self);
+
if (self->pending_bytes == NULL)
return 0;
@@ -3173,8 +3175,9 @@ _io_TextIOWrapper_close_impl(textio *self)
}
static PyObject *
-textiowrapper_iternext(PyObject *op)
+textiowrapper_iternext_lock_held(PyObject *op)
{
+ _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(op);
PyObject *line;
textio *self = textio_CAST(op);
@@ -3210,6 +3213,16 @@ textiowrapper_iternext(PyObject *op)
return line;
}
+static PyObject *
+textiowrapper_iternext(PyObject *op)
+{
+ PyObject *result;
+ Py_BEGIN_CRITICAL_SECTION(op);
+ result = textiowrapper_iternext_lock_held(op);
+ Py_END_CRITICAL_SECTION();
+ return result;
+}
+
/*[clinic input]
@critical_section
@getter
diff --git a/Modules/_json.c b/Modules/_json.c
index 4aa6ae65065..6b5f6ea42df 100644
--- a/Modules/_json.c
+++ b/Modules/_json.c
@@ -360,13 +360,6 @@ _build_rval_index_tuple(PyObject *rval, Py_ssize_t idx) {
return tpl;
}
-static inline int
-_PyUnicodeWriter_IsEmpty(PyUnicodeWriter *writer_pub)
-{
- _PyUnicodeWriter *writer = (_PyUnicodeWriter*)writer_pub;
- return (writer->pos == 0);
-}
-
static PyObject *
scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next_end_ptr)
{
@@ -385,10 +378,7 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next
const void *buf;
int kind;
- PyUnicodeWriter *writer = PyUnicodeWriter_Create(0);
- if (writer == NULL) {
- goto bail;
- }
+ PyUnicodeWriter *writer = NULL;
len = PyUnicode_GET_LENGTH(pystr);
buf = PyUnicode_DATA(pystr);
@@ -419,12 +409,11 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next
if (c == '"') {
// Fast path for simple case.
- if (_PyUnicodeWriter_IsEmpty(writer)) {
+ if (writer == NULL) {
PyObject *ret = PyUnicode_Substring(pystr, end, next);
if (ret == NULL) {
goto bail;
}
- PyUnicodeWriter_Discard(writer);
*next_end_ptr = next + 1;;
return ret;
}
@@ -432,6 +421,11 @@ scanstring_unicode(PyObject *pystr, Py_ssize_t end, int strict, Py_ssize_t *next
else if (c != '\\') {
raise_errmsg("Unterminated string starting at", pystr, begin);
goto bail;
+ } else if (writer == NULL) {
+ writer = PyUnicodeWriter_Create(0);
+ if (writer == NULL) {
+ goto bail;
+ }
}
/* Pick up this chunk if it's not zero length */
@@ -1609,6 +1603,12 @@ encoder_encode_key_value(PyEncoderObject *s, PyUnicodeWriter *writer, bool *firs
if (*first) {
*first = false;
+ if (s->indent != Py_None) {
+ if (write_newline_indent(writer, indent_level, indent_cache) < 0) {
+ Py_DECREF(keystr);
+ return -1;
+ }
+ }
}
else {
if (PyUnicodeWriter_WriteStr(writer, item_separator) < 0) {
@@ -1676,11 +1676,8 @@ encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer,
if (s->indent != Py_None) {
indent_level++;
separator = get_item_separator(s, indent_level, indent_cache);
- if (separator == NULL ||
- write_newline_indent(writer, indent_level, indent_cache) < 0)
- {
+ if (separator == NULL)
goto bail;
- }
}
if (s->sort_keys || !PyDict_CheckExact(dct)) {
@@ -1720,7 +1717,7 @@ encoder_listencode_dict(PyEncoderObject *s, PyUnicodeWriter *writer,
goto bail;
Py_CLEAR(ident);
}
- if (s->indent != Py_None) {
+ if (s->indent != Py_None && !first) {
indent_level--;
if (write_newline_indent(writer, indent_level, indent_cache) < 0) {
goto bail;
diff --git a/Modules/_randommodule.c b/Modules/_randommodule.c
index d5bac2f5b78..2f4f388ce11 100644
--- a/Modules/_randommodule.c
+++ b/Modules/_randommodule.c
@@ -497,34 +497,32 @@ _random_Random_setstate_impl(RandomObject *self, PyObject *state)
_random.Random.getrandbits
self: self(type="RandomObject *")
- k: int
+ k: uint64
/
getrandbits(k) -> x. Generates an int with k random bits.
[clinic start generated code]*/
static PyObject *
-_random_Random_getrandbits_impl(RandomObject *self, int k)
-/*[clinic end generated code: output=b402f82a2158887f input=87603cd60f79f730]*/
+_random_Random_getrandbits_impl(RandomObject *self, uint64_t k)
+/*[clinic end generated code: output=c30ef8435f3433cf input=64226ac13bb4d2a3]*/
{
- int i, words;
+ Py_ssize_t i, words;
uint32_t r;
uint32_t *wordarray;
PyObject *result;
- if (k < 0) {
- PyErr_SetString(PyExc_ValueError,
- "number of bits must be non-negative");
- return NULL;
- }
-
if (k == 0)
return PyLong_FromLong(0);
if (k <= 32) /* Fast path */
return PyLong_FromUnsignedLong(genrand_uint32(self) >> (32 - k));
- words = (k - 1) / 32 + 1;
+ if ((k - 1u) / 32u + 1u > PY_SSIZE_T_MAX / 4u) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+ words = (k - 1u) / 32u + 1u;
wordarray = (uint32_t *)PyMem_Malloc(words * 4);
if (wordarray == NULL) {
PyErr_NoMemory();
diff --git a/Modules/_remote_debugging_module.c b/Modules/_remote_debugging_module.c
index 86e269f3124..ea58f38006e 100644
--- a/Modules/_remote_debugging_module.c
+++ b/Modules/_remote_debugging_module.c
@@ -1562,9 +1562,9 @@ done_tlbc:
Py_INCREF(meta->func_name);
Py_INCREF(meta->file_name);
- PyTuple_SET_ITEM(tuple, 0, meta->func_name);
- PyTuple_SET_ITEM(tuple, 1, meta->file_name);
- PyTuple_SET_ITEM(tuple, 2, lineno);
+ PyTuple_SET_ITEM(tuple, 0, meta->file_name);
+ PyTuple_SET_ITEM(tuple, 1, lineno);
+ PyTuple_SET_ITEM(tuple, 2, meta->func_name);
*result = tuple;
return 0;
@@ -2921,4 +2921,4 @@ PyMODINIT_FUNC
PyInit__remote_debugging(void)
{
return PyModuleDef_Init(&remote_debugging_module);
-} \ No newline at end of file
+}
diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c
index 136e6a7a015..845c218e679 100644
--- a/Modules/_testinternalcapi.c
+++ b/Modules/_testinternalcapi.c
@@ -1045,6 +1045,9 @@ get_code_var_counts(PyObject *self, PyObject *_args, PyObject *_kwargs)
#define SET_COUNT(DICT, STRUCT, NAME) \
do { \
PyObject *count = PyLong_FromLong(STRUCT.NAME); \
+ if (count == NULL) { \
+ goto error; \
+ } \
int res = PyDict_SetItemString(DICT, #NAME, count); \
Py_DECREF(count); \
if (res < 0) { \
diff --git a/Modules/_zstd/_zstdmodule.c b/Modules/_zstd/_zstdmodule.c
index 5ad697d2b83..d75c0779474 100644
--- a/Modules/_zstd/_zstdmodule.c
+++ b/Modules/_zstd/_zstdmodule.c
@@ -7,7 +7,6 @@
#include "Python.h"
#include "_zstdmodule.h"
-#include "zstddict.h"
#include <zstd.h> // ZSTD_*()
#include <zdict.h> // ZDICT_*()
@@ -20,14 +19,52 @@ module _zstd
#include "clinic/_zstdmodule.c.h"
+ZstdDict *
+_Py_parse_zstd_dict(const _zstd_state *state, PyObject *dict, int *ptype)
+{
+ if (state == NULL) {
+ return NULL;
+ }
+
+ /* Check ZstdDict */
+ if (PyObject_TypeCheck(dict, state->ZstdDict_type)) {
+ return (ZstdDict*)dict;
+ }
+
+ /* Check (ZstdDict, type) */
+ if (PyTuple_CheckExact(dict) && PyTuple_GET_SIZE(dict) == 2
+ && PyObject_TypeCheck(PyTuple_GET_ITEM(dict, 0), state->ZstdDict_type)
+ && PyLong_Check(PyTuple_GET_ITEM(dict, 1)))
+ {
+ int type = PyLong_AsInt(PyTuple_GET_ITEM(dict, 1));
+ if (type == -1 && PyErr_Occurred()) {
+ return NULL;
+ }
+ if (type == DICT_TYPE_DIGESTED
+ || type == DICT_TYPE_UNDIGESTED
+ || type == DICT_TYPE_PREFIX)
+ {
+ *ptype = type;
+ return (ZstdDict*)PyTuple_GET_ITEM(dict, 0);
+ }
+ }
+
+ /* Wrong type */
+ PyErr_SetString(PyExc_TypeError,
+ "zstd_dict argument should be a ZstdDict object.");
+ return NULL;
+}
+
/* Format error message and set ZstdError. */
void
-set_zstd_error(const _zstd_state* const state,
- error_type type, size_t zstd_ret)
+set_zstd_error(const _zstd_state *state, error_type type, size_t zstd_ret)
{
- char *msg;
+ const char *msg;
assert(ZSTD_isError(zstd_ret));
+ if (state == NULL) {
+ return;
+ }
switch (type) {
case ERR_DECOMPRESS:
msg = "Unable to decompress Zstandard data: %s";
@@ -35,6 +72,9 @@ set_zstd_error(const _zstd_state* const state,
case ERR_COMPRESS:
msg = "Unable to compress Zstandard data: %s";
break;
+ case ERR_SET_PLEDGED_INPUT_SIZE:
+ msg = "Unable to set pledged uncompressed content size: %s";
+ break;
case ERR_LOAD_D_DICT:
msg = "Unable to load Zstandard dictionary or prefix for "
@@ -174,7 +214,7 @@ calculate_samples_stats(PyBytesObject *samples_bytes, PyObject *samples_sizes,
Py_ssize_t sizes_sum;
Py_ssize_t i;
- chunks_number = Py_SIZE(samples_sizes);
+ chunks_number = PyTuple_GET_SIZE(samples_sizes);
if ((size_t) chunks_number > UINT32_MAX) {
PyErr_Format(PyExc_ValueError,
"The number of samples should be <= %u.", UINT32_MAX);
@@ -188,20 +228,24 @@ calculate_samples_stats(PyBytesObject *samples_bytes, PyObject *samples_sizes,
return -1;
}
- sizes_sum = 0;
+ sizes_sum = PyBytes_GET_SIZE(samples_bytes);
for (i = 0; i < chunks_number; i++) {
- PyObject *size = PyTuple_GetItem(samples_sizes, i);
- (*chunk_sizes)[i] = PyLong_AsSize_t(size);
- if ((*chunk_sizes)[i] == (size_t)-1 && PyErr_Occurred()) {
- PyErr_Format(PyExc_ValueError,
- "Items in samples_sizes should be an int "
- "object, with a value between 0 and %u.", SIZE_MAX);
+ size_t size = PyLong_AsSize_t(PyTuple_GET_ITEM(samples_sizes, i));
+ (*chunk_sizes)[i] = size;
+ if (size == (size_t)-1 && PyErr_Occurred()) {
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ goto sum_error;
+ }
return -1;
}
- sizes_sum += (*chunk_sizes)[i];
+ if ((size_t)sizes_sum < size) {
+ goto sum_error;
+ }
+ sizes_sum -= size;
}
- if (sizes_sum != Py_SIZE(samples_bytes)) {
+ if (sizes_sum != 0) {
+sum_error:
PyErr_SetString(PyExc_ValueError,
"The samples size tuple doesn't match the "
"concatenation's size.");
@@ -257,7 +301,7 @@ _zstd_train_dict_impl(PyObject *module, PyBytesObject *samples_bytes,
/* Train the dictionary */
char *dst_dict_buffer = PyBytes_AS_STRING(dst_dict_bytes);
- char *samples_buffer = PyBytes_AS_STRING(samples_bytes);
+ const char *samples_buffer = PyBytes_AS_STRING(samples_bytes);
Py_BEGIN_ALLOW_THREADS
zstd_ret = ZDICT_trainFromBuffer(dst_dict_buffer, dict_size,
samples_buffer,
@@ -507,20 +551,10 @@ _zstd_set_parameter_types_impl(PyObject *module, PyObject *c_parameter_type,
{
_zstd_state* mod_state = get_zstd_state(module);
- if (!PyType_Check(c_parameter_type) || !PyType_Check(d_parameter_type)) {
- PyErr_SetString(PyExc_ValueError,
- "The two arguments should be CompressionParameter and "
- "DecompressionParameter types.");
- return NULL;
- }
-
- Py_XDECREF(mod_state->CParameter_type);
Py_INCREF(c_parameter_type);
- mod_state->CParameter_type = (PyTypeObject*)c_parameter_type;
-
- Py_XDECREF(mod_state->DParameter_type);
+ Py_XSETREF(mod_state->CParameter_type, (PyTypeObject*)c_parameter_type);
Py_INCREF(d_parameter_type);
- mod_state->DParameter_type = (PyTypeObject*)d_parameter_type;
+ Py_XSETREF(mod_state->DParameter_type, (PyTypeObject*)d_parameter_type);
Py_RETURN_NONE;
}
@@ -583,7 +617,6 @@ do { \
return -1;
}
if (PyModule_AddType(m, (PyTypeObject *)mod_state->ZstdError) < 0) {
- Py_DECREF(mod_state->ZstdError);
return -1;
}
diff --git a/Modules/_zstd/_zstdmodule.h b/Modules/_zstd/_zstdmodule.h
index 1f4160f474f..4e8f708f223 100644
--- a/Modules/_zstd/_zstdmodule.h
+++ b/Modules/_zstd/_zstdmodule.h
@@ -5,6 +5,8 @@
#ifndef ZSTD_MODULE_H
#define ZSTD_MODULE_H
+#include "zstddict.h"
+
/* Type specs */
extern PyType_Spec zstd_dict_type_spec;
extern PyType_Spec zstd_compressor_type_spec;
@@ -25,6 +27,7 @@ typedef struct {
typedef enum {
ERR_DECOMPRESS,
ERR_COMPRESS,
+ ERR_SET_PLEDGED_INPUT_SIZE,
ERR_LOAD_D_DICT,
ERR_LOAD_C_DICT,
@@ -43,10 +46,14 @@ typedef enum {
DICT_TYPE_PREFIX = 2
} dictionary_type;
+extern ZstdDict *
+_Py_parse_zstd_dict(const _zstd_state *state,
+ PyObject *dict, int *type);
+
/* Format error message and set ZstdError. */
extern void
-set_zstd_error(const _zstd_state* const state,
- const error_type type, size_t zstd_ret);
+set_zstd_error(const _zstd_state *state,
+ error_type type, size_t zstd_ret);
extern void
set_parameter_error(int is_compress, int key_v, int value_v);
diff --git a/Modules/_zstd/clinic/compressor.c.h b/Modules/_zstd/clinic/compressor.c.h
index f69161b590e..4f8d93fd9e8 100644
--- a/Modules/_zstd/clinic/compressor.c.h
+++ b/Modules/_zstd/clinic/compressor.c.h
@@ -252,4 +252,43 @@ skip_optional_pos:
exit:
return return_value;
}
-/*[clinic end generated code: output=ee2d1dc298de790c input=a9049054013a1b77]*/
+
+PyDoc_STRVAR(_zstd_ZstdCompressor_set_pledged_input_size__doc__,
+"set_pledged_input_size($self, size, /)\n"
+"--\n"
+"\n"
+"Set the uncompressed content size to be written into the frame header.\n"
+"\n"
+" size\n"
+" The size of the uncompressed data to be provided to the compressor.\n"
+"\n"
+"This method can be used to ensure the header of the frame about to be written\n"
+"includes the size of the data, unless the CompressionParameter.content_size_flag\n"
+"is set to False. If last_mode != FLUSH_FRAME, then a RuntimeError is raised.\n"
+"\n"
+"It is important to ensure that the pledged data size matches the actual data\n"
+"size. If they do not match the compressed output data may be corrupted and the\n"
+"final chunk written may be lost.");
+
+#define _ZSTD_ZSTDCOMPRESSOR_SET_PLEDGED_INPUT_SIZE_METHODDEF \
+ {"set_pledged_input_size", (PyCFunction)_zstd_ZstdCompressor_set_pledged_input_size, METH_O, _zstd_ZstdCompressor_set_pledged_input_size__doc__},
+
+static PyObject *
+_zstd_ZstdCompressor_set_pledged_input_size_impl(ZstdCompressor *self,
+ unsigned long long size);
+
+static PyObject *
+_zstd_ZstdCompressor_set_pledged_input_size(PyObject *self, PyObject *arg)
+{
+ PyObject *return_value = NULL;
+ unsigned long long size;
+
+ if (!zstd_contentsize_converter(arg, &size)) {
+ goto exit;
+ }
+ return_value = _zstd_ZstdCompressor_set_pledged_input_size_impl((ZstdCompressor *)self, size);
+
+exit:
+ return return_value;
+}
+/*[clinic end generated code: output=c1d5c2cf06a8becd input=a9049054013a1b77]*/
diff --git a/Modules/_zstd/compressor.c b/Modules/_zstd/compressor.c
index 0fc3d7d36c6..bc9e6eff89a 100644
--- a/Modules/_zstd/compressor.c
+++ b/Modules/_zstd/compressor.c
@@ -16,7 +16,6 @@ class _zstd.ZstdCompressor "ZstdCompressor *" "&zstd_compressor_type_spec"
#include "_zstdmodule.h"
#include "buffer.h"
-#include "zstddict.h"
#include "internal/pycore_lock.h" // PyMutex_IsLocked
#include <stddef.h> // offsetof()
@@ -46,6 +45,52 @@ typedef struct {
#define ZstdCompressor_CAST(op) ((ZstdCompressor *)op)
+/*[python input]
+
+class zstd_contentsize_converter(CConverter):
+ type = 'unsigned long long'
+ converter = 'zstd_contentsize_converter'
+
+[python start generated code]*/
+/*[python end generated code: output=da39a3ee5e6b4b0d input=0932c350d633c7de]*/
+
+
+static int
+zstd_contentsize_converter(PyObject *size, unsigned long long *p)
+{
+ // None means the user indicates the size is unknown.
+ if (size == Py_None) {
+ *p = ZSTD_CONTENTSIZE_UNKNOWN;
+ }
+ else {
+ /* ZSTD_CONTENTSIZE_UNKNOWN is 0ULL - 1
+ ZSTD_CONTENTSIZE_ERROR is 0ULL - 2
+ Users should only pass values < ZSTD_CONTENTSIZE_ERROR */
+ unsigned long long pledged_size = PyLong_AsUnsignedLongLong(size);
+ /* Here we check for (unsigned long long)-1 as a sign of an error in
+ PyLong_AsUnsignedLongLong */
+ if (pledged_size == (unsigned long long)-1 && PyErr_Occurred()) {
+ *p = ZSTD_CONTENTSIZE_ERROR;
+ if (PyErr_ExceptionMatches(PyExc_OverflowError)) {
+ PyErr_Format(PyExc_ValueError,
+ "size argument should be a positive int less "
+                    "than %llu", ZSTD_CONTENTSIZE_ERROR);
+ return 0;
+ }
+ return 0;
+ }
+ if (pledged_size >= ZSTD_CONTENTSIZE_ERROR) {
+ *p = ZSTD_CONTENTSIZE_ERROR;
+ PyErr_Format(PyExc_ValueError,
+ "size argument should be a positive int less "
+                "than %llu", ZSTD_CONTENTSIZE_ERROR);
+ return 0;
+ }
+ *p = pledged_size;
+ }
+ return 1;
+}
+
#include "clinic/compressor.c.h"
static int
@@ -71,9 +116,6 @@ _zstd_set_c_level(ZstdCompressor *self, int level)
/* Check error */
if (ZSTD_isError(zstd_ret)) {
_zstd_state* mod_state = PyType_GetModuleState(Py_TYPE(self));
- if (mod_state == NULL) {
- return -1;
- }
set_zstd_error(mod_state, ERR_SET_C_LEVEL, zstd_ret);
return -1;
}
@@ -265,56 +307,17 @@ static int
_zstd_load_c_dict(ZstdCompressor *self, PyObject *dict)
{
_zstd_state* mod_state = PyType_GetModuleState(Py_TYPE(self));
- if (mod_state == NULL) {
- return -1;
- }
- ZstdDict *zd;
- int type, ret;
-
- /* Check ZstdDict */
- ret = PyObject_IsInstance(dict, (PyObject*)mod_state->ZstdDict_type);
- if (ret < 0) {
+ /* When compressing, use undigested dictionary by default. */
+ int type = DICT_TYPE_UNDIGESTED;
+ ZstdDict *zd = _Py_parse_zstd_dict(mod_state, dict, &type);
+ if (zd == NULL) {
return -1;
}
- else if (ret > 0) {
- /* When compressing, use undigested dictionary by default. */
- zd = (ZstdDict*)dict;
- type = DICT_TYPE_UNDIGESTED;
- PyMutex_Lock(&zd->lock);
- ret = _zstd_load_impl(self, zd, mod_state, type);
- PyMutex_Unlock(&zd->lock);
- return ret;
- }
-
- /* Check (ZstdDict, type) */
- if (PyTuple_CheckExact(dict) && PyTuple_GET_SIZE(dict) == 2) {
- /* Check ZstdDict */
- ret = PyObject_IsInstance(PyTuple_GET_ITEM(dict, 0),
- (PyObject*)mod_state->ZstdDict_type);
- if (ret < 0) {
- return -1;
- }
- else if (ret > 0) {
- /* type == -1 may indicate an error. */
- type = PyLong_AsInt(PyTuple_GET_ITEM(dict, 1));
- if (type == DICT_TYPE_DIGESTED
- || type == DICT_TYPE_UNDIGESTED
- || type == DICT_TYPE_PREFIX)
- {
- assert(type >= 0);
- zd = (ZstdDict*)PyTuple_GET_ITEM(dict, 0);
- PyMutex_Lock(&zd->lock);
- ret = _zstd_load_impl(self, zd, mod_state, type);
- PyMutex_Unlock(&zd->lock);
- return ret;
- }
- }
- }
-
- /* Wrong type */
- PyErr_SetString(PyExc_TypeError,
- "zstd_dict argument should be ZstdDict object.");
- return -1;
+ int ret;
+ PyMutex_Lock(&zd->lock);
+ ret = _zstd_load_impl(self, zd, mod_state, type);
+ PyMutex_Unlock(&zd->lock);
+ return ret;
}
/*[clinic input]
@@ -481,9 +484,7 @@ compress_lock_held(ZstdCompressor *self, Py_buffer *data,
/* Check error */
if (ZSTD_isError(zstd_ret)) {
_zstd_state* mod_state = PyType_GetModuleState(Py_TYPE(self));
- if (mod_state != NULL) {
- set_zstd_error(mod_state, ERR_COMPRESS, zstd_ret);
- }
+ set_zstd_error(mod_state, ERR_COMPRESS, zstd_ret);
goto error;
}
@@ -512,7 +513,7 @@ error:
return NULL;
}
-#ifdef Py_DEBUG
+#ifndef NDEBUG
static inline int
mt_continue_should_break(ZSTD_inBuffer *in, ZSTD_outBuffer *out)
{
@@ -553,9 +554,7 @@ compress_mt_continue_lock_held(ZstdCompressor *self, Py_buffer *data)
/* Check error */
if (ZSTD_isError(zstd_ret)) {
_zstd_state* mod_state = PyType_GetModuleState(Py_TYPE(self));
- if (mod_state != NULL) {
- set_zstd_error(mod_state, ERR_COMPRESS, zstd_ret);
- }
+ set_zstd_error(mod_state, ERR_COMPRESS, zstd_ret);
goto error;
}
@@ -690,9 +689,61 @@ _zstd_ZstdCompressor_flush_impl(ZstdCompressor *self, int mode)
return ret;
}
+
+/*[clinic input]
+_zstd.ZstdCompressor.set_pledged_input_size
+
+ size: zstd_contentsize
+ The size of the uncompressed data to be provided to the compressor.
+ /
+
+Set the uncompressed content size to be written into the frame header.
+
+This method can be used to ensure the header of the frame about to be written
+includes the size of the data, unless the CompressionParameter.content_size_flag
+is set to False. If last_mode != FLUSH_FRAME, then a ValueError is raised.
+
+It is important to ensure that the pledged data size matches the actual data
+size. If they do not match the compressed output data may be corrupted and the
+final chunk written may be lost.
+[clinic start generated code]*/
+
+static PyObject *
+_zstd_ZstdCompressor_set_pledged_input_size_impl(ZstdCompressor *self,
+ unsigned long long size)
+/*[clinic end generated code: output=3a09e55cc0e3b4f9 input=afd8a7d78cff2eb5]*/
+{
+    // Error occurred while converting argument; should be unreachable
+ assert(size != ZSTD_CONTENTSIZE_ERROR);
+
+ /* Thread-safe code */
+ PyMutex_Lock(&self->lock);
+
+ /* Check the current mode */
+ if (self->last_mode != ZSTD_e_end) {
+ PyErr_SetString(PyExc_ValueError,
+ "set_pledged_input_size() method must be called "
+ "when last_mode == FLUSH_FRAME");
+ PyMutex_Unlock(&self->lock);
+ return NULL;
+ }
+
+ /* Set pledged content size */
+ size_t zstd_ret = ZSTD_CCtx_setPledgedSrcSize(self->cctx, size);
+ PyMutex_Unlock(&self->lock);
+ if (ZSTD_isError(zstd_ret)) {
+ _zstd_state* mod_state = PyType_GetModuleState(Py_TYPE(self));
+ set_zstd_error(mod_state, ERR_SET_PLEDGED_INPUT_SIZE, zstd_ret);
+ return NULL;
+ }
+
+ Py_RETURN_NONE;
+}
+
static PyMethodDef ZstdCompressor_methods[] = {
_ZSTD_ZSTDCOMPRESSOR_COMPRESS_METHODDEF
_ZSTD_ZSTDCOMPRESSOR_FLUSH_METHODDEF
+ _ZSTD_ZSTDCOMPRESSOR_SET_PLEDGED_INPUT_SIZE_METHODDEF
{NULL, NULL}
};
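A brief, hedged usage sketch of the new method (assuming it is exposed on ``compression.zstd.ZstdCompressor`` as the clinic input above suggests)::

    from compression.zstd import ZstdCompressor

    data = b"example payload" * 1024
    c = ZstdCompressor()
    c.set_pledged_input_size(len(data))    # record the content size in the frame header
    frame = c.compress(data) + c.flush()   # the pledged size must match what is actually written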
diff --git a/Modules/_zstd/decompressor.c b/Modules/_zstd/decompressor.c
index 26e568cf433..c53d6e4cb05 100644
--- a/Modules/_zstd/decompressor.c
+++ b/Modules/_zstd/decompressor.c
@@ -16,7 +16,6 @@ class _zstd.ZstdDecompressor "ZstdDecompressor *" "&zstd_decompressor_type_spec"
#include "_zstdmodule.h"
#include "buffer.h"
-#include "zstddict.h"
#include "internal/pycore_lock.h" // PyMutex_IsLocked
#include <stdbool.h> // bool
@@ -61,11 +60,6 @@ _get_DDict(ZstdDict *self)
assert(PyMutex_IsLocked(&self->lock));
ZSTD_DDict *ret;
- /* Already created */
- if (self->d_dict != NULL) {
- return self->d_dict;
- }
-
if (self->d_dict == NULL) {
/* Create ZSTD_DDict instance from dictionary content */
Py_BEGIN_ALLOW_THREADS
@@ -182,56 +176,17 @@ static int
_zstd_load_d_dict(ZstdDecompressor *self, PyObject *dict)
{
_zstd_state* mod_state = PyType_GetModuleState(Py_TYPE(self));
- if (mod_state == NULL) {
- return -1;
- }
- ZstdDict *zd;
- int type, ret;
-
- /* Check ZstdDict */
- ret = PyObject_IsInstance(dict, (PyObject*)mod_state->ZstdDict_type);
- if (ret < 0) {
+ /* When decompressing, use digested dictionary by default. */
+ int type = DICT_TYPE_DIGESTED;
+ ZstdDict *zd = _Py_parse_zstd_dict(mod_state, dict, &type);
+ if (zd == NULL) {
return -1;
}
- else if (ret > 0) {
- /* When decompressing, use digested dictionary by default. */
- zd = (ZstdDict*)dict;
- type = DICT_TYPE_DIGESTED;
- PyMutex_Lock(&zd->lock);
- ret = _zstd_load_impl(self, zd, mod_state, type);
- PyMutex_Unlock(&zd->lock);
- return ret;
- }
-
- /* Check (ZstdDict, type) */
- if (PyTuple_CheckExact(dict) && PyTuple_GET_SIZE(dict) == 2) {
- /* Check ZstdDict */
- ret = PyObject_IsInstance(PyTuple_GET_ITEM(dict, 0),
- (PyObject*)mod_state->ZstdDict_type);
- if (ret < 0) {
- return -1;
- }
- else if (ret > 0) {
- /* type == -1 may indicate an error. */
- type = PyLong_AsInt(PyTuple_GET_ITEM(dict, 1));
- if (type == DICT_TYPE_DIGESTED
- || type == DICT_TYPE_UNDIGESTED
- || type == DICT_TYPE_PREFIX)
- {
- assert(type >= 0);
- zd = (ZstdDict*)PyTuple_GET_ITEM(dict, 0);
- PyMutex_Lock(&zd->lock);
- ret = _zstd_load_impl(self, zd, mod_state, type);
- PyMutex_Unlock(&zd->lock);
- return ret;
- }
- }
- }
-
- /* Wrong type */
- PyErr_SetString(PyExc_TypeError,
- "zstd_dict argument should be ZstdDict object.");
- return -1;
+ int ret;
+ PyMutex_Lock(&zd->lock);
+ ret = _zstd_load_impl(self, zd, mod_state, type);
+ PyMutex_Unlock(&zd->lock);
+ return ret;
}
/*
@@ -282,9 +237,7 @@ decompress_lock_held(ZstdDecompressor *self, ZSTD_inBuffer *in,
/* Check error */
if (ZSTD_isError(zstd_ret)) {
_zstd_state* mod_state = PyType_GetModuleState(Py_TYPE(self));
- if (mod_state != NULL) {
- set_zstd_error(mod_state, ERR_DECOMPRESS, zstd_ret);
- }
+ set_zstd_error(mod_state, ERR_DECOMPRESS, zstd_ret);
goto error;
}
diff --git a/Modules/_zstd/zstddict.c b/Modules/_zstd/zstddict.c
index afc58b42e89..14f74aaed46 100644
--- a/Modules/_zstd/zstddict.c
+++ b/Modules/_zstd/zstddict.c
@@ -15,7 +15,6 @@ class _zstd.ZstdDict "ZstdDict *" "&zstd_dict_type_spec"
#include "Python.h"
#include "_zstdmodule.h"
-#include "zstddict.h"
#include "clinic/zstddict.c.h"
#include "internal/pycore_lock.h" // PyMutex_IsLocked
diff --git a/Modules/blake2module.c b/Modules/blake2module.c
index f9acc57f1b2..07aa89f573f 100644
--- a/Modules/blake2module.c
+++ b/Modules/blake2module.c
@@ -655,8 +655,7 @@ error:
/*[clinic input]
@classmethod
_blake2.blake2b.__new__ as py_blake2b_new
- data: object(c_default="NULL") = b''
- /
+ data as data_obj: object(c_default="NULL") = b''
*
digest_size: int(c_default="HACL_HASH_BLAKE2B_OUT_BYTES") = _blake2.blake2b.MAX_DIGEST_SIZE
key: Py_buffer(c_default="NULL", py_default="b''") = None
@@ -670,26 +669,31 @@ _blake2.blake2b.__new__ as py_blake2b_new
inner_size: int = 0
last_node: bool = False
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Return a new BLAKE2b hash object.
[clinic start generated code]*/
static PyObject *
-py_blake2b_new_impl(PyTypeObject *type, PyObject *data, int digest_size,
+py_blake2b_new_impl(PyTypeObject *type, PyObject *data_obj, int digest_size,
Py_buffer *key, Py_buffer *salt, Py_buffer *person,
int fanout, int depth, unsigned long leaf_size,
unsigned long long node_offset, int node_depth,
- int inner_size, int last_node, int usedforsecurity)
-/*[clinic end generated code: output=32bfd8f043c6896f input=8fee2b7b11428b2d]*/
+ int inner_size, int last_node, int usedforsecurity,
+ PyObject *string)
+/*[clinic end generated code: output=de64bd850606b6a0 input=78cf60a2922d2f90]*/
{
+ PyObject *data;
+ if (_Py_hashlib_data_argument(&data, data_obj, string) < 0) {
+ return NULL;
+ }
return py_blake2b_or_s_new(type, data, digest_size, key, salt, person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node, usedforsecurity);
}
/*[clinic input]
@classmethod
_blake2.blake2s.__new__ as py_blake2s_new
- data: object(c_default="NULL") = b''
- /
+ data as data_obj: object(c_default="NULL") = b''
*
digest_size: int(c_default="HACL_HASH_BLAKE2S_OUT_BYTES") = _blake2.blake2s.MAX_DIGEST_SIZE
key: Py_buffer(c_default="NULL", py_default="b''") = None
@@ -703,18 +707,24 @@ _blake2.blake2s.__new__ as py_blake2s_new
inner_size: int = 0
last_node: bool = False
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Return a new BLAKE2s hash object.
[clinic start generated code]*/
static PyObject *
-py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size,
+py_blake2s_new_impl(PyTypeObject *type, PyObject *data_obj, int digest_size,
Py_buffer *key, Py_buffer *salt, Py_buffer *person,
int fanout, int depth, unsigned long leaf_size,
unsigned long long node_offset, int node_depth,
- int inner_size, int last_node, int usedforsecurity)
-/*[clinic end generated code: output=556181f73905c686 input=8165a11980eac7f3]*/
+ int inner_size, int last_node, int usedforsecurity,
+ PyObject *string)
+/*[clinic end generated code: output=582a0c4295cc3a3c input=6843d6332eefd295]*/
{
+ PyObject *data;
+ if (_Py_hashlib_data_argument(&data, data_obj, string) < 0) {
+ return NULL;
+ }
return py_blake2b_or_s_new(type, data, digest_size, key, salt, person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node, usedforsecurity);
}
diff --git a/Modules/clinic/_hashopenssl.c.h b/Modules/clinic/_hashopenssl.c.h
index b2f6b25a235..61ea10e2a48 100644
--- a/Modules/clinic/_hashopenssl.c.h
+++ b/Modules/clinic/_hashopenssl.c.h
@@ -233,7 +233,7 @@ exit:
#endif /* defined(PY_OPENSSL_HAS_SHAKE) */
PyDoc_STRVAR(_hashlib_HASH_new__doc__,
-"new($module, /, name, string=b\'\', *, usedforsecurity=True)\n"
+"new($module, /, name, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new hash object using the named algorithm.\n"
@@ -247,8 +247,8 @@ PyDoc_STRVAR(_hashlib_HASH_new__doc__,
{"new", _PyCFunction_CAST(_hashlib_HASH_new), METH_FASTCALL|METH_KEYWORDS, _hashlib_HASH_new__doc__},
static PyObject *
-_hashlib_HASH_new_impl(PyObject *module, const char *name,
- PyObject *data_obj, int usedforsecurity);
+_hashlib_HASH_new_impl(PyObject *module, const char *name, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_HASH_new(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -256,7 +256,7 @@ _hashlib_HASH_new(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyO
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 3
+ #define NUM_KEYWORDS 4
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -265,7 +265,7 @@ _hashlib_HASH_new(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyO
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(name), &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(name), &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -274,18 +274,19 @@ _hashlib_HASH_new(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyO
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"name", "string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"name", "data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "new",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[3];
+ PyObject *argsbuf[4];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 1;
const char *name;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 1, /*maxpos*/ 2, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -309,7 +310,7 @@ _hashlib_HASH_new(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyO
goto skip_optional_pos;
}
if (args[1]) {
- data_obj = args[1];
+ data = args[1];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -318,19 +319,25 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[2]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[2]) {
+ usedforsecurity = PyObject_IsTrue(args[2]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[3];
skip_optional_kwonly:
- return_value = _hashlib_HASH_new_impl(module, name, data_obj, usedforsecurity);
+ return_value = _hashlib_HASH_new_impl(module, name, data, usedforsecurity, string);
exit:
return return_value;
}
PyDoc_STRVAR(_hashlib_openssl_md5__doc__,
-"openssl_md5($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_md5($module, /, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Returns a md5 hash object; optionally initialized with a string");
@@ -339,8 +346,8 @@ PyDoc_STRVAR(_hashlib_openssl_md5__doc__,
{"openssl_md5", _PyCFunction_CAST(_hashlib_openssl_md5), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_md5__doc__},
static PyObject *
-_hashlib_openssl_md5_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_md5_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -348,7 +355,7 @@ _hashlib_openssl_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -357,7 +364,7 @@ _hashlib_openssl_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -366,17 +373,18 @@ _hashlib_openssl_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_md5",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -387,7 +395,7 @@ _hashlib_openssl_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -396,19 +404,25 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_md5_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_md5_impl(module, data, usedforsecurity, string);
exit:
return return_value;
}
PyDoc_STRVAR(_hashlib_openssl_sha1__doc__,
-"openssl_sha1($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha1($module, /, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Returns a sha1 hash object; optionally initialized with a string");
@@ -417,8 +431,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha1__doc__,
{"openssl_sha1", _PyCFunction_CAST(_hashlib_openssl_sha1), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha1__doc__},
static PyObject *
-_hashlib_openssl_sha1_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha1_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -426,7 +440,7 @@ _hashlib_openssl_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -435,7 +449,7 @@ _hashlib_openssl_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -444,17 +458,18 @@ _hashlib_openssl_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha1",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -465,7 +480,7 @@ _hashlib_openssl_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs,
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -474,19 +489,26 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha1_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha1_impl(module, data, usedforsecurity, string);
exit:
return return_value;
}
PyDoc_STRVAR(_hashlib_openssl_sha224__doc__,
-"openssl_sha224($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha224($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a sha224 hash object; optionally initialized with a string");
@@ -495,8 +517,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha224__doc__,
{"openssl_sha224", _PyCFunction_CAST(_hashlib_openssl_sha224), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha224__doc__},
static PyObject *
-_hashlib_openssl_sha224_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha224_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -504,7 +526,7 @@ _hashlib_openssl_sha224(PyObject *module, PyObject *const *args, Py_ssize_t narg
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -513,7 +535,7 @@ _hashlib_openssl_sha224(PyObject *module, PyObject *const *args, Py_ssize_t narg
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -522,17 +544,18 @@ _hashlib_openssl_sha224(PyObject *module, PyObject *const *args, Py_ssize_t narg
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha224",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -543,7 +566,7 @@ _hashlib_openssl_sha224(PyObject *module, PyObject *const *args, Py_ssize_t narg
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -552,19 +575,26 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha224_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha224_impl(module, data, usedforsecurity, string);
exit:
return return_value;
}
PyDoc_STRVAR(_hashlib_openssl_sha256__doc__,
-"openssl_sha256($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha256($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a sha256 hash object; optionally initialized with a string");
@@ -573,8 +603,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha256__doc__,
{"openssl_sha256", _PyCFunction_CAST(_hashlib_openssl_sha256), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha256__doc__},
static PyObject *
-_hashlib_openssl_sha256_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha256_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -582,7 +612,7 @@ _hashlib_openssl_sha256(PyObject *module, PyObject *const *args, Py_ssize_t narg
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -591,7 +621,7 @@ _hashlib_openssl_sha256(PyObject *module, PyObject *const *args, Py_ssize_t narg
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -600,17 +630,18 @@ _hashlib_openssl_sha256(PyObject *module, PyObject *const *args, Py_ssize_t narg
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha256",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -621,7 +652,7 @@ _hashlib_openssl_sha256(PyObject *module, PyObject *const *args, Py_ssize_t narg
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -630,19 +661,26 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha256_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha256_impl(module, data, usedforsecurity, string);
exit:
return return_value;
}
PyDoc_STRVAR(_hashlib_openssl_sha384__doc__,
-"openssl_sha384($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha384($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a sha384 hash object; optionally initialized with a string");
@@ -651,8 +689,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha384__doc__,
{"openssl_sha384", _PyCFunction_CAST(_hashlib_openssl_sha384), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha384__doc__},
static PyObject *
-_hashlib_openssl_sha384_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha384_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -660,7 +698,7 @@ _hashlib_openssl_sha384(PyObject *module, PyObject *const *args, Py_ssize_t narg
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -669,7 +707,7 @@ _hashlib_openssl_sha384(PyObject *module, PyObject *const *args, Py_ssize_t narg
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -678,17 +716,18 @@ _hashlib_openssl_sha384(PyObject *module, PyObject *const *args, Py_ssize_t narg
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha384",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -699,7 +738,7 @@ _hashlib_openssl_sha384(PyObject *module, PyObject *const *args, Py_ssize_t narg
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -708,19 +747,26 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha384_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha384_impl(module, data, usedforsecurity, string);
exit:
return return_value;
}
PyDoc_STRVAR(_hashlib_openssl_sha512__doc__,
-"openssl_sha512($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha512($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a sha512 hash object; optionally initialized with a string");
@@ -729,8 +775,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha512__doc__,
{"openssl_sha512", _PyCFunction_CAST(_hashlib_openssl_sha512), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha512__doc__},
static PyObject *
-_hashlib_openssl_sha512_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha512_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -738,7 +784,7 @@ _hashlib_openssl_sha512(PyObject *module, PyObject *const *args, Py_ssize_t narg
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -747,7 +793,7 @@ _hashlib_openssl_sha512(PyObject *module, PyObject *const *args, Py_ssize_t narg
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -756,17 +802,18 @@ _hashlib_openssl_sha512(PyObject *module, PyObject *const *args, Py_ssize_t narg
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha512",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -777,7 +824,7 @@ _hashlib_openssl_sha512(PyObject *module, PyObject *const *args, Py_ssize_t narg
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -786,12 +833,18 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha512_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha512_impl(module, data, usedforsecurity, string);
exit:
return return_value;
@@ -800,7 +853,8 @@ exit:
#if defined(PY_OPENSSL_HAS_SHA3)
PyDoc_STRVAR(_hashlib_openssl_sha3_224__doc__,
-"openssl_sha3_224($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha3_224($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a sha3-224 hash object; optionally initialized with a string");
@@ -809,8 +863,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha3_224__doc__,
{"openssl_sha3_224", _PyCFunction_CAST(_hashlib_openssl_sha3_224), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha3_224__doc__},
static PyObject *
-_hashlib_openssl_sha3_224_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha3_224_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha3_224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -818,7 +872,7 @@ _hashlib_openssl_sha3_224(PyObject *module, PyObject *const *args, Py_ssize_t na
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -827,7 +881,7 @@ _hashlib_openssl_sha3_224(PyObject *module, PyObject *const *args, Py_ssize_t na
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -836,17 +890,18 @@ _hashlib_openssl_sha3_224(PyObject *module, PyObject *const *args, Py_ssize_t na
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha3_224",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -857,7 +912,7 @@ _hashlib_openssl_sha3_224(PyObject *module, PyObject *const *args, Py_ssize_t na
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -866,12 +921,18 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha3_224_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha3_224_impl(module, data, usedforsecurity, string);
exit:
return return_value;
@@ -882,7 +943,8 @@ exit:
#if defined(PY_OPENSSL_HAS_SHA3)
PyDoc_STRVAR(_hashlib_openssl_sha3_256__doc__,
-"openssl_sha3_256($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha3_256($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a sha3-256 hash object; optionally initialized with a string");
@@ -891,8 +953,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha3_256__doc__,
{"openssl_sha3_256", _PyCFunction_CAST(_hashlib_openssl_sha3_256), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha3_256__doc__},
static PyObject *
-_hashlib_openssl_sha3_256_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha3_256_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha3_256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -900,7 +962,7 @@ _hashlib_openssl_sha3_256(PyObject *module, PyObject *const *args, Py_ssize_t na
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -909,7 +971,7 @@ _hashlib_openssl_sha3_256(PyObject *module, PyObject *const *args, Py_ssize_t na
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -918,17 +980,18 @@ _hashlib_openssl_sha3_256(PyObject *module, PyObject *const *args, Py_ssize_t na
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha3_256",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -939,7 +1002,7 @@ _hashlib_openssl_sha3_256(PyObject *module, PyObject *const *args, Py_ssize_t na
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -948,12 +1011,18 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha3_256_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha3_256_impl(module, data, usedforsecurity, string);
exit:
return return_value;
@@ -964,7 +1033,8 @@ exit:
#if defined(PY_OPENSSL_HAS_SHA3)
PyDoc_STRVAR(_hashlib_openssl_sha3_384__doc__,
-"openssl_sha3_384($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha3_384($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a sha3-384 hash object; optionally initialized with a string");
@@ -973,8 +1043,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha3_384__doc__,
{"openssl_sha3_384", _PyCFunction_CAST(_hashlib_openssl_sha3_384), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha3_384__doc__},
static PyObject *
-_hashlib_openssl_sha3_384_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha3_384_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha3_384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -982,7 +1052,7 @@ _hashlib_openssl_sha3_384(PyObject *module, PyObject *const *args, Py_ssize_t na
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -991,7 +1061,7 @@ _hashlib_openssl_sha3_384(PyObject *module, PyObject *const *args, Py_ssize_t na
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -1000,17 +1070,18 @@ _hashlib_openssl_sha3_384(PyObject *module, PyObject *const *args, Py_ssize_t na
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha3_384",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -1021,7 +1092,7 @@ _hashlib_openssl_sha3_384(PyObject *module, PyObject *const *args, Py_ssize_t na
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -1030,12 +1101,18 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha3_384_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha3_384_impl(module, data, usedforsecurity, string);
exit:
return return_value;
@@ -1046,7 +1123,8 @@ exit:
#if defined(PY_OPENSSL_HAS_SHA3)
PyDoc_STRVAR(_hashlib_openssl_sha3_512__doc__,
-"openssl_sha3_512($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_sha3_512($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a sha3-512 hash object; optionally initialized with a string");
@@ -1055,8 +1133,8 @@ PyDoc_STRVAR(_hashlib_openssl_sha3_512__doc__,
{"openssl_sha3_512", _PyCFunction_CAST(_hashlib_openssl_sha3_512), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_sha3_512__doc__},
static PyObject *
-_hashlib_openssl_sha3_512_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_sha3_512_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_sha3_512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -1064,7 +1142,7 @@ _hashlib_openssl_sha3_512(PyObject *module, PyObject *const *args, Py_ssize_t na
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -1073,7 +1151,7 @@ _hashlib_openssl_sha3_512(PyObject *module, PyObject *const *args, Py_ssize_t na
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -1082,17 +1160,18 @@ _hashlib_openssl_sha3_512(PyObject *module, PyObject *const *args, Py_ssize_t na
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_sha3_512",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -1103,7 +1182,7 @@ _hashlib_openssl_sha3_512(PyObject *module, PyObject *const *args, Py_ssize_t na
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -1112,12 +1191,18 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_sha3_512_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_sha3_512_impl(module, data, usedforsecurity, string);
exit:
return return_value;
@@ -1128,7 +1213,8 @@ exit:
#if defined(PY_OPENSSL_HAS_SHAKE)
PyDoc_STRVAR(_hashlib_openssl_shake_128__doc__,
-"openssl_shake_128($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_shake_128($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a shake-128 variable hash object; optionally initialized with a string");
@@ -1137,8 +1223,8 @@ PyDoc_STRVAR(_hashlib_openssl_shake_128__doc__,
{"openssl_shake_128", _PyCFunction_CAST(_hashlib_openssl_shake_128), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_shake_128__doc__},
static PyObject *
-_hashlib_openssl_shake_128_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_shake_128_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_shake_128(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -1146,7 +1232,7 @@ _hashlib_openssl_shake_128(PyObject *module, PyObject *const *args, Py_ssize_t n
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -1155,7 +1241,7 @@ _hashlib_openssl_shake_128(PyObject *module, PyObject *const *args, Py_ssize_t n
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -1164,17 +1250,18 @@ _hashlib_openssl_shake_128(PyObject *module, PyObject *const *args, Py_ssize_t n
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_shake_128",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -1185,7 +1272,7 @@ _hashlib_openssl_shake_128(PyObject *module, PyObject *const *args, Py_ssize_t n
goto skip_optional_pos;
}
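Both BLAKE2 constructors above now accept the input buffer either as data (positionally or by keyword) or via the legacy keyword-only string parameter, and delegate the choice to _Py_hashlib_data_argument(). A sketch of the semantics that helper presumably implements; the exact return convention and warning text are assumptions, and the real helper lives in Modules/hashlib.h:

static int
_Py_hashlib_data_argument(PyObject **res, PyObject *data, PyObject *string)
{
    if (data != NULL && string == NULL) {
        /* new-style call: H(payload) or H(data=payload) */
        *res = data;
        return 1;
    }
    if (data == NULL && string != NULL) {
        /* legacy call: H(string=payload); assumed to emit a
           DeprecationWarning before accepting the argument */
        if (PyErr_WarnEx(PyExc_DeprecationWarning,
                         "the 'string' keyword parameter is deprecated; "
                         "use 'data' instead", 1) < 0) {
            return -1;
        }
        *res = string;
        return 1;
    }
    if (data == NULL && string == NULL) {
        /* no input buffer at all */
        *res = NULL;
        return 0;
    }
    /* both given: ambiguous */
    PyErr_SetString(PyExc_TypeError,
                    "'data' and 'string' are mutually exclusive");
    return -1;
}

Since data_obj and string both default to NULL in the generated wrappers, blake2b(b"payload") keeps working unchanged while blake2b(string=b"payload") stays accepted for backward compatibility.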
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -1194,12 +1281,18 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_shake_128_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_shake_128_impl(module, data, usedforsecurity, string);
exit:
return return_value;
@@ -1210,7 +1303,8 @@ exit:
#if defined(PY_OPENSSL_HAS_SHAKE)
PyDoc_STRVAR(_hashlib_openssl_shake_256__doc__,
-"openssl_shake_256($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"openssl_shake_256($module, /, data=b\'\', *, usedforsecurity=True,\n"
+" string=None)\n"
"--\n"
"\n"
"Returns a shake-256 variable hash object; optionally initialized with a string");
@@ -1219,8 +1313,8 @@ PyDoc_STRVAR(_hashlib_openssl_shake_256__doc__,
{"openssl_shake_256", _PyCFunction_CAST(_hashlib_openssl_shake_256), METH_FASTCALL|METH_KEYWORDS, _hashlib_openssl_shake_256__doc__},
static PyObject *
-_hashlib_openssl_shake_256_impl(PyObject *module, PyObject *data_obj,
- int usedforsecurity);
+_hashlib_openssl_shake_256_impl(PyObject *module, PyObject *data,
+ int usedforsecurity, PyObject *string);
static PyObject *
_hashlib_openssl_shake_256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -1228,7 +1322,7 @@ _hashlib_openssl_shake_256(PyObject *module, PyObject *const *args, Py_ssize_t n
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -1237,7 +1331,7 @@ _hashlib_openssl_shake_256(PyObject *module, PyObject *const *args, Py_ssize_t n
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -1246,17 +1340,18 @@ _hashlib_openssl_shake_256(PyObject *module, PyObject *const *args, Py_ssize_t n
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "openssl_shake_256",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *data_obj = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -1267,7 +1362,7 @@ _hashlib_openssl_shake_256(PyObject *module, PyObject *const *args, Py_ssize_t n
goto skip_optional_pos;
}
if (args[0]) {
- data_obj = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -1276,12 +1371,18 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = args[2];
skip_optional_kwonly:
- return_value = _hashlib_openssl_shake_256_impl(module, data_obj, usedforsecurity);
+ return_value = _hashlib_openssl_shake_256_impl(module, data, usedforsecurity, string);
exit:
return return_value;
@@ -1883,4 +1984,4 @@ exit:
#ifndef _HASHLIB_SCRYPT_METHODDEF
#define _HASHLIB_SCRYPT_METHODDEF
#endif /* !defined(_HASHLIB_SCRYPT_METHODDEF) */
-/*[clinic end generated code: output=dc03b64435166a64 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=29f4aaf01714778e input=a9049054013a1b77]*/
diff --git a/Modules/clinic/_randommodule.c.h b/Modules/clinic/_randommodule.c.h
index 1e989e970c9..2563a16aea0 100644
--- a/Modules/clinic/_randommodule.c.h
+++ b/Modules/clinic/_randommodule.c.h
@@ -3,6 +3,7 @@ preserve
[clinic start generated code]*/
#include "pycore_critical_section.h"// Py_BEGIN_CRITICAL_SECTION()
+#include "pycore_long.h" // _PyLong_UInt64_Converter()
#include "pycore_modsupport.h" // _PyArg_CheckPositional()
PyDoc_STRVAR(_random_Random_random__doc__,
@@ -124,16 +125,15 @@ PyDoc_STRVAR(_random_Random_getrandbits__doc__,
{"getrandbits", (PyCFunction)_random_Random_getrandbits, METH_O, _random_Random_getrandbits__doc__},
static PyObject *
-_random_Random_getrandbits_impl(RandomObject *self, int k);
+_random_Random_getrandbits_impl(RandomObject *self, uint64_t k);
static PyObject *
_random_Random_getrandbits(PyObject *self, PyObject *arg)
{
PyObject *return_value = NULL;
- int k;
+ uint64_t k;
- k = PyLong_AsInt(arg);
- if (k == -1 && PyErr_Occurred()) {
+ if (!_PyLong_UInt64_Converter(arg, &k)) {
goto exit;
}
Py_BEGIN_CRITICAL_SECTION(self);
@@ -143,4 +143,4 @@ _random_Random_getrandbits(PyObject *self, PyObject *arg)
exit:
return return_value;
}
-/*[clinic end generated code: output=4458b5a69201ebea input=a9049054013a1b77]*/
+/*[clinic end generated code: output=7ce97b2194eecaf7 input=a9049054013a1b77]*/
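getrandbits(k) previously parsed k with PyLong_AsInt, which capped it at INT_MAX; the generated code now routes through _PyLong_UInt64_Converter so the impl receives a uint64_t. A rough sketch of what such a converter is assumed to do (the actual helper is declared in pycore_long.h and may differ):

static int
_PyLong_UInt64_Converter(PyObject *obj, void *ptr)
{
    uint64_t value;
    /* PyLong_AsUInt64() is assumed to return -1 and set an exception on
       overflow, negative input, or a non-integer argument. */
    if (PyLong_AsUInt64(obj, &value) < 0) {
        return 0;   /* Argument Clinic converters report failure with 0 */
    }
    *(uint64_t *)ptr = value;
    return 1;
}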
diff --git a/Modules/clinic/blake2module.c.h b/Modules/clinic/blake2module.c.h
index bb2e308574a..9e9cd56e569 100644
--- a/Modules/clinic/blake2module.c.h
+++ b/Modules/clinic/blake2module.c.h
@@ -10,20 +10,21 @@ preserve
#include "pycore_modsupport.h" // _PyArg_UnpackKeywords()
PyDoc_STRVAR(py_blake2b_new__doc__,
-"blake2b(data=b\'\', /, *, digest_size=_blake2.blake2b.MAX_DIGEST_SIZE,\n"
+"blake2b(data=b\'\', *, digest_size=_blake2.blake2b.MAX_DIGEST_SIZE,\n"
" key=b\'\', salt=b\'\', person=b\'\', fanout=1, depth=1, leaf_size=0,\n"
" node_offset=0, node_depth=0, inner_size=0, last_node=False,\n"
-" usedforsecurity=True)\n"
+" usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new BLAKE2b hash object.");
static PyObject *
-py_blake2b_new_impl(PyTypeObject *type, PyObject *data, int digest_size,
+py_blake2b_new_impl(PyTypeObject *type, PyObject *data_obj, int digest_size,
Py_buffer *key, Py_buffer *salt, Py_buffer *person,
int fanout, int depth, unsigned long leaf_size,
unsigned long long node_offset, int node_depth,
- int inner_size, int last_node, int usedforsecurity);
+ int inner_size, int last_node, int usedforsecurity,
+ PyObject *string);
static PyObject *
py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
@@ -31,7 +32,7 @@ py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 12
+ #define NUM_KEYWORDS 14
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -40,7 +41,7 @@ py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(digest_size), &_Py_ID(key), &_Py_ID(salt), &_Py_ID(person), &_Py_ID(fanout), &_Py_ID(depth), &_Py_ID(leaf_size), &_Py_ID(node_offset), &_Py_ID(node_depth), &_Py_ID(inner_size), &_Py_ID(last_node), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(digest_size), &_Py_ID(key), &_Py_ID(salt), &_Py_ID(person), &_Py_ID(fanout), &_Py_ID(depth), &_Py_ID(leaf_size), &_Py_ID(node_offset), &_Py_ID(node_depth), &_Py_ID(inner_size), &_Py_ID(last_node), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -49,18 +50,18 @@ py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "blake2b",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[13];
+ PyObject *argsbuf[14];
PyObject * const *fastargs;
Py_ssize_t nargs = PyTuple_GET_SIZE(args);
Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 0;
- PyObject *data = NULL;
+ PyObject *data_obj = NULL;
int digest_size = HACL_HASH_BLAKE2B_OUT_BYTES;
Py_buffer key = {NULL, NULL};
Py_buffer salt = {NULL, NULL};
@@ -73,18 +74,23 @@ py_blake2b_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
int inner_size = 0;
int last_node = 0;
int usedforsecurity = 1;
+ PyObject *string = NULL;
fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
if (!fastargs) {
goto exit;
}
- if (nargs < 1) {
- goto skip_optional_posonly;
+ if (!noptargs) {
+ goto skip_optional_pos;
}
- noptargs--;
- data = fastargs[0];
-skip_optional_posonly:
+ if (fastargs[0]) {
+ data_obj = fastargs[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
@@ -182,12 +188,18 @@ skip_optional_posonly:
goto skip_optional_kwonly;
}
}
- usedforsecurity = PyObject_IsTrue(fastargs[12]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (fastargs[12]) {
+ usedforsecurity = PyObject_IsTrue(fastargs[12]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = fastargs[13];
skip_optional_kwonly:
- return_value = py_blake2b_new_impl(type, data, digest_size, &key, &salt, &person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node, usedforsecurity);
+ return_value = py_blake2b_new_impl(type, data_obj, digest_size, &key, &salt, &person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node, usedforsecurity, string);
exit:
/* Cleanup for key */
@@ -207,20 +219,21 @@ exit:
}
PyDoc_STRVAR(py_blake2s_new__doc__,
-"blake2s(data=b\'\', /, *, digest_size=_blake2.blake2s.MAX_DIGEST_SIZE,\n"
+"blake2s(data=b\'\', *, digest_size=_blake2.blake2s.MAX_DIGEST_SIZE,\n"
" key=b\'\', salt=b\'\', person=b\'\', fanout=1, depth=1, leaf_size=0,\n"
" node_offset=0, node_depth=0, inner_size=0, last_node=False,\n"
-" usedforsecurity=True)\n"
+" usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new BLAKE2s hash object.");
static PyObject *
-py_blake2s_new_impl(PyTypeObject *type, PyObject *data, int digest_size,
+py_blake2s_new_impl(PyTypeObject *type, PyObject *data_obj, int digest_size,
Py_buffer *key, Py_buffer *salt, Py_buffer *person,
int fanout, int depth, unsigned long leaf_size,
unsigned long long node_offset, int node_depth,
- int inner_size, int last_node, int usedforsecurity);
+ int inner_size, int last_node, int usedforsecurity,
+ PyObject *string);
static PyObject *
py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
@@ -228,7 +241,7 @@ py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 12
+ #define NUM_KEYWORDS 14
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -237,7 +250,7 @@ py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(digest_size), &_Py_ID(key), &_Py_ID(salt), &_Py_ID(person), &_Py_ID(fanout), &_Py_ID(depth), &_Py_ID(leaf_size), &_Py_ID(node_offset), &_Py_ID(node_depth), &_Py_ID(inner_size), &_Py_ID(last_node), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(digest_size), &_Py_ID(key), &_Py_ID(salt), &_Py_ID(person), &_Py_ID(fanout), &_Py_ID(depth), &_Py_ID(leaf_size), &_Py_ID(node_offset), &_Py_ID(node_depth), &_Py_ID(inner_size), &_Py_ID(last_node), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -246,18 +259,18 @@ py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "digest_size", "key", "salt", "person", "fanout", "depth", "leaf_size", "node_offset", "node_depth", "inner_size", "last_node", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "blake2s",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[13];
+ PyObject *argsbuf[14];
PyObject * const *fastargs;
Py_ssize_t nargs = PyTuple_GET_SIZE(args);
Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 0;
- PyObject *data = NULL;
+ PyObject *data_obj = NULL;
int digest_size = HACL_HASH_BLAKE2S_OUT_BYTES;
Py_buffer key = {NULL, NULL};
Py_buffer salt = {NULL, NULL};
@@ -270,18 +283,23 @@ py_blake2s_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
int inner_size = 0;
int last_node = 0;
int usedforsecurity = 1;
+ PyObject *string = NULL;
fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
if (!fastargs) {
goto exit;
}
- if (nargs < 1) {
- goto skip_optional_posonly;
+ if (!noptargs) {
+ goto skip_optional_pos;
}
- noptargs--;
- data = fastargs[0];
-skip_optional_posonly:
+ if (fastargs[0]) {
+ data_obj = fastargs[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
+ }
+skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
@@ -379,12 +397,18 @@ skip_optional_posonly:
goto skip_optional_kwonly;
}
}
- usedforsecurity = PyObject_IsTrue(fastargs[12]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (fastargs[12]) {
+ usedforsecurity = PyObject_IsTrue(fastargs[12]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = fastargs[13];
skip_optional_kwonly:
- return_value = py_blake2s_new_impl(type, data, digest_size, &key, &salt, &person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node, usedforsecurity);
+ return_value = py_blake2s_new_impl(type, data_obj, digest_size, &key, &salt, &person, fanout, depth, leaf_size, node_offset, node_depth, inner_size, last_node, usedforsecurity, string);
exit:
/* Cleanup for key */
@@ -478,4 +502,4 @@ _blake2_blake2b_hexdigest(PyObject *self, PyObject *Py_UNUSED(ignored))
{
return _blake2_blake2b_hexdigest_impl((Blake2Object *)self);
}
-/*[clinic end generated code: output=d30e8293bd8e2950 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=eed18dcfaf6f7731 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/mathmodule.c.h b/Modules/clinic/mathmodule.c.h
index 9df73b187bb..fbb012fb6dd 100644
--- a/Modules/clinic/mathmodule.c.h
+++ b/Modules/clinic/mathmodule.c.h
@@ -628,6 +628,74 @@ exit:
return return_value;
}
+PyDoc_STRVAR(math_isnormal__doc__,
+"isnormal($module, x, /)\n"
+"--\n"
+"\n"
+"Return True if x is normal, and False otherwise.");
+
+#define MATH_ISNORMAL_METHODDEF \
+ {"isnormal", (PyCFunction)math_isnormal, METH_O, math_isnormal__doc__},
+
+static PyObject *
+math_isnormal_impl(PyObject *module, double x);
+
+static PyObject *
+math_isnormal(PyObject *module, PyObject *arg)
+{
+ PyObject *return_value = NULL;
+ double x;
+
+ if (PyFloat_CheckExact(arg)) {
+ x = PyFloat_AS_DOUBLE(arg);
+ }
+ else
+ {
+ x = PyFloat_AsDouble(arg);
+ if (x == -1.0 && PyErr_Occurred()) {
+ goto exit;
+ }
+ }
+ return_value = math_isnormal_impl(module, x);
+
+exit:
+ return return_value;
+}
+
+PyDoc_STRVAR(math_issubnormal__doc__,
+"issubnormal($module, x, /)\n"
+"--\n"
+"\n"
+"Return True if x is subnormal, and False otherwise.");
+
+#define MATH_ISSUBNORMAL_METHODDEF \
+ {"issubnormal", (PyCFunction)math_issubnormal, METH_O, math_issubnormal__doc__},
+
+static PyObject *
+math_issubnormal_impl(PyObject *module, double x);
+
+static PyObject *
+math_issubnormal(PyObject *module, PyObject *arg)
+{
+ PyObject *return_value = NULL;
+ double x;
+
+ if (PyFloat_CheckExact(arg)) {
+ x = PyFloat_AS_DOUBLE(arg);
+ }
+ else
+ {
+ x = PyFloat_AsDouble(arg);
+ if (x == -1.0 && PyErr_Occurred()) {
+ goto exit;
+ }
+ }
+ return_value = math_issubnormal_impl(module, x);
+
+exit:
+ return return_value;
+}
+
PyDoc_STRVAR(math_isnan__doc__,
"isnan($module, x, /)\n"
"--\n"
@@ -1110,4 +1178,4 @@ math_ulp(PyObject *module, PyObject *arg)
exit:
return return_value;
}
-/*[clinic end generated code: output=77e7b8c161c39843 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=44bba3a0a052a364 input=a9049054013a1b77]*/
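The two wrappers added above only coerce the argument to a C double; the _impl functions they call are assumed to reduce to the C99 floating-point classification macros from <math.h>, roughly:

static PyObject *
math_isnormal_impl(PyObject *module, double x)
{
    /* normal: finite, nonzero, and not subnormal */
    return PyBool_FromLong(isnormal(x));
}

static PyObject *
math_issubnormal_impl(PyObject *module, double x)
{
    /* subnormal (denormal): nonzero with magnitude below DBL_MIN */
    return PyBool_FromLong(fpclassify(x) == FP_SUBNORMAL);
}

Because the wrapper falls back to PyFloat_AsDouble(), an integer argument too large to fit in a double raises OverflowError before the impl runs.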
diff --git a/Modules/clinic/md5module.c.h b/Modules/clinic/md5module.c.h
index 9ca4f6528ce..f76902586dd 100644
--- a/Modules/clinic/md5module.c.h
+++ b/Modules/clinic/md5module.c.h
@@ -89,7 +89,7 @@ MD5Type_update(PyObject *self, PyObject *obj)
}
PyDoc_STRVAR(_md5_md5__doc__,
-"md5($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"md5($module, /, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new MD5 hash object; optionally initialized with a string.");
@@ -98,7 +98,8 @@ PyDoc_STRVAR(_md5_md5__doc__,
{"md5", _PyCFunction_CAST(_md5_md5), METH_FASTCALL|METH_KEYWORDS, _md5_md5__doc__},
static PyObject *
-_md5_md5_impl(PyObject *module, PyObject *string, int usedforsecurity);
+_md5_md5_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj);
static PyObject *
_md5_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -106,7 +107,7 @@ _md5_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -115,7 +116,7 @@ _md5_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -124,17 +125,18 @@ _md5_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "md5",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *string = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string_obj = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -145,7 +147,7 @@ _md5_md5(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kw
goto skip_optional_pos;
}
if (args[0]) {
- string = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -154,14 +156,20 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string_obj = args[2];
skip_optional_kwonly:
- return_value = _md5_md5_impl(module, string, usedforsecurity);
+ return_value = _md5_md5_impl(module, data, usedforsecurity, string_obj);
exit:
return return_value;
}
-/*[clinic end generated code: output=73f4d2034d9fcc63 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=920fe54b9ed06f92 input=a9049054013a1b77]*/
diff --git a/Modules/clinic/sha1module.c.h b/Modules/clinic/sha1module.c.h
index 3e5fd1a41ce..4a58d0cd9b8 100644
--- a/Modules/clinic/sha1module.c.h
+++ b/Modules/clinic/sha1module.c.h
@@ -89,7 +89,7 @@ SHA1Type_update(PyObject *self, PyObject *obj)
}
PyDoc_STRVAR(_sha1_sha1__doc__,
-"sha1($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"sha1($module, /, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new SHA1 hash object; optionally initialized with a string.");
@@ -98,7 +98,8 @@ PyDoc_STRVAR(_sha1_sha1__doc__,
{"sha1", _PyCFunction_CAST(_sha1_sha1), METH_FASTCALL|METH_KEYWORDS, _sha1_sha1__doc__},
static PyObject *
-_sha1_sha1_impl(PyObject *module, PyObject *string, int usedforsecurity);
+_sha1_sha1_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj);
static PyObject *
_sha1_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -106,7 +107,7 @@ _sha1_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -115,7 +116,7 @@ _sha1_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -124,17 +125,18 @@ _sha1_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "sha1",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *string = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string_obj = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -145,7 +147,7 @@ _sha1_sha1(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *
goto skip_optional_pos;
}
if (args[0]) {
- string = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -154,14 +156,20 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string_obj = args[2];
skip_optional_kwonly:
- return_value = _sha1_sha1_impl(module, string, usedforsecurity);
+ return_value = _sha1_sha1_impl(module, data, usedforsecurity, string_obj);
exit:
return return_value;
}
-/*[clinic end generated code: output=06161e87e2d645d4 input=a9049054013a1b77]*/
+/*[clinic end generated code: output=fd5a917404b68c4f input=a9049054013a1b77]*/
diff --git a/Modules/clinic/sha2module.c.h b/Modules/clinic/sha2module.c.h
index 26612125e75..07be91e4f6c 100644
--- a/Modules/clinic/sha2module.c.h
+++ b/Modules/clinic/sha2module.c.h
@@ -169,7 +169,7 @@ SHA512Type_update(PyObject *self, PyObject *obj)
}
PyDoc_STRVAR(_sha2_sha256__doc__,
-"sha256($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"sha256($module, /, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new SHA-256 hash object; optionally initialized with a string.");
@@ -178,7 +178,8 @@ PyDoc_STRVAR(_sha2_sha256__doc__,
{"sha256", _PyCFunction_CAST(_sha2_sha256), METH_FASTCALL|METH_KEYWORDS, _sha2_sha256__doc__},
static PyObject *
-_sha2_sha256_impl(PyObject *module, PyObject *string, int usedforsecurity);
+_sha2_sha256_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj);
static PyObject *
_sha2_sha256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -186,7 +187,7 @@ _sha2_sha256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -195,7 +196,7 @@ _sha2_sha256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -204,17 +205,18 @@ _sha2_sha256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "sha256",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *string = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string_obj = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -225,7 +227,7 @@ _sha2_sha256(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
goto skip_optional_pos;
}
if (args[0]) {
- string = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -234,19 +236,25 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string_obj = args[2];
skip_optional_kwonly:
- return_value = _sha2_sha256_impl(module, string, usedforsecurity);
+ return_value = _sha2_sha256_impl(module, data, usedforsecurity, string_obj);
exit:
return return_value;
}
PyDoc_STRVAR(_sha2_sha224__doc__,
-"sha224($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"sha224($module, /, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new SHA-224 hash object; optionally initialized with a string.");
@@ -255,7 +263,8 @@ PyDoc_STRVAR(_sha2_sha224__doc__,
{"sha224", _PyCFunction_CAST(_sha2_sha224), METH_FASTCALL|METH_KEYWORDS, _sha2_sha224__doc__},
static PyObject *
-_sha2_sha224_impl(PyObject *module, PyObject *string, int usedforsecurity);
+_sha2_sha224_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj);
static PyObject *
_sha2_sha224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -263,7 +272,7 @@ _sha2_sha224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -272,7 +281,7 @@ _sha2_sha224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -281,17 +290,18 @@ _sha2_sha224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "sha224",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *string = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string_obj = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -302,7 +312,7 @@ _sha2_sha224(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
goto skip_optional_pos;
}
if (args[0]) {
- string = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -311,19 +321,25 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string_obj = args[2];
skip_optional_kwonly:
- return_value = _sha2_sha224_impl(module, string, usedforsecurity);
+ return_value = _sha2_sha224_impl(module, data, usedforsecurity, string_obj);
exit:
return return_value;
}
PyDoc_STRVAR(_sha2_sha512__doc__,
-"sha512($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"sha512($module, /, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new SHA-512 hash object; optionally initialized with a string.");
@@ -332,7 +348,8 @@ PyDoc_STRVAR(_sha2_sha512__doc__,
{"sha512", _PyCFunction_CAST(_sha2_sha512), METH_FASTCALL|METH_KEYWORDS, _sha2_sha512__doc__},
static PyObject *
-_sha2_sha512_impl(PyObject *module, PyObject *string, int usedforsecurity);
+_sha2_sha512_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj);
static PyObject *
_sha2_sha512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -340,7 +357,7 @@ _sha2_sha512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -349,7 +366,7 @@ _sha2_sha512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -358,17 +375,18 @@ _sha2_sha512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "sha512",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *string = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string_obj = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -379,7 +397,7 @@ _sha2_sha512(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
goto skip_optional_pos;
}
if (args[0]) {
- string = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -388,19 +406,25 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string_obj = args[2];
skip_optional_kwonly:
- return_value = _sha2_sha512_impl(module, string, usedforsecurity);
+ return_value = _sha2_sha512_impl(module, data, usedforsecurity, string_obj);
exit:
return return_value;
}
PyDoc_STRVAR(_sha2_sha384__doc__,
-"sha384($module, /, string=b\'\', *, usedforsecurity=True)\n"
+"sha384($module, /, data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new SHA-384 hash object; optionally initialized with a string.");
@@ -409,7 +433,8 @@ PyDoc_STRVAR(_sha2_sha384__doc__,
{"sha384", _PyCFunction_CAST(_sha2_sha384), METH_FASTCALL|METH_KEYWORDS, _sha2_sha384__doc__},
static PyObject *
-_sha2_sha384_impl(PyObject *module, PyObject *string, int usedforsecurity);
+_sha2_sha384_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj);
static PyObject *
_sha2_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
@@ -417,7 +442,7 @@ _sha2_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 2
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -426,7 +451,7 @@ _sha2_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(string), &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -435,17 +460,18 @@ _sha2_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"string", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "sha384",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
Py_ssize_t noptargs = nargs + (kwnames ? PyTuple_GET_SIZE(kwnames) : 0) - 0;
- PyObject *string = NULL;
+ PyObject *data = NULL;
int usedforsecurity = 1;
+ PyObject *string_obj = NULL;
args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
@@ -456,7 +482,7 @@ _sha2_sha384(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject
goto skip_optional_pos;
}
if (args[0]) {
- string = args[0];
+ data = args[0];
if (!--noptargs) {
goto skip_optional_pos;
}
@@ -465,14 +491,20 @@ skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(args[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (args[1]) {
+ usedforsecurity = PyObject_IsTrue(args[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string_obj = args[2];
skip_optional_kwonly:
- return_value = _sha2_sha384_impl(module, string, usedforsecurity);
+ return_value = _sha2_sha384_impl(module, data, usedforsecurity, string_obj);
exit:
return return_value;
}
-/*[clinic end generated code: output=af11090855b7c85a input=a9049054013a1b77]*/
+/*[clinic end generated code: output=90625b237c774a9f input=a9049054013a1b77]*/
diff --git a/Modules/clinic/sha3module.c.h b/Modules/clinic/sha3module.c.h
index 25f72b74f80..121be2c0758 100644
--- a/Modules/clinic/sha3module.c.h
+++ b/Modules/clinic/sha3module.c.h
@@ -10,13 +10,14 @@ preserve
#include "pycore_modsupport.h" // _PyArg_UnpackKeywords()
PyDoc_STRVAR(py_sha3_new__doc__,
-"sha3_224(data=b\'\', /, *, usedforsecurity=True)\n"
+"sha3_224(data=b\'\', *, usedforsecurity=True, string=None)\n"
"--\n"
"\n"
"Return a new SHA3 hash object.");
static PyObject *
-py_sha3_new_impl(PyTypeObject *type, PyObject *data, int usedforsecurity);
+py_sha3_new_impl(PyTypeObject *type, PyObject *data_obj, int usedforsecurity,
+ PyObject *string);
static PyObject *
py_sha3_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
@@ -24,7 +25,7 @@ py_sha3_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
PyObject *return_value = NULL;
#if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
- #define NUM_KEYWORDS 1
+ #define NUM_KEYWORDS 3
static struct {
PyGC_Head _this_is_not_used;
PyObject_VAR_HEAD
@@ -33,7 +34,7 @@ py_sha3_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
} _kwtuple = {
.ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
.ob_hash = -1,
- .ob_item = { &_Py_ID(usedforsecurity), },
+ .ob_item = { &_Py_ID(data), &_Py_ID(usedforsecurity), &_Py_ID(string), },
};
#undef NUM_KEYWORDS
#define KWTUPLE (&_kwtuple.ob_base.ob_base)
@@ -42,40 +43,51 @@ py_sha3_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)
# define KWTUPLE NULL
#endif // !Py_BUILD_CORE
- static const char * const _keywords[] = {"", "usedforsecurity", NULL};
+ static const char * const _keywords[] = {"data", "usedforsecurity", "string", NULL};
static _PyArg_Parser _parser = {
.keywords = _keywords,
.fname = "sha3_224",
.kwtuple = KWTUPLE,
};
#undef KWTUPLE
- PyObject *argsbuf[2];
+ PyObject *argsbuf[3];
PyObject * const *fastargs;
Py_ssize_t nargs = PyTuple_GET_SIZE(args);
Py_ssize_t noptargs = nargs + (kwargs ? PyDict_GET_SIZE(kwargs) : 0) - 0;
- PyObject *data = NULL;
+ PyObject *data_obj = NULL;
int usedforsecurity = 1;
+ PyObject *string = NULL;
fastargs = _PyArg_UnpackKeywords(_PyTuple_CAST(args)->ob_item, nargs, kwargs, NULL, &_parser,
/*minpos*/ 0, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
if (!fastargs) {
goto exit;
}
- if (nargs < 1) {
- goto skip_optional_posonly;
+ if (!noptargs) {
+ goto skip_optional_pos;
+ }
+ if (fastargs[0]) {
+ data_obj = fastargs[0];
+ if (!--noptargs) {
+ goto skip_optional_pos;
+ }
}
- noptargs--;
- data = fastargs[0];
-skip_optional_posonly:
+skip_optional_pos:
if (!noptargs) {
goto skip_optional_kwonly;
}
- usedforsecurity = PyObject_IsTrue(fastargs[1]);
- if (usedforsecurity < 0) {
- goto exit;
+ if (fastargs[1]) {
+ usedforsecurity = PyObject_IsTrue(fastargs[1]);
+ if (usedforsecurity < 0) {
+ goto exit;
+ }
+ if (!--noptargs) {
+ goto skip_optional_kwonly;
+ }
}
+ string = fastargs[2];
skip_optional_kwonly:
- return_value = py_sha3_new_impl(type, data, usedforsecurity);
+ return_value = py_sha3_new_impl(type, data_obj, usedforsecurity, string);
exit:
return return_value;
@@ -158,24 +170,57 @@ _sha3_sha3_224_update(PyObject *self, PyObject *data)
}
PyDoc_STRVAR(_sha3_shake_128_digest__doc__,
-"digest($self, length, /)\n"
+"digest($self, /, length)\n"
"--\n"
"\n"
"Return the digest value as a bytes object.");
#define _SHA3_SHAKE_128_DIGEST_METHODDEF \
- {"digest", (PyCFunction)_sha3_shake_128_digest, METH_O, _sha3_shake_128_digest__doc__},
+ {"digest", _PyCFunction_CAST(_sha3_shake_128_digest), METH_FASTCALL|METH_KEYWORDS, _sha3_shake_128_digest__doc__},
static PyObject *
_sha3_shake_128_digest_impl(SHA3object *self, unsigned long length);
static PyObject *
-_sha3_shake_128_digest(PyObject *self, PyObject *arg)
+_sha3_shake_128_digest(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+
+ #define NUM_KEYWORDS 1
+ static struct {
+ PyGC_Head _this_is_not_used;
+ PyObject_VAR_HEAD
+ Py_hash_t ob_hash;
+ PyObject *ob_item[NUM_KEYWORDS];
+ } _kwtuple = {
+ .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
+ .ob_hash = -1,
+ .ob_item = { &_Py_ID(length), },
+ };
+ #undef NUM_KEYWORDS
+ #define KWTUPLE (&_kwtuple.ob_base.ob_base)
+
+ #else // !Py_BUILD_CORE
+ # define KWTUPLE NULL
+ #endif // !Py_BUILD_CORE
+
+ static const char * const _keywords[] = {"length", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "digest",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[1];
unsigned long length;
- if (!_PyLong_UnsignedLong_Converter(arg, &length)) {
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
+ /*minpos*/ 1, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (!_PyLong_UnsignedLong_Converter(args[0], &length)) {
goto exit;
}
return_value = _sha3_shake_128_digest_impl((SHA3object *)self, length);
@@ -185,24 +230,57 @@ exit:
}
PyDoc_STRVAR(_sha3_shake_128_hexdigest__doc__,
-"hexdigest($self, length, /)\n"
+"hexdigest($self, /, length)\n"
"--\n"
"\n"
"Return the digest value as a string of hexadecimal digits.");
#define _SHA3_SHAKE_128_HEXDIGEST_METHODDEF \
- {"hexdigest", (PyCFunction)_sha3_shake_128_hexdigest, METH_O, _sha3_shake_128_hexdigest__doc__},
+ {"hexdigest", _PyCFunction_CAST(_sha3_shake_128_hexdigest), METH_FASTCALL|METH_KEYWORDS, _sha3_shake_128_hexdigest__doc__},
static PyObject *
_sha3_shake_128_hexdigest_impl(SHA3object *self, unsigned long length);
static PyObject *
-_sha3_shake_128_hexdigest(PyObject *self, PyObject *arg)
+_sha3_shake_128_hexdigest(PyObject *self, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames)
{
PyObject *return_value = NULL;
+ #if defined(Py_BUILD_CORE) && !defined(Py_BUILD_CORE_MODULE)
+
+ #define NUM_KEYWORDS 1
+ static struct {
+ PyGC_Head _this_is_not_used;
+ PyObject_VAR_HEAD
+ Py_hash_t ob_hash;
+ PyObject *ob_item[NUM_KEYWORDS];
+ } _kwtuple = {
+ .ob_base = PyVarObject_HEAD_INIT(&PyTuple_Type, NUM_KEYWORDS)
+ .ob_hash = -1,
+ .ob_item = { &_Py_ID(length), },
+ };
+ #undef NUM_KEYWORDS
+ #define KWTUPLE (&_kwtuple.ob_base.ob_base)
+
+ #else // !Py_BUILD_CORE
+ # define KWTUPLE NULL
+ #endif // !Py_BUILD_CORE
+
+ static const char * const _keywords[] = {"length", NULL};
+ static _PyArg_Parser _parser = {
+ .keywords = _keywords,
+ .fname = "hexdigest",
+ .kwtuple = KWTUPLE,
+ };
+ #undef KWTUPLE
+ PyObject *argsbuf[1];
unsigned long length;
- if (!_PyLong_UnsignedLong_Converter(arg, &length)) {
+ args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser,
+ /*minpos*/ 1, /*maxpos*/ 1, /*minkw*/ 0, /*varpos*/ 0, argsbuf);
+ if (!args) {
+ goto exit;
+ }
+ if (!_PyLong_UnsignedLong_Converter(args[0], &length)) {
goto exit;
}
return_value = _sha3_shake_128_hexdigest_impl((SHA3object *)self, length);
@@ -210,4 +288,4 @@ _sha3_shake_128_hexdigest(PyObject *self, PyObject *arg)
exit:
return return_value;
}
-/*[clinic end generated code: output=5b3ac1c06c6899ea input=a9049054013a1b77]*/
+/*[clinic end generated code: output=65e437799472b89f input=a9049054013a1b77]*/
diff --git a/Modules/hashlib.h b/Modules/hashlib.h
index 7105e68af7b..e82ec92be25 100644
--- a/Modules/hashlib.h
+++ b/Modules/hashlib.h
@@ -76,3 +76,41 @@
* to allow the user to optimize based on the platform they're using. */
#define HASHLIB_GIL_MINSIZE 2048
+static inline int
+_Py_hashlib_data_argument(PyObject **res, PyObject *data, PyObject *string)
+{
+ if (data != NULL && string == NULL) {
+ // called as H(data) or H(data=...)
+ *res = data;
+ return 1;
+ }
+ else if (data == NULL && string != NULL) {
+ // called as H(string=...)
+ if (PyErr_WarnEx(PyExc_DeprecationWarning,
+ "the 'string' keyword parameter is deprecated since "
+ "Python 3.15 and slated for removal in Python 3.19; "
+ "use the 'data' keyword parameter or pass the data "
+ "to hash as a positional argument instead", 1) < 0)
+ {
+ *res = NULL;
+ return -1;
+ }
+ *res = string;
+ return 1;
+ }
+ else if (data == NULL && string == NULL) {
+ // fast path when no data is given
+ assert(!PyErr_Occurred());
+ *res = NULL;
+ return 0;
+ }
+ else {
+ // called as H(data, string=...)
+ *res = NULL;
+ PyErr_SetString(PyExc_TypeError,
+ "'data' and 'string' are mutually exclusive "
+ "and support for 'string' keyword parameter "
+ "is slated for removal in a future version.");
+ return -1;
+ }
+}
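A minimal Python-level sketch of the calling conventions this helper enforces, assuming the hashlib constructors forward the 'data' and 'string' keywords to the C code above (warning text abbreviated):

    import hashlib
    import warnings

    hashlib.md5(b"abc")                  # positional data: accepted, no warning
    hashlib.md5(data=b"abc")             # new keyword name: accepted, no warning

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        hashlib.md5(string=b"abc")       # legacy keyword: DeprecationWarning
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)

    try:
        hashlib.md5(b"abc", string=b"abc")   # both given: mutually exclusive
    except TypeError:
        pass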
diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c
index 943c1e8607b..2003546ce84 100644
--- a/Modules/itertoolsmodule.c
+++ b/Modules/itertoolsmodule.c
@@ -1124,7 +1124,6 @@ typedef struct {
PyObject *it;
PyObject *saved;
Py_ssize_t index;
- int firstpass;
} cycleobject;
#define cycleobject_CAST(op) ((cycleobject *)(op))
@@ -1165,8 +1164,7 @@ itertools_cycle_impl(PyTypeObject *type, PyObject *iterable)
}
lz->it = it;
lz->saved = saved;
- lz->index = 0;
- lz->firstpass = 0;
+ lz->index = -1;
return (PyObject *)lz;
}
@@ -1199,11 +1197,11 @@ cycle_next(PyObject *op)
cycleobject *lz = cycleobject_CAST(op);
PyObject *item;
- if (lz->it != NULL) {
+ Py_ssize_t index = FT_ATOMIC_LOAD_SSIZE_RELAXED(lz->index);
+
+ if (index < 0) {
item = PyIter_Next(lz->it);
if (item != NULL) {
- if (lz->firstpass)
- return item;
if (PyList_Append(lz->saved, item)) {
Py_DECREF(item);
return NULL;
@@ -1213,15 +1211,22 @@ cycle_next(PyObject *op)
/* Note: StopIteration is already cleared by PyIter_Next() */
if (PyErr_Occurred())
return NULL;
+ index = 0;
+ FT_ATOMIC_STORE_SSIZE_RELAXED(lz->index, 0);
+#ifndef Py_GIL_DISABLED
Py_CLEAR(lz->it);
+#endif
}
if (PyList_GET_SIZE(lz->saved) == 0)
return NULL;
- item = PyList_GET_ITEM(lz->saved, lz->index);
- lz->index++;
- if (lz->index >= PyList_GET_SIZE(lz->saved))
- lz->index = 0;
- return Py_NewRef(item);
+ item = PyList_GetItemRef(lz->saved, index);
+ assert(item);
+ index++;
+ if (index >= PyList_GET_SIZE(lz->saved)) {
+ index = 0;
+ }
+ FT_ATOMIC_STORE_SSIZE_RELAXED(lz->index, index);
+ return item;
}
static PyType_Slot cycle_slots[] = {
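A rough pure-Python sketch of the single-index scheme above, where a negative index means the source iterator is still being drained and a non-negative index replays the saved items (the free-threaded atomics are ignored here):

    def cycle_sketch(iterable):
        it = iter(iterable)
        saved = []
        index = -1                    # -1: still consuming the source iterator
        while True:
            if index < 0:
                try:
                    item = next(it)
                except StopIteration:
                    index = 0         # switch to replaying saved items
                    continue
                saved.append(item)
                yield item
            else:
                if not saved:
                    return            # nothing was ever produced
                item = saved[index]
                index = (index + 1) % len(saved)
                yield item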
diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c
index 71d9c1387f5..bbbb4911568 100644
--- a/Modules/mathmodule.c
+++ b/Modules/mathmodule.c
@@ -3119,6 +3119,44 @@ math_isfinite_impl(PyObject *module, double x)
/*[clinic input]
+math.isnormal
+
+ x: double
+ /
+
+Return True if x is normal, and False otherwise.
+[clinic start generated code]*/
+
+static PyObject *
+math_isnormal_impl(PyObject *module, double x)
+/*[clinic end generated code: output=c7b302b5b89c3541 input=fdaa00c58aa7bc17]*/
+{
+ return PyBool_FromLong(isnormal(x));
+}
+
+
+/*[clinic input]
+math.issubnormal
+
+ x: double
+ /
+
+Return True if x is subnormal, and False otherwise.
+[clinic start generated code]*/
+
+static PyObject *
+math_issubnormal_impl(PyObject *module, double x)
+/*[clinic end generated code: output=4e76ac98ddcae761 input=9a20aba7107d0d95]*/
+{
+#if defined(__STDC_VERSION__) && __STDC_VERSION__ >= 202311L
+ return PyBool_FromLong(issubnormal(x));
+#else
+ return PyBool_FromLong(isfinite(x) && x && !isnormal(x));
+#endif
+}
+
+
+/*[clinic input]
math.isnan
x: double
@@ -4145,6 +4183,8 @@ static PyMethodDef math_methods[] = {
MATH_HYPOT_METHODDEF
MATH_ISCLOSE_METHODDEF
MATH_ISFINITE_METHODDEF
+ MATH_ISNORMAL_METHODDEF
+ MATH_ISSUBNORMAL_METHODDEF
MATH_ISINF_METHODDEF
MATH_ISNAN_METHODDEF
MATH_ISQRT_METHODDEF
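Assuming IEEE 754 binary64 floats, the two new predicates behave as follows (5e-324 is the smallest positive subnormal double, sys.float_info.min the smallest normal one):

    import math, sys

    assert math.isnormal(1.0)
    assert not math.isnormal(0.0)                    # zero is neither normal nor subnormal
    assert not math.isnormal(5e-324)                 # subnormal
    assert not math.isnormal(math.inf)
    assert not math.isnormal(math.nan)

    assert math.issubnormal(5e-324)
    assert math.issubnormal(sys.float_info.min / 2)
    assert not math.issubnormal(sys.float_info.min)  # smallest normal value
    assert not math.issubnormal(0.0)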
diff --git a/Modules/md5module.c b/Modules/md5module.c
index c36eb41d4d2..9b5ea2d6e02 100644
--- a/Modules/md5module.c
+++ b/Modules/md5module.c
@@ -276,17 +276,24 @@ static PyType_Spec md5_type_spec = {
/*[clinic input]
_md5.md5
- string: object(c_default="NULL") = b''
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string as string_obj: object(c_default="NULL") = None
Return a new MD5 hash object; optionally initialized with a string.
[clinic start generated code]*/
static PyObject *
-_md5_md5_impl(PyObject *module, PyObject *string, int usedforsecurity)
-/*[clinic end generated code: output=587071f76254a4ac input=7a144a1905636985]*/
+_md5_md5_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj)
+/*[clinic end generated code: output=d45e187d3d16f3a8 input=7ea5c5366dbb44bf]*/
{
+ PyObject *string;
+ if (_Py_hashlib_data_argument(&string, data, string_obj) < 0) {
+ return NULL;
+ }
+
MD5object *new;
Py_buffer buf;
diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c
index 588894adeac..7dc5ef39a56 100644
--- a/Modules/posixmodule.c
+++ b/Modules/posixmodule.c
@@ -8820,14 +8820,14 @@ os_ptsname_impl(PyObject *module, int fd)
#if defined(HAVE_OPENPTY) || defined(HAVE_FORKPTY) || defined(HAVE_LOGIN_TTY) || defined(HAVE_DEV_PTMX)
#ifdef HAVE_PTY_H
#include <pty.h>
-#ifdef HAVE_UTMP_H
-#include <utmp.h>
-#endif /* HAVE_UTMP_H */
#elif defined(HAVE_LIBUTIL_H)
#include <libutil.h>
#elif defined(HAVE_UTIL_H)
#include <util.h>
#endif /* HAVE_PTY_H */
+#ifdef HAVE_UTMP_H
+#include <utmp.h>
+#endif /* HAVE_UTMP_H */
#ifdef HAVE_STROPTS_H
#include <stropts.h>
#endif
@@ -9562,6 +9562,24 @@ os_getlogin_impl(PyObject *module)
}
else
result = PyErr_SetFromWindowsErr(GetLastError());
+#elif defined (HAVE_GETLOGIN_R)
+# if defined (HAVE_MAXLOGNAME)
+ char name[MAXLOGNAME + 1];
+# elif defined (HAVE_UT_NAMESIZE)
+ char name[UT_NAMESIZE + 1];
+# else
+ char name[256];
+# endif
+ int err = getlogin_r(name, sizeof(name));
+ if (err) {
+ int old_errno = errno;
+ errno = -err;
+ posix_error();
+ errno = old_errno;
+ }
+ else {
+ result = PyUnicode_DecodeFSDefault(name);
+ }
#else
char *name;
int old_errno = errno;
diff --git a/Modules/sha1module.c b/Modules/sha1module.c
index f4a00cdb422..a746bf74f8d 100644
--- a/Modules/sha1module.c
+++ b/Modules/sha1module.c
@@ -272,19 +272,25 @@ static PyType_Spec sha1_type_spec = {
/*[clinic input]
_sha1.sha1
- string: object(c_default="NULL") = b''
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string as string_obj: object(c_default="NULL") = None
Return a new SHA1 hash object; optionally initialized with a string.
[clinic start generated code]*/
static PyObject *
-_sha1_sha1_impl(PyObject *module, PyObject *string, int usedforsecurity)
-/*[clinic end generated code: output=6f8b3af05126e18e input=bd54b68e2bf36a8a]*/
+_sha1_sha1_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj)
+/*[clinic end generated code: output=0d453775924f88a7 input=807f25264e0ac656]*/
{
SHA1object *new;
Py_buffer buf;
+ PyObject *string;
+ if (_Py_hashlib_data_argument(&string, data, string_obj) < 0) {
+ return NULL;
+ }
if (string) {
GET_BUFFER_VIEW_OR_ERROUT(string, &buf);
diff --git a/Modules/sha2module.c b/Modules/sha2module.c
index e88d7cb2d45..72931910c5d 100644
--- a/Modules/sha2module.c
+++ b/Modules/sha2module.c
@@ -594,18 +594,24 @@ static PyType_Spec sha512_type_spec = {
/*[clinic input]
_sha2.sha256
- string: object(c_default="NULL") = b''
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string as string_obj: object(c_default="NULL") = None
Return a new SHA-256 hash object; optionally initialized with a string.
[clinic start generated code]*/
static PyObject *
-_sha2_sha256_impl(PyObject *module, PyObject *string, int usedforsecurity)
-/*[clinic end generated code: output=243c9dd289931f87 input=6249da1de607280a]*/
+_sha2_sha256_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj)
+/*[clinic end generated code: output=49828a7bcd418f45 input=9ce1d70e669abc14]*/
{
Py_buffer buf;
+ PyObject *string;
+ if (_Py_hashlib_data_argument(&string, data, string_obj) < 0) {
+ return NULL;
+ }
if (string) {
GET_BUFFER_VIEW_OR_ERROUT(string, &buf);
@@ -651,18 +657,25 @@ _sha2_sha256_impl(PyObject *module, PyObject *string, int usedforsecurity)
/*[clinic input]
_sha2.sha224
- string: object(c_default="NULL") = b''
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string as string_obj: object(c_default="NULL") = None
Return a new SHA-224 hash object; optionally initialized with a string.
[clinic start generated code]*/
static PyObject *
-_sha2_sha224_impl(PyObject *module, PyObject *string, int usedforsecurity)
-/*[clinic end generated code: output=68191f232e4a3843 input=c42bcba47fd7d2b7]*/
+_sha2_sha224_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj)
+/*[clinic end generated code: output=2163cb03b6cf6157 input=612f7682a889bc2a]*/
{
Py_buffer buf;
+ PyObject *string;
+ if (_Py_hashlib_data_argument(&string, data, string_obj) < 0) {
+ return NULL;
+ }
+
if (string) {
GET_BUFFER_VIEW_OR_ERROUT(string, &buf);
}
@@ -706,19 +719,25 @@ _sha2_sha224_impl(PyObject *module, PyObject *string, int usedforsecurity)
/*[clinic input]
_sha2.sha512
- string: object(c_default="NULL") = b''
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string as string_obj: object(c_default="NULL") = None
Return a new SHA-512 hash object; optionally initialized with a string.
[clinic start generated code]*/
static PyObject *
-_sha2_sha512_impl(PyObject *module, PyObject *string, int usedforsecurity)
-/*[clinic end generated code: output=d55c8996eca214d7 input=0576ae2a6ebfad25]*/
+_sha2_sha512_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj)
+/*[clinic end generated code: output=cc3fcfce001a4538 input=19c9f2c06d59563a]*/
{
SHA512object *new;
Py_buffer buf;
+ PyObject *string;
+ if (_Py_hashlib_data_argument(&string, data, string_obj) < 0) {
+ return NULL;
+ }
sha2_state *state = sha2_get_state(module);
@@ -763,19 +782,25 @@ _sha2_sha512_impl(PyObject *module, PyObject *string, int usedforsecurity)
/*[clinic input]
_sha2.sha384
- string: object(c_default="NULL") = b''
+ data: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string as string_obj: object(c_default="NULL") = None
Return a new SHA-384 hash object; optionally initialized with a string.
[clinic start generated code]*/
static PyObject *
-_sha2_sha384_impl(PyObject *module, PyObject *string, int usedforsecurity)
-/*[clinic end generated code: output=b29a0d81d51d1368 input=4e9199d8de0d2f9b]*/
+_sha2_sha384_impl(PyObject *module, PyObject *data, int usedforsecurity,
+ PyObject *string_obj)
+/*[clinic end generated code: output=b6e3db593b5a0330 input=9fd50c942ad9e0bf]*/
{
SHA512object *new;
Py_buffer buf;
+ PyObject *string;
+ if (_Py_hashlib_data_argument(&string, data, string_obj) < 0) {
+ return NULL;
+ }
sha2_state *state = sha2_get_state(module);
diff --git a/Modules/sha3module.c b/Modules/sha3module.c
index a7edf5c66a1..cfbf0cbcc04 100644
--- a/Modules/sha3module.c
+++ b/Modules/sha3module.c
@@ -105,18 +105,25 @@ sha3_update(Hacl_Hash_SHA3_state_t *state, uint8_t *buf, Py_ssize_t len)
/*[clinic input]
@classmethod
_sha3.sha3_224.__new__ as py_sha3_new
- data: object(c_default="NULL") = b''
- /
+
+ data as data_obj: object(c_default="NULL") = b''
*
usedforsecurity: bool = True
+ string: object(c_default="NULL") = None
Return a new SHA3 hash object.
[clinic start generated code]*/
static PyObject *
-py_sha3_new_impl(PyTypeObject *type, PyObject *data, int usedforsecurity)
-/*[clinic end generated code: output=90409addc5d5e8b0 input=637e5f8f6a93982a]*/
+py_sha3_new_impl(PyTypeObject *type, PyObject *data_obj, int usedforsecurity,
+ PyObject *string)
+/*[clinic end generated code: output=dcec1eca20395f2a input=c106e0b4e2d67d58]*/
{
+ PyObject *data;
+ if (_Py_hashlib_data_argument(&data, data_obj, string) < 0) {
+ return NULL;
+ }
+
Py_buffer buf = {NULL, NULL};
SHA3State *state = _PyType_GetModuleState(type);
SHA3object *self = newSHA3object(type);
@@ -503,14 +510,13 @@ _SHAKE_digest(PyObject *op, unsigned long digestlen, int hex)
_sha3.shake_128.digest
length: unsigned_long
- /
Return the digest value as a bytes object.
[clinic start generated code]*/
static PyObject *
_sha3_shake_128_digest_impl(SHA3object *self, unsigned long length)
-/*[clinic end generated code: output=2313605e2f87bb8f input=418ef6a36d2e6082]*/
+/*[clinic end generated code: output=2313605e2f87bb8f input=93d6d6ff32904f18]*/
{
return _SHAKE_digest((PyObject *)self, length, 0);
}
@@ -520,14 +526,13 @@ _sha3_shake_128_digest_impl(SHA3object *self, unsigned long length)
_sha3.shake_128.hexdigest
length: unsigned_long
- /
Return the digest value as a string of hexadecimal digits.
[clinic start generated code]*/
static PyObject *
_sha3_shake_128_hexdigest_impl(SHA3object *self, unsigned long length)
-/*[clinic end generated code: output=bf8e2f1e490944a8 input=69fb29b0926ae321]*/
+/*[clinic end generated code: output=bf8e2f1e490944a8 input=562d74e7060b56ab]*/
{
return _SHAKE_digest((PyObject *)self, length, 1);
}
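Since the '/' marker is dropped from the shake digest signatures above, length may now be passed by keyword as well as positionally; a quick sketch, assuming the interpreter uses this built-in _sha3 implementation rather than an OpenSSL-backed one:

    import hashlib

    h = hashlib.shake_128(b"data")
    h.digest(16)            # positional, as before
    h.digest(length=16)     # now also accepted by keyword
    h.hexdigest(length=16)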
diff --git a/Objects/genobject.c b/Objects/genobject.c
index 98b2c5004df..da1462deaaa 100644
--- a/Objects/genobject.c
+++ b/Objects/genobject.c
@@ -1451,7 +1451,9 @@ typedef struct PyAsyncGenAThrow {
/* Can be NULL, when in the "aclose()" mode
(equivalent of "athrow(GeneratorExit)") */
- PyObject *agt_args;
+ PyObject *agt_typ;
+ PyObject *agt_tb;
+ PyObject *agt_val;
AwaitableState agt_state;
} PyAsyncGenAThrow;
@@ -2078,7 +2080,9 @@ async_gen_athrow_dealloc(PyObject *self)
_PyObject_GC_UNTRACK(self);
Py_CLEAR(agt->agt_gen);
- Py_CLEAR(agt->agt_args);
+ Py_XDECREF(agt->agt_typ);
+ Py_XDECREF(agt->agt_tb);
+ Py_XDECREF(agt->agt_val);
PyObject_GC_Del(self);
}
@@ -2088,7 +2092,9 @@ async_gen_athrow_traverse(PyObject *self, visitproc visit, void *arg)
{
PyAsyncGenAThrow *agt = _PyAsyncGenAThrow_CAST(self);
Py_VISIT(agt->agt_gen);
- Py_VISIT(agt->agt_args);
+ Py_VISIT(agt->agt_typ);
+ Py_VISIT(agt->agt_tb);
+ Py_VISIT(agt->agt_val);
return 0;
}
@@ -2116,7 +2122,7 @@ async_gen_athrow_send(PyObject *self, PyObject *arg)
if (o->agt_state == AWAITABLE_STATE_INIT) {
if (o->agt_gen->ag_running_async) {
o->agt_state = AWAITABLE_STATE_CLOSED;
- if (o->agt_args == NULL) {
+ if (o->agt_typ == NULL) {
PyErr_SetString(
PyExc_RuntimeError,
"aclose(): asynchronous generator is already running");
@@ -2143,7 +2149,7 @@ async_gen_athrow_send(PyObject *self, PyObject *arg)
o->agt_state = AWAITABLE_STATE_ITER;
o->agt_gen->ag_running_async = 1;
- if (o->agt_args == NULL) {
+ if (o->agt_typ == NULL) {
/* aclose() mode */
o->agt_gen->ag_closed = 1;
@@ -2157,19 +2163,10 @@ async_gen_athrow_send(PyObject *self, PyObject *arg)
goto yield_close;
}
} else {
- PyObject *typ;
- PyObject *tb = NULL;
- PyObject *val = NULL;
-
- if (!PyArg_UnpackTuple(o->agt_args, "athrow", 1, 3,
- &typ, &val, &tb)) {
- return NULL;
- }
-
retval = _gen_throw((PyGenObject *)gen,
0, /* Do not close generator when
PyExc_GeneratorExit is passed */
- typ, val, tb);
+ o->agt_typ, o->agt_val, o->agt_tb);
retval = async_gen_unwrap_value(o->agt_gen, retval);
}
if (retval == NULL) {
@@ -2181,7 +2178,7 @@ async_gen_athrow_send(PyObject *self, PyObject *arg)
assert(o->agt_state == AWAITABLE_STATE_ITER);
retval = gen_send((PyObject *)gen, arg);
- if (o->agt_args) {
+ if (o->agt_typ) {
return async_gen_unwrap_value(o->agt_gen, retval);
} else {
/* aclose() mode */
@@ -2212,7 +2209,7 @@ check_error:
if (PyErr_ExceptionMatches(PyExc_StopAsyncIteration) ||
PyErr_ExceptionMatches(PyExc_GeneratorExit))
{
- if (o->agt_args == NULL) {
+ if (o->agt_typ == NULL) {
/* when aclose() is called we don't want to propagate
StopAsyncIteration or GeneratorExit; just raise
StopIteration, signalling that this 'aclose()' await
@@ -2241,7 +2238,7 @@ async_gen_athrow_throw(PyObject *self, PyObject *const *args, Py_ssize_t nargs)
if (o->agt_state == AWAITABLE_STATE_INIT) {
if (o->agt_gen->ag_running_async) {
o->agt_state = AWAITABLE_STATE_CLOSED;
- if (o->agt_args == NULL) {
+ if (o->agt_typ == NULL) {
PyErr_SetString(
PyExc_RuntimeError,
"aclose(): asynchronous generator is already running");
@@ -2259,7 +2256,7 @@ async_gen_athrow_throw(PyObject *self, PyObject *const *args, Py_ssize_t nargs)
}
PyObject *retval = gen_throw((PyObject*)o->agt_gen, args, nargs);
- if (o->agt_args) {
+ if (o->agt_typ) {
retval = async_gen_unwrap_value(o->agt_gen, retval);
if (retval == NULL) {
o->agt_gen->ag_running_async = 0;
@@ -2334,7 +2331,7 @@ async_gen_athrow_finalize(PyObject *op)
{
PyAsyncGenAThrow *o = (PyAsyncGenAThrow*)op;
if (o->agt_state == AWAITABLE_STATE_INIT) {
- PyObject *method = o->agt_args ? &_Py_ID(athrow) : &_Py_ID(aclose);
+ PyObject *method = o->agt_typ ? &_Py_ID(athrow) : &_Py_ID(aclose);
_PyErr_WarnUnawaitedAgenMethod(o->agt_gen, method);
}
}
@@ -2403,13 +2400,23 @@ PyTypeObject _PyAsyncGenAThrow_Type = {
static PyObject *
async_gen_athrow_new(PyAsyncGenObject *gen, PyObject *args)
{
+ PyObject *typ = NULL;
+ PyObject *tb = NULL;
+ PyObject *val = NULL;
+ if (args && !PyArg_UnpackTuple(args, "athrow", 1, 3, &typ, &val, &tb)) {
+ return NULL;
+ }
+
PyAsyncGenAThrow *o;
o = PyObject_GC_New(PyAsyncGenAThrow, &_PyAsyncGenAThrow_Type);
if (o == NULL) {
return NULL;
}
o->agt_gen = (PyAsyncGenObject*)Py_NewRef(gen);
- o->agt_args = Py_XNewRef(args);
+ o->agt_typ = Py_XNewRef(typ);
+ o->agt_tb = Py_XNewRef(tb);
+ o->agt_val = Py_XNewRef(val);
+
o->agt_state = AWAITABLE_STATE_INIT;
_PyObject_GC_TRACK((PyObject*)o);
return (PyObject*)o;
diff --git a/Objects/longobject.c b/Objects/longobject.c
index 0b2dfa003fa..2b533312fee 100644
--- a/Objects/longobject.c
+++ b/Objects/longobject.c
@@ -971,16 +971,9 @@ _PyLong_FromByteArray(const unsigned char* bytes, size_t n,
++numsignificantbytes;
}
- /* How many Python int digits do we need? We have
- 8*numsignificantbytes bits, and each Python int digit has
- PyLong_SHIFT bits, so it's the ceiling of the quotient. */
- /* catch overflow before it happens */
- if (numsignificantbytes > (PY_SSIZE_T_MAX - PyLong_SHIFT) / 8) {
- PyErr_SetString(PyExc_OverflowError,
- "byte array too long to convert to int");
- return NULL;
- }
- ndigits = (numsignificantbytes * 8 + PyLong_SHIFT - 1) / PyLong_SHIFT;
+ /* Need ceil(8*numsignificantbytes / PyLong_SHIFT) digits; compute it this way to avoid overflow in the intermediate product. */
+ ndigits = numsignificantbytes / PyLong_SHIFT * 8
+ + (numsignificantbytes % PyLong_SHIFT * 8 + PyLong_SHIFT - 1) / PyLong_SHIFT;
v = long_alloc(ndigits);
if (v == NULL)
return NULL;
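The rewritten expression computes the same value as before, ceil(8*n / PyLong_SHIFT) with n = numsignificantbytes, but never forms the product 8*n that could overflow: writing n = q*SHIFT + r gives 8*q + ceil(8*r/SHIFT). A small Python check of that identity (SHIFT value chosen for illustration):

    SHIFT = 30  # typical PyLong_SHIFT; the identity holds for any positive SHIFT

    def old(n):   # ceil(8*n / SHIFT); the product 8*n is what could overflow in C
        return (n * 8 + SHIFT - 1) // SHIFT

    def new(n):   # overflow-safe form used in the patch above
        return n // SHIFT * 8 + (n % SHIFT * 8 + SHIFT - 1) // SHIFT

    assert all(old(n) == new(n) for n in range(10000))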
diff --git a/PCbuild/_testclinic_limited.vcxproj b/PCbuild/_testclinic_limited.vcxproj
index 183a55080e8..95c205309b1 100644
--- a/PCbuild/_testclinic_limited.vcxproj
+++ b/PCbuild/_testclinic_limited.vcxproj
@@ -70,6 +70,7 @@
<ProjectGuid>{01FDF29A-40A1-46DF-84F5-85EBBD2A2410}</ProjectGuid>
<RootNamespace>_testclinic_limited</RootNamespace>
<Keyword>Win32Proj</Keyword>
+ <SupportPGO>false</SupportPGO>
</PropertyGroup>
<Import Project="python.props" />
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
diff --git a/PCbuild/pyproject.props b/PCbuild/pyproject.props
index 7272542e13a..ce51e342241 100644
--- a/PCbuild/pyproject.props
+++ b/PCbuild/pyproject.props
@@ -96,19 +96,16 @@
<TargetMachine Condition="'$(Platform)' == 'x64'">MachineX64</TargetMachine>
<TargetMachine Condition="'$(Platform)'=='ARM'">MachineARM</TargetMachine>
<TargetMachine Condition="'$(Platform)'=='ARM64'">MachineARM64</TargetMachine>
- <ProfileGuidedDatabase Condition="$(SupportPGO)">$(OutDir)$(TargetName).pgd</ProfileGuidedDatabase>
- <LinkTimeCodeGeneration Condition="$(Configuration) == 'Release'">UseLinkTimeCodeGeneration</LinkTimeCodeGeneration>
- <LinkTimeCodeGeneration Condition="$(SupportPGO) and $(Configuration) == 'PGInstrument'">PGInstrument</LinkTimeCodeGeneration>
- <LinkTimeCodeGeneration Condition="$(SupportPGO) and $(Configuration) == 'PGUpdate'">PGUpdate</LinkTimeCodeGeneration>
+ <LinkTimeCodeGeneration Condition="$(Configuration) != 'Debug'">UseLinkTimeCodeGeneration</LinkTimeCodeGeneration>
<AdditionalDependencies>advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalOptions Condition="$(Configuration) != 'Debug'">/OPT:REF,NOICF %(AdditionalOptions)</AdditionalOptions>
<AdditionalOptions Condition="$(MSVCHasBrokenARM64Clamping) == 'true' and $(Platform) == 'ARM64'">-d2:-pattern-opt-disable:-932189325 %(AdditionalOptions)</AdditionalOptions>
+ <AdditionalOptions Condition="$(SupportPGO) and $(Configuration) == 'PGInstrument'">/GENPROFILE %(AdditionalOptions)</AdditionalOptions>
+ <AdditionalOptions Condition="$(SupportPGO) and $(Configuration) == 'PGUpdate'">/USEPROFILE %(AdditionalOptions)</AdditionalOptions>
</Link>
<Lib>
<LinkTimeCodeGeneration>false</LinkTimeCodeGeneration>
- <LinkTimeCodeGeneration Condition="$(Configuration) == 'Release'">true</LinkTimeCodeGeneration>
- <LinkTimeCodeGeneration Condition="$(SupportPGO) and $(Configuration) == 'PGInstrument'">true</LinkTimeCodeGeneration>
- <LinkTimeCodeGeneration Condition="$(SupportPGO) and $(Configuration) == 'PGUpdate'">true</LinkTimeCodeGeneration>
+ <LinkTimeCodeGeneration Condition="$(Configuration) != 'Debug'">true</LinkTimeCodeGeneration>
</Lib>
<ResourceCompile>
<AdditionalIncludeDirectories>$(PySourcePath)PC;$(PySourcePath)Include;$(IntDir);%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
diff --git a/Parser/parser.c b/Parser/parser.c
index 84a293cddff..d5aafef826e 100644
--- a/Parser/parser.c
+++ b/Parser/parser.c
@@ -14,7 +14,7 @@
# define MAXSTACK 4000
# endif
#else
-# define MAXSTACK 4000
+# define MAXSTACK 6000
#endif
static const int n_keyword_lists = 9;
static KeywordToken *reserved_keywords[] = {
diff --git a/Python/asm_trampoline.S b/Python/asm_trampoline.S
index 0a3265dfeee..616752459ba 100644
--- a/Python/asm_trampoline.S
+++ b/Python/asm_trampoline.S
@@ -9,6 +9,9 @@
# }
_Py_trampoline_func_start:
#ifdef __x86_64__
+#if defined(__CET__) && (__CET__ & 1)
+ endbr64
+#endif
sub $8, %rsp
call *%rcx
add $8, %rsp
@@ -34,3 +37,22 @@ _Py_trampoline_func_start:
.globl _Py_trampoline_func_end
_Py_trampoline_func_end:
.section .note.GNU-stack,"",@progbits
+# GNU property note indicating that this assembly code supports CET
+#if defined(__x86_64__) && defined(__CET__) && (__CET__ & 1)
+ .section .note.gnu.property,"a"
+ .align 8
+ .long 1f - 0f
+ .long 4f - 1f
+ .long 5
+0:
+ .string "GNU"
+1:
+ .align 8
+ .long 0xc0000002
+ .long 3f - 2f
+2:
+ .long 0x3
+3:
+ .align 8
+4:
+#endif // __x86_64__
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index a1f8d360528..c4b13da5db4 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -3125,100 +3125,49 @@ dummy_func(
}
replaced op(_FOR_ITER, (iter, null_or_index -- iter, null_or_index, next)) {
- /* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- if (PyStackRef_IsTaggedInt(null_or_index)) {
- next = _PyForIter_NextWithIndex(iter_o, null_or_index);
- if (PyStackRef_IsNull(next)) {
- null_or_index = PyStackRef_TagInt(-1);
- JUMPBY(oparg + 1);
- DISPATCH();
- }
- null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
- }
- else {
- PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
- if (next_o == NULL) {
- if (_PyErr_Occurred(tstate)) {
- int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
- if (!matches) {
- ERROR_NO_POP();
- }
- _PyEval_MonitorRaise(tstate, frame, this_instr);
- _PyErr_Clear(tstate);
- }
- /* iterator ended normally */
- assert(next_instr[oparg].op.code == END_FOR ||
- next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
- /* Jump forward oparg, then skip following END_FOR */
- JUMPBY(oparg + 1);
- DISPATCH();
+ _PyStackRef item = _PyForIter_VirtualIteratorNext(tstate, frame, iter, &null_or_index);
+ if (!PyStackRef_IsValid(item)) {
+ if (PyStackRef_IsError(item)) {
+ ERROR_NO_POP();
}
- next = PyStackRef_FromPyObjectSteal(next_o);
+ // Jump forward by oparg and skip the following END_FOR
+ JUMPBY(oparg + 1);
+ DISPATCH();
}
+ next = item;
}
op(_FOR_ITER_TIER_TWO, (iter, null_or_index -- iter, null_or_index, next)) {
- /* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) */
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- EXIT_IF(!PyStackRef_IsNull(null_or_index));
- PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
- if (next_o == NULL) {
- if (_PyErr_Occurred(tstate)) {
- int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
- if (!matches) {
- ERROR_NO_POP();
- }
- _PyEval_MonitorRaise(tstate, frame, frame->instr_ptr);
- _PyErr_Clear(tstate);
+ _PyStackRef item = _PyForIter_VirtualIteratorNext(tstate, frame, iter, &null_or_index);
+ if (!PyStackRef_IsValid(item)) {
+ if (PyStackRef_IsError(item)) {
+ ERROR_NO_POP();
}
/* iterator ended normally */
/* The translator sets the deopt target just past the matching END_FOR */
EXIT_IF(true);
}
- next = PyStackRef_FromPyObjectSteal(next_o);
- // Common case: no jump, leave it to the code generator
+ next = item;
}
+
macro(FOR_ITER) = _SPECIALIZE_FOR_ITER + _FOR_ITER;
inst(INSTRUMENTED_FOR_ITER, (unused/1, iter, null_or_index -- iter, null_or_index, next)) {
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- if (PyStackRef_IsTaggedInt(null_or_index)) {
- next = _PyForIter_NextWithIndex(iter_o, null_or_index);
- if (PyStackRef_IsNull(next)) {
- JUMPBY(oparg + 1);
- DISPATCH();
- }
- INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT);
- }
- else {
- PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
- if (next_o != NULL) {
- next = PyStackRef_FromPyObjectSteal(next_o);
- INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT);
- }
- else {
- if (_PyErr_Occurred(tstate)) {
- int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
- if (!matches) {
- ERROR_NO_POP();
- }
- _PyEval_MonitorRaise(tstate, frame, this_instr);
- _PyErr_Clear(tstate);
- }
- /* iterator ended normally */
- assert(next_instr[oparg].op.code == END_FOR ||
- next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
- /* Skip END_FOR */
- JUMPBY(oparg + 1);
- DISPATCH();
+ _PyStackRef item = _PyForIter_VirtualIteratorNext(tstate, frame, iter, &null_or_index);
+ if (!PyStackRef_IsValid(item)) {
+ if (PyStackRef_IsError(item)) {
+ ERROR_NO_POP();
}
+ // Jump forward by oparg and skip the following END_FOR
+ JUMPBY(oparg + 1);
+ DISPATCH();
}
+ next = item;
+ INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT);
}
-
op(_ITER_CHECK_LIST, (iter, null_or_index -- iter, null_or_index)) {
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
EXIT_IF(Py_TYPE(iter_o) != &PyList_Type);
diff --git a/Python/ceval.c b/Python/ceval.c
index 7aec196cb85..4cfe4bb88f4 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -3190,7 +3190,7 @@ _PyEval_FormatKwargsError(PyThreadState *tstate, PyObject *func, PyObject *kwarg
else if (_PyErr_ExceptionMatches(tstate, PyExc_KeyError)) {
PyObject *exc = _PyErr_GetRaisedException(tstate);
PyObject *args = PyException_GetArgs(exc);
- if (exc && PyTuple_Check(args) && PyTuple_GET_SIZE(args) == 1) {
+ if (PyTuple_Check(args) && PyTuple_GET_SIZE(args) == 1) {
_PyErr_Clear(tstate);
PyObject *funcstr = _PyObject_FunctionStr(func);
if (funcstr != NULL) {
@@ -3439,8 +3439,8 @@ _PyEval_LoadName(PyThreadState *tstate, _PyInterpreterFrame *frame, PyObject *na
return value;
}
-_PyStackRef
-_PyForIter_NextWithIndex(PyObject *seq, _PyStackRef index)
+static _PyStackRef
+foriter_next(PyObject *seq, _PyStackRef index)
{
assert(PyStackRef_IsTaggedInt(index));
assert(PyTuple_CheckExact(seq) || PyList_CheckExact(seq));
@@ -3459,6 +3459,30 @@ _PyForIter_NextWithIndex(PyObject *seq, _PyStackRef index)
return PyStackRef_FromPyObjectSteal(item);
}
+_PyStackRef _PyForIter_VirtualIteratorNext(PyThreadState* tstate, _PyInterpreterFrame* frame, _PyStackRef iter, _PyStackRef* index_ptr)
+{
+ PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
+ _PyStackRef index = *index_ptr;
+ if (PyStackRef_IsTaggedInt(index)) {
+ *index_ptr = PyStackRef_IncrementTaggedIntNoOverflow(index);
+ return foriter_next(iter_o, index);
+ }
+ PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
+ if (next_o == NULL) {
+ if (_PyErr_Occurred(tstate)) {
+ if (_PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) {
+ _PyEval_MonitorRaise(tstate, frame, frame->instr_ptr);
+ _PyErr_Clear(tstate);
+ }
+ else {
+ return PyStackRef_ERROR;
+ }
+ }
+ return PyStackRef_NULL;
+ }
+ return PyStackRef_FromPyObjectSteal(next_o);
+}
+
/* Check if a 'cls' provides the given special method. */
static inline int
type_has_special_method(PyTypeObject *cls, PyObject *name)
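Informally, the new helper folds the tagged-index and real-iterator paths into one call; a rough Python rendering of its contract, with the error and exhaustion markers standing in for PyStackRef_ERROR and PyStackRef_NULL:

    _EXHAUSTED = object()

    def virtual_iterator_next(iter_obj, index):
        # index is None for a real iterator, or an int for the
        # list/tuple fast path (the tagged-int case above).
        if index is not None:
            if index >= len(iter_obj):
                return _EXHAUSTED, index         # sequence exhausted
            return iter_obj[index], index + 1    # item plus advanced index
        try:
            return next(iter_obj), None
        except StopIteration:
            return _EXHAUSTED, None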
diff --git a/Python/crossinterp.c b/Python/crossinterp.c
index 13d91c508c4..5e73ab28f2b 100644
--- a/Python/crossinterp.c
+++ b/Python/crossinterp.c
@@ -70,6 +70,17 @@ runpy_run_path(const char *filename, const char *modname)
}
+static void
+set_exc_with_cause(PyObject *exctype, const char *msg)
+{
+ PyObject *cause = PyErr_GetRaisedException();
+ PyErr_SetString(exctype, msg);
+ PyObject *exc = PyErr_GetRaisedException();
+ PyException_SetCause(exc, cause);
+ PyErr_SetRaisedException(exc);
+}
+
+
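In Python terms, the helper corresponds to chaining a fresh exception onto the one currently in flight, e.g. (operation and message illustrative only):

    try:
        {}["missing key"]                         # some failing cross-interpreter step
    except KeyError as cause:
        # the new exception keeps the original as __cause__
        raise RuntimeError("higher-level failure") from cause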
static PyObject *
pyerr_get_message(PyObject *exc)
{
@@ -1314,7 +1325,7 @@ _excinfo_normalize_type(struct _excinfo_type *info,
}
static void
-_PyXI_excinfo_Clear(_PyXI_excinfo *info)
+_PyXI_excinfo_clear(_PyXI_excinfo *info)
{
_excinfo_clear_type(&info->type);
if (info->msg != NULL) {
@@ -1364,7 +1375,7 @@ _PyXI_excinfo_InitFromException(_PyXI_excinfo *info, PyObject *exc)
assert(exc != NULL);
if (PyErr_GivenExceptionMatches(exc, PyExc_MemoryError)) {
- _PyXI_excinfo_Clear(info);
+ _PyXI_excinfo_clear(info);
return NULL;
}
const char *failure = NULL;
@@ -1410,7 +1421,7 @@ _PyXI_excinfo_InitFromException(_PyXI_excinfo *info, PyObject *exc)
error:
assert(failure != NULL);
- _PyXI_excinfo_Clear(info);
+ _PyXI_excinfo_clear(info);
return failure;
}
@@ -1461,7 +1472,7 @@ _PyXI_excinfo_InitFromObject(_PyXI_excinfo *info, PyObject *obj)
error:
assert(failure != NULL);
- _PyXI_excinfo_Clear(info);
+ _PyXI_excinfo_clear(info);
return failure;
}
@@ -1656,7 +1667,7 @@ _PyXI_ExcInfoAsObject(_PyXI_excinfo *info)
void
_PyXI_ClearExcInfo(_PyXI_excinfo *info)
{
- _PyXI_excinfo_Clear(info);
+ _PyXI_excinfo_clear(info);
}
@@ -1694,6 +1705,14 @@ _PyXI_ApplyErrorCode(_PyXI_errcode code, PyInterpreterState *interp)
PyErr_SetString(PyExc_InterpreterError,
"failed to apply namespace to __main__");
break;
+ case _PyXI_ERR_PRESERVE_FAILURE:
+ PyErr_SetString(PyExc_InterpreterError,
+ "failed to preserve objects across session");
+ break;
+ case _PyXI_ERR_EXC_PROPAGATION_FAILURE:
+ PyErr_SetString(PyExc_InterpreterError,
+ "failed to transfer exception between interpreters");
+ break;
case _PyXI_ERR_NOT_SHAREABLE:
_set_xid_lookup_failure(tstate, NULL, NULL, NULL);
break;
@@ -1743,7 +1762,7 @@ _PyXI_InitError(_PyXI_error *error, PyObject *excobj, _PyXI_errcode code)
assert(excobj == NULL);
assert(code != _PyXI_ERR_NO_ERROR);
error->code = code;
- _PyXI_excinfo_Clear(&error->uncaught);
+ _PyXI_excinfo_clear(&error->uncaught);
}
return failure;
}
@@ -1753,7 +1772,7 @@ _PyXI_ApplyError(_PyXI_error *error)
{
PyThreadState *tstate = PyThreadState_Get();
if (error->code == _PyXI_ERR_UNCAUGHT_EXCEPTION) {
- // Raise an exception that proxies the propagated exception.
+ // We will raise an exception that proxies the propagated exception.
return _PyXI_excinfo_AsObject(&error->uncaught);
}
else if (error->code == _PyXI_ERR_NOT_SHAREABLE) {
@@ -1839,7 +1858,8 @@ _sharednsitem_has_value(_PyXI_namespace_item *item, int64_t *p_interpid)
}
static int
-_sharednsitem_set_value(_PyXI_namespace_item *item, PyObject *value)
+_sharednsitem_set_value(_PyXI_namespace_item *item, PyObject *value,
+ xidata_fallback_t fallback)
{
assert(_sharednsitem_is_initialized(item));
assert(item->xidata == NULL);
@@ -1848,8 +1868,7 @@ _sharednsitem_set_value(_PyXI_namespace_item *item, PyObject *value)
return -1;
}
PyThreadState *tstate = PyThreadState_Get();
- // XXX Use _PyObject_GetXIDataWithFallback()?
- if (_PyObject_GetXIDataNoFallback(tstate, value, item->xidata) != 0) {
+ if (_PyObject_GetXIData(tstate, value, fallback, item->xidata) < 0) {
PyMem_RawFree(item->xidata);
item->xidata = NULL;
// The caller may want to propagate PyExc_NotShareableError
@@ -1881,7 +1900,8 @@ _sharednsitem_clear(_PyXI_namespace_item *item)
}
static int
-_sharednsitem_copy_from_ns(struct _sharednsitem *item, PyObject *ns)
+_sharednsitem_copy_from_ns(struct _sharednsitem *item, PyObject *ns,
+ xidata_fallback_t fallback)
{
assert(item->name != NULL);
assert(item->xidata == NULL);
@@ -1893,7 +1913,7 @@ _sharednsitem_copy_from_ns(struct _sharednsitem *item, PyObject *ns)
// When applied, this item will be set to the default (or fail).
return 0;
}
- if (_sharednsitem_set_value(item, value) < 0) {
+ if (_sharednsitem_set_value(item, value, fallback) < 0) {
return -1;
}
return 0;
@@ -2144,18 +2164,21 @@ error:
return NULL;
}
-static void _propagate_not_shareable_error(_PyXI_session *);
+static void _propagate_not_shareable_error(_PyXI_errcode *);
static int
-_fill_sharedns(_PyXI_namespace *ns, PyObject *nsobj, _PyXI_session *session)
+_fill_sharedns(_PyXI_namespace *ns, PyObject *nsobj,
+ xidata_fallback_t fallback, _PyXI_errcode *p_errcode)
{
// All items are expected to be shareable.
assert(_sharedns_check_counts(ns));
assert(ns->numnames == ns->maxitems);
assert(ns->numvalues == 0);
for (Py_ssize_t i=0; i < ns->maxitems; i++) {
- if (_sharednsitem_copy_from_ns(&ns->items[i], nsobj) < 0) {
- _propagate_not_shareable_error(session);
+ if (_sharednsitem_copy_from_ns(&ns->items[i], nsobj, fallback) < 0) {
+ if (p_errcode != NULL) {
+ _propagate_not_shareable_error(p_errcode);
+ }
// Clear out the ones we set so far.
for (Py_ssize_t j=0; j < i; j++) {
_sharednsitem_clear_value(&ns->items[j]);
@@ -2221,6 +2244,18 @@ _apply_sharedns(_PyXI_namespace *ns, PyObject *nsobj, PyObject *dflt)
/* switched-interpreter sessions */
/*********************************/
+struct xi_session_error {
+ // This is set if the interpreter is entered and raised an exception
+ // that needs to be handled in some special way during exit.
+ _PyXI_errcode *override;
+ // This is set if exit captured an exception to propagate.
+ _PyXI_error *info;
+
+ // -- pre-allocated memory --
+ _PyXI_error _info;
+ _PyXI_errcode _override;
+};
+
struct xi_session {
#define SESSION_UNUSED 0
#define SESSION_ACTIVE 1
@@ -2249,18 +2284,14 @@ struct xi_session {
// beginning of the session as a convenience.
PyObject *main_ns;
- // This is set if the interpreter is entered and raised an exception
- // that needs to be handled in some special way during exit.
- _PyXI_errcode *error_override;
- // This is set if exit captured an exception to propagate.
- _PyXI_error *error;
+ // This is a dict of objects that will be available (via sharing)
+ // once the session exits. Do not access this directly; use
+ // _PyXI_Preserve() and _PyXI_GetPreserved() instead.
+ PyObject *_preserved;
- // -- pre-allocated memory --
- _PyXI_error _error;
- _PyXI_errcode _error_override;
+ struct xi_session_error error;
};
-
_PyXI_session *
_PyXI_NewSession(void)
{
@@ -2286,9 +2317,25 @@ _session_is_active(_PyXI_session *session)
return session->status == SESSION_ACTIVE;
}
-static int _ensure_main_ns(_PyXI_session *);
+static int
+_session_pop_error(_PyXI_session *session, struct xi_session_error *err)
+{
+ if (session->error.info == NULL) {
+ assert(session->error.override == NULL);
+ *err = (struct xi_session_error){0};
+ return 0;
+ }
+ *err = session->error;
+ err->info = &err->_info;
+ if (err->override != NULL) {
+ err->override = &err->_override;
+ }
+ session->error = (struct xi_session_error){0};
+ return 1;
+}
+
+static int _ensure_main_ns(_PyXI_session *, _PyXI_errcode *);
static inline void _session_set_error(_PyXI_session *, _PyXI_errcode);
-static void _capture_current_exception(_PyXI_session *);
/* enter/exit a cross-interpreter session */
@@ -2305,9 +2352,9 @@ _enter_session(_PyXI_session *session, PyInterpreterState *interp)
assert(!session->running);
assert(session->main_ns == NULL);
// Set elsewhere and cleared in _capture_current_exception().
- assert(session->error_override == NULL);
- // Set elsewhere and cleared in _PyXI_ApplyCapturedException().
- assert(session->error == NULL);
+ assert(session->error.override == NULL);
+ // Set elsewhere and cleared in _PyXI_Exit().
+ assert(session->error.info == NULL);
// Switch to interpreter.
PyThreadState *tstate = PyThreadState_Get();
@@ -2336,14 +2383,16 @@ _exit_session(_PyXI_session *session)
PyThreadState *tstate = session->init_tstate;
assert(tstate != NULL);
assert(PyThreadState_Get() == tstate);
+ assert(!_PyErr_Occurred(tstate));
// Release any of the entered interpreters resources.
Py_CLEAR(session->main_ns);
+ Py_CLEAR(session->_preserved);
// Ensure this thread no longer owns __main__.
if (session->running) {
_PyInterpreterState_SetNotRunningMain(tstate->interp);
- assert(!PyErr_Occurred());
+ assert(!_PyErr_Occurred(tstate));
session->running = 0;
}
@@ -2360,21 +2409,16 @@ _exit_session(_PyXI_session *session)
assert(!session->own_init_tstate);
}
- // For now the error data persists past the exit.
- *session = (_PyXI_session){
- .error_override = session->error_override,
- .error = session->error,
- ._error = session->_error,
- ._error_override = session->_error_override,
- };
+ assert(session->error.info == NULL);
+ assert(session->error.override == _PyXI_ERR_NO_ERROR);
+
+ *session = (_PyXI_session){0};
}
static void
-_propagate_not_shareable_error(_PyXI_session *session)
+_propagate_not_shareable_error(_PyXI_errcode *p_errcode)
{
- if (session == NULL) {
- return;
- }
+ assert(p_errcode != NULL);
PyThreadState *tstate = PyThreadState_Get();
PyObject *exctype = get_notshareableerror_type(tstate);
if (exctype == NULL) {
@@ -2384,46 +2428,46 @@ _propagate_not_shareable_error(_PyXI_session *session)
}
if (PyErr_ExceptionMatches(exctype)) {
// We want to propagate the exception directly.
- _session_set_error(session, _PyXI_ERR_NOT_SHAREABLE);
+ *p_errcode = _PyXI_ERR_NOT_SHAREABLE;
}
}
-PyObject *
-_PyXI_ApplyCapturedException(_PyXI_session *session)
-{
- assert(!PyErr_Occurred());
- assert(session->error != NULL);
- PyObject *res = _PyXI_ApplyError(session->error);
- assert((res == NULL) != (PyErr_Occurred() == NULL));
- session->error = NULL;
- return res;
-}
-
-int
-_PyXI_HasCapturedException(_PyXI_session *session)
-{
- return session->error != NULL;
-}
-
int
_PyXI_Enter(_PyXI_session *session,
- PyInterpreterState *interp, PyObject *nsupdates)
+ PyInterpreterState *interp, PyObject *nsupdates,
+ _PyXI_session_result *result)
{
// Convert the attrs for cross-interpreter use.
_PyXI_namespace *sharedns = NULL;
if (nsupdates != NULL) {
Py_ssize_t len = PyDict_Size(nsupdates);
if (len < 0) {
+ if (result != NULL) {
+ result->errcode = _PyXI_ERR_APPLY_NS_FAILURE;
+ }
return -1;
}
if (len > 0) {
sharedns = _create_sharedns(nsupdates);
if (sharedns == NULL) {
+ if (result != NULL) {
+ result->errcode = _PyXI_ERR_APPLY_NS_FAILURE;
+ }
return -1;
}
- if (_fill_sharedns(sharedns, nsupdates, NULL) < 0) {
- assert(session->error == NULL);
+ // For now we limit it to shareable objects.
+ xidata_fallback_t fallback = _PyXIDATA_XIDATA_ONLY;
+ _PyXI_errcode errcode = _PyXI_ERR_NO_ERROR;
+ if (_fill_sharedns(sharedns, nsupdates, fallback, &errcode) < 0) {
+ assert(PyErr_Occurred());
+ assert(session->error.info == NULL);
+ if (errcode == _PyXI_ERR_NO_ERROR) {
+ errcode = _PyXI_ERR_UNCAUGHT_EXCEPTION;
+ }
_destroy_sharedns(sharedns);
+ if (result != NULL) {
+ result->errcode = errcode;
+ }
return -1;
}
}
@@ -2445,8 +2489,7 @@ _PyXI_Enter(_PyXI_session *session,
// Apply the cross-interpreter data.
if (sharedns != NULL) {
- if (_ensure_main_ns(session) < 0) {
- errcode = _PyXI_ERR_MAIN_NS_FAILURE;
+ if (_ensure_main_ns(session, &errcode) < 0) {
goto error;
}
if (_apply_sharedns(sharedns, session->main_ns, NULL) < 0) {
@@ -2462,19 +2505,124 @@ _PyXI_Enter(_PyXI_session *session,
error:
// We want to propagate all exceptions here directly (best effort).
+ assert(errcode != _PyXI_ERR_NO_ERROR);
_session_set_error(session, errcode);
+ assert(!PyErr_Occurred());
+
+ // Exit the session.
+ struct xi_session_error err;
+ (void)_session_pop_error(session, &err);
_exit_session(session);
+
if (sharedns != NULL) {
_destroy_sharedns(sharedns);
}
+
+ // Apply the error from the other interpreter.
+ PyObject *excinfo = _PyXI_ApplyError(err.info);
+ _PyXI_excinfo_clear(&err.info->uncaught);
+ if (excinfo != NULL) {
+ if (result != NULL) {
+ result->excinfo = excinfo;
+ }
+ else {
+#ifdef Py_DEBUG
+ fprintf(stderr, "_PyXI_Enter(): uncaught exception discarded");
+#endif
+ }
+ }
+ assert(PyErr_Occurred());
+
return -1;
}
-void
-_PyXI_Exit(_PyXI_session *session)
+static int _pop_preserved(_PyXI_session *, _PyXI_namespace **, PyObject **,
+ _PyXI_errcode *);
+static int _finish_preserved(_PyXI_namespace *, PyObject **);
+
+int
+_PyXI_Exit(_PyXI_session *session, _PyXI_errcode errcode,
+ _PyXI_session_result *result)
{
- _capture_current_exception(session);
+ int res = 0;
+
+ // Capture the raised exception, if any.
+ assert(session->error.info == NULL);
+ if (PyErr_Occurred()) {
+ _session_set_error(session, errcode);
+ assert(!PyErr_Occurred());
+ }
+ else {
+ assert(errcode == _PyXI_ERR_NO_ERROR);
+ assert(session->error.override == NULL);
+ }
+
+ // Capture the preserved namespace.
+ _PyXI_namespace *preserved = NULL;
+ PyObject *preservedobj = NULL;
+ if (result != NULL) {
+ errcode = _PyXI_ERR_NO_ERROR;
+ if (_pop_preserved(session, &preserved, &preservedobj, &errcode) < 0) {
+ if (session->error.info != NULL) {
+ // XXX Chain the exception (i.e. set __context__)?
+ PyErr_FormatUnraisable(
+ "Exception ignored while capturing preserved objects");
+ }
+ else {
+ _session_set_error(session, errcode);
+ }
+ }
+ }
+
+ // Exit the session.
+ struct xi_session_error err;
+ (void)_session_pop_error(session, &err);
_exit_session(session);
+
+ // Restore the preserved namespace.
+ assert(preserved == NULL || preservedobj == NULL);
+ if (_finish_preserved(preserved, &preservedobj) < 0) {
+ assert(preservedobj == NULL);
+ if (err.info != NULL) {
+ // XXX Chain the exception (i.e. set __context__)?
+ PyErr_FormatUnraisable(
+ "Exception ignored while capturing preserved objects");
+ }
+ else {
+ errcode = _PyXI_ERR_PRESERVE_FAILURE;
+ _propagate_not_shareable_error(&errcode);
+ }
+ }
+ if (result != NULL) {
+ result->preserved = preservedobj;
+ result->errcode = errcode;
+ }
+
+ // Apply the error from the other interpreter, if any.
+ if (err.info != NULL) {
+ res = -1;
+ assert(!PyErr_Occurred());
+ PyObject *excinfo = _PyXI_ApplyError(err.info);
+ _PyXI_excinfo_clear(&err.info->uncaught);
+ if (excinfo == NULL) {
+ assert(PyErr_Occurred());
+ if (result != NULL) {
+ _PyXI_ClearResult(result);
+ *result = (_PyXI_session_result){
+ .errcode = _PyXI_ERR_EXC_PROPAGATION_FAILURE,
+ };
+ }
+ }
+ else if (result != NULL) {
+ result->excinfo = excinfo;
+ }
+ else {
+#ifdef Py_DEBUG
+ fprintf(stderr, "_PyXI_Exit(): uncaught exception discarded");
+#endif
+ }
+ }
+ return res;
}
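
Taken together, the reworked _PyXI_Enter()/_PyXI_Exit() pair moves error and result reporting into a caller-supplied _PyXI_session_result. A hedged sketch of the intended calling pattern, based only on the signatures shown above — run_in_interp() is hypothetical, and _PyXI_FreeSession() is assumed as the counterpart of _PyXI_NewSession():

/* Sketch of the new calling convention; error handling trimmed. */
static int
run_in_interp(PyInterpreterState *interp, PyObject *nsupdates)
{
    _PyXI_session *session = _PyXI_NewSession();
    if (session == NULL) {
        return -1;
    }
    _PyXI_session_result result = {0};
    if (_PyXI_Enter(session, interp, nsupdates, &result) < 0) {
        // result.errcode and (possibly) result.excinfo describe the failure.
        _PyXI_ClearResult(&result);
        _PyXI_FreeSession(session);   // assumed counterpart of _PyXI_NewSession()
        return -1;
    }

    // ... run code in the entered interpreter; on failure leave the
    // exception set and pick a matching errcode ...
    _PyXI_errcode errcode = _PyXI_ERR_NO_ERROR;

    int res = _PyXI_Exit(session, errcode, &result);
    // On failure, result.excinfo (if set) proxies the uncaught exception;
    // result.preserved holds objects saved with _PyXI_Preserve().
    _PyXI_ClearResult(&result);
    _PyXI_FreeSession(session);
    return res;
}
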
@@ -2483,15 +2631,15 @@ _PyXI_Exit(_PyXI_session *session)
static void
_capture_current_exception(_PyXI_session *session)
{
- assert(session->error == NULL);
+ assert(session->error.info == NULL);
if (!PyErr_Occurred()) {
- assert(session->error_override == NULL);
+ assert(session->error.override == NULL);
return;
}
// Handle the exception override.
- _PyXI_errcode *override = session->error_override;
- session->error_override = NULL;
+ _PyXI_errcode *override = session->error.override;
+ session->error.override = NULL;
_PyXI_errcode errcode = override != NULL
? *override
: _PyXI_ERR_UNCAUGHT_EXCEPTION;
@@ -2514,7 +2662,7 @@ _capture_current_exception(_PyXI_session *session)
}
// Capture the exception.
- _PyXI_error *err = &session->_error;
+ _PyXI_error *err = &session->error._info;
*err = (_PyXI_error){
.interp = session->init_tstate->interp,
};
@@ -2541,7 +2689,7 @@ _capture_current_exception(_PyXI_session *session)
// Finished!
assert(!PyErr_Occurred());
- session->error = err;
+ session->error.info = err;
}
static inline void
@@ -2549,15 +2697,19 @@ _session_set_error(_PyXI_session *session, _PyXI_errcode errcode)
{
assert(_session_is_active(session));
assert(PyErr_Occurred());
+ if (errcode == _PyXI_ERR_NO_ERROR) {
+ // We're a bit forgiving here.
+ errcode = _PyXI_ERR_UNCAUGHT_EXCEPTION;
+ }
if (errcode != _PyXI_ERR_UNCAUGHT_EXCEPTION) {
- session->_error_override = errcode;
- session->error_override = &session->_error_override;
+ session->error._override = errcode;
+ session->error.override = &session->error._override;
}
_capture_current_exception(session);
}
static int
-_ensure_main_ns(_PyXI_session *session)
+_ensure_main_ns(_PyXI_session *session, _PyXI_errcode *p_errcode)
{
assert(_session_is_active(session));
if (session->main_ns != NULL) {
@@ -2566,11 +2718,17 @@ _ensure_main_ns(_PyXI_session *session)
// Cache __main__.__dict__.
PyObject *main_mod = _Py_GetMainModule(session->init_tstate);
if (_Py_CheckMainModule(main_mod) < 0) {
+ if (p_errcode != NULL) {
+ *p_errcode = _PyXI_ERR_MAIN_NS_FAILURE;
+ }
return -1;
}
PyObject *ns = PyModule_GetDict(main_mod); // borrowed
Py_DECREF(main_mod);
if (ns == NULL) {
+ if (p_errcode != NULL) {
+ *p_errcode = _PyXI_ERR_MAIN_NS_FAILURE;
+ }
return -1;
}
session->main_ns = Py_NewRef(ns);
@@ -2578,21 +2736,150 @@ _ensure_main_ns(_PyXI_session *session)
}
PyObject *
-_PyXI_GetMainNamespace(_PyXI_session *session)
+_PyXI_GetMainNamespace(_PyXI_session *session, _PyXI_errcode *p_errcode)
{
if (!_session_is_active(session)) {
PyErr_SetString(PyExc_RuntimeError, "session not active");
return NULL;
}
- if (_ensure_main_ns(session) < 0) {
- _session_set_error(session, _PyXI_ERR_MAIN_NS_FAILURE);
- _capture_current_exception(session);
+ if (_ensure_main_ns(session, p_errcode) < 0) {
return NULL;
}
return session->main_ns;
}
+static int
+_pop_preserved(_PyXI_session *session,
+ _PyXI_namespace **p_xidata, PyObject **p_obj,
+ _PyXI_errcode *p_errcode)
+{
+ assert(_PyThreadState_GET() == session->init_tstate); // active session
+ if (session->_preserved == NULL) {
+ *p_xidata = NULL;
+ *p_obj = NULL;
+ return 0;
+ }
+ if (session->init_tstate == session->prev_tstate) {
+ // We did not switch interpreters.
+ *p_xidata = NULL;
+ *p_obj = session->_preserved;
+ session->_preserved = NULL;
+ return 0;
+ }
+ *p_obj = NULL;
+
+ // We did switch interpreters.
+ Py_ssize_t len = PyDict_Size(session->_preserved);
+ if (len < 0) {
+ if (p_errcode != NULL) {
+ *p_errcode = _PyXI_ERR_PRESERVE_FAILURE;
+ }
+ return -1;
+ }
+ else if (len == 0) {
+ *p_xidata = NULL;
+ }
+ else {
+ _PyXI_namespace *xidata = _create_sharedns(session->_preserved);
+ if (xidata == NULL) {
+ if (p_errcode != NULL) {
+ *p_errcode = _PyXI_ERR_PRESERVE_FAILURE;
+ }
+ return -1;
+ }
+ _PyXI_errcode errcode = _PyXI_ERR_NO_ERROR;
+ if (_fill_sharedns(xidata, session->_preserved,
+ _PyXIDATA_FULL_FALLBACK, &errcode) < 0)
+ {
+ assert(session->error.info == NULL);
+ if (errcode != _PyXI_ERR_NOT_SHAREABLE) {
+ errcode = _PyXI_ERR_PRESERVE_FAILURE;
+ }
+ if (p_errcode != NULL) {
+ *p_errcode = errcode;
+ }
+ _destroy_sharedns(xidata);
+ return -1;
+ }
+ *p_xidata = xidata;
+ }
+ Py_CLEAR(session->_preserved);
+ return 0;
+}
+
+static int
+_finish_preserved(_PyXI_namespace *xidata, PyObject **p_preserved)
+{
+ if (xidata == NULL) {
+ return 0;
+ }
+ int res = -1;
+ if (p_preserved != NULL) {
+ PyObject *ns = PyDict_New();
+ if (ns == NULL) {
+ goto finally;
+ }
+ if (_apply_sharedns(xidata, ns, NULL) < 0) {
+ Py_CLEAR(ns);
+ goto finally;
+ }
+ *p_preserved = ns;
+ }
+ res = 0;
+
+finally:
+ _destroy_sharedns(xidata);
+ return res;
+}
+
+int
+_PyXI_Preserve(_PyXI_session *session, const char *name, PyObject *value,
+ _PyXI_errcode *p_errcode)
+{
+ if (!_session_is_active(session)) {
+ PyErr_SetString(PyExc_RuntimeError, "session not active");
+ return -1;
+ }
+ if (session->_preserved == NULL) {
+ session->_preserved = PyDict_New();
+ if (session->_preserved == NULL) {
+ set_exc_with_cause(PyExc_RuntimeError,
+ "failed to initialize preserved objects");
+ if (p_errcode != NULL) {
+ *p_errcode = _PyXI_ERR_PRESERVE_FAILURE;
+ }
+ return -1;
+ }
+ }
+ if (PyDict_SetItemString(session->_preserved, name, value) < 0) {
+ set_exc_with_cause(PyExc_RuntimeError, "failed to preserve object");
+ if (p_errcode != NULL) {
+ *p_errcode = _PyXI_ERR_PRESERVE_FAILURE;
+ }
+ return -1;
+ }
+ return 0;
+}
+
+PyObject *
+_PyXI_GetPreserved(_PyXI_session_result *result, const char *name)
+{
+ PyObject *value = NULL;
+ if (result->preserved != NULL) {
+ (void)PyDict_GetItemStringRef(result->preserved, name, &value);
+ }
+ return value;
+}
+
+void
+_PyXI_ClearResult(_PyXI_session_result *result)
+{
+ Py_CLEAR(result->preserved);
+ Py_CLEAR(result->excinfo);
+}
+
+
/*********************/
/* runtime lifecycle */
/*********************/
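
The preservation hooks above let code running inside a session stash objects that the host can read back after _PyXI_Exit() returns. A minimal two-sided sketch (stash_value() and fetch_value() are hypothetical helpers; "spam" is an arbitrary key):

/* Inside the session (guest side): stash a value under a name. */
static int
stash_value(_PyXI_session *session, PyObject *value)
{
    _PyXI_errcode errcode = _PyXI_ERR_NO_ERROR;
    if (_PyXI_Preserve(session, "spam", value, &errcode) < 0) {
        // An exception is set; errcode reports _PyXI_ERR_PRESERVE_FAILURE.
        return -1;
    }
    return 0;
}

/* After _PyXI_Exit() (host side): read it back from the result. */
static PyObject *
fetch_value(_PyXI_session_result *result)
{
    // Returns a new reference, or NULL if nothing was preserved under "spam".
    return _PyXI_GetPreserved(result, "spam");
}
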
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index 35b29940cb4..d19605169d5 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -4268,33 +4268,20 @@
_PyStackRef next;
null_or_index = stack_pointer[-1];
iter = stack_pointer[-2];
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- if (!PyStackRef_IsNull(null_or_index)) {
- UOP_STAT_INC(uopcode, miss);
- JUMP_TO_JUMP_TARGET();
- }
_PyFrame_SetStackPointer(frame, stack_pointer);
- PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
+ _PyStackRef item = _PyForIter_VirtualIteratorNext(tstate, frame, iter, &null_or_index);
stack_pointer = _PyFrame_GetStackPointer(frame);
- if (next_o == NULL) {
- if (_PyErr_Occurred(tstate)) {
- _PyFrame_SetStackPointer(frame, stack_pointer);
- int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (!matches) {
- JUMP_TO_ERROR();
- }
- _PyFrame_SetStackPointer(frame, stack_pointer);
- _PyEval_MonitorRaise(tstate, frame, frame->instr_ptr);
- _PyErr_Clear(tstate);
- stack_pointer = _PyFrame_GetStackPointer(frame);
+ if (!PyStackRef_IsValid(item)) {
+ if (PyStackRef_IsError(item)) {
+ JUMP_TO_ERROR();
}
if (true) {
UOP_STAT_INC(uopcode, miss);
JUMP_TO_JUMP_TARGET();
}
}
- next = PyStackRef_FromPyObjectSteal(next_o);
+ next = item;
+ stack_pointer[-1] = null_or_index;
stack_pointer[0] = next;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
diff --git a/Python/flowgraph.c b/Python/flowgraph.c
index 67ccf350b72..2adc8c84d83 100644
--- a/Python/flowgraph.c
+++ b/Python/flowgraph.c
@@ -2870,9 +2870,11 @@ optimize_load_fast(cfg_builder *g)
// how many inputs should be left on the stack.
// Opcodes that consume no inputs
+ case FORMAT_SIMPLE:
case GET_ANEXT:
case GET_ITER:
case GET_LEN:
+ case GET_YIELD_FROM_ITER:
case IMPORT_FROM:
case MATCH_KEYS:
case MATCH_MAPPING:
@@ -2907,6 +2909,16 @@ optimize_load_fast(cfg_builder *g)
break;
}
+ case END_SEND:
+ case SET_FUNCTION_ATTRIBUTE: {
+ assert(_PyOpcode_num_popped(opcode, oparg) == 2);
+ assert(_PyOpcode_num_pushed(opcode, oparg) == 1);
+ ref tos = ref_stack_pop(&refs);
+ ref_stack_pop(&refs);
+ PUSH_REF(tos.instr, tos.local);
+ break;
+ }
+
// Opcodes that consume some inputs and push new values
case CHECK_EXC_MATCH: {
ref_stack_pop(&refs);
@@ -2936,6 +2948,14 @@ optimize_load_fast(cfg_builder *g)
break;
}
+ case LOAD_SPECIAL:
+ case PUSH_EXC_INFO: {
+ ref tos = ref_stack_pop(&refs);
+ PUSH_REF(i, NOT_LOCAL);
+ PUSH_REF(tos.instr, tos.local);
+ break;
+ }
+
case SEND: {
load_fast_push_block(&sp, instr->i_target, refs.size);
ref_stack_pop(&refs);
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 5be2671700a..c8825df3ade 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -5753,43 +5753,18 @@
}
// _FOR_ITER
{
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- if (PyStackRef_IsTaggedInt(null_or_index)) {
- _PyFrame_SetStackPointer(frame, stack_pointer);
- next = _PyForIter_NextWithIndex(iter_o, null_or_index);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (PyStackRef_IsNull(next)) {
- null_or_index = PyStackRef_TagInt(-1);
- JUMPBY(oparg + 1);
- stack_pointer[-1] = null_or_index;
- DISPATCH();
- }
- null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index);
- }
- else {
- _PyFrame_SetStackPointer(frame, stack_pointer);
- PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (next_o == NULL) {
- if (_PyErr_Occurred(tstate)) {
- _PyFrame_SetStackPointer(frame, stack_pointer);
- int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (!matches) {
- JUMP_TO_LABEL(error);
- }
- _PyFrame_SetStackPointer(frame, stack_pointer);
- _PyEval_MonitorRaise(tstate, frame, this_instr);
- _PyErr_Clear(tstate);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- }
- assert(next_instr[oparg].op.code == END_FOR ||
- next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
- JUMPBY(oparg + 1);
- DISPATCH();
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ _PyStackRef item = _PyForIter_VirtualIteratorNext(tstate, frame, iter, &null_or_index);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ if (!PyStackRef_IsValid(item)) {
+ if (PyStackRef_IsError(item)) {
+ JUMP_TO_LABEL(error);
}
- next = PyStackRef_FromPyObjectSteal(next_o);
+ JUMPBY(oparg + 1);
+ stack_pointer[-1] = null_or_index;
+ DISPATCH();
}
+ next = item;
}
stack_pointer[-1] = null_or_index;
stack_pointer[0] = next;
@@ -7061,44 +7036,20 @@
/* Skip 1 cache entry */
null_or_index = stack_pointer[-1];
iter = stack_pointer[-2];
- PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
- if (PyStackRef_IsTaggedInt(null_or_index)) {
- _PyFrame_SetStackPointer(frame, stack_pointer);
- next = _PyForIter_NextWithIndex(iter_o, null_or_index);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (PyStackRef_IsNull(next)) {
- JUMPBY(oparg + 1);
- DISPATCH();
- }
- INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT);
- }
- else {
- _PyFrame_SetStackPointer(frame, stack_pointer);
- PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (next_o != NULL) {
- next = PyStackRef_FromPyObjectSteal(next_o);
- INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT);
- }
- else {
- if (_PyErr_Occurred(tstate)) {
- _PyFrame_SetStackPointer(frame, stack_pointer);
- int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (!matches) {
- JUMP_TO_LABEL(error);
- }
- _PyFrame_SetStackPointer(frame, stack_pointer);
- _PyEval_MonitorRaise(tstate, frame, this_instr);
- _PyErr_Clear(tstate);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- }
- assert(next_instr[oparg].op.code == END_FOR ||
- next_instr[oparg].op.code == INSTRUMENTED_END_FOR);
- JUMPBY(oparg + 1);
- DISPATCH();
+ _PyFrame_SetStackPointer(frame, stack_pointer);
+ _PyStackRef item = _PyForIter_VirtualIteratorNext(tstate, frame, iter, &null_or_index);
+ stack_pointer = _PyFrame_GetStackPointer(frame);
+ if (!PyStackRef_IsValid(item)) {
+ if (PyStackRef_IsError(item)) {
+ JUMP_TO_LABEL(error);
}
+ JUMPBY(oparg + 1);
+ stack_pointer[-1] = null_or_index;
+ DISPATCH();
}
+ next = item;
+ INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT);
+ stack_pointer[-1] = null_or_index;
stack_pointer[0] = next;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
diff --git a/Python/import.c b/Python/import.c
index 98557991378..184dede335d 100644
--- a/Python/import.c
+++ b/Python/import.c
@@ -3964,8 +3964,10 @@ PyImport_Import(PyObject *module_name)
if (globals != NULL) {
Py_INCREF(globals);
builtins = PyObject_GetItem(globals, &_Py_ID(__builtins__));
- if (builtins == NULL)
+ if (builtins == NULL) {
+ // XXX Fall back to interp->builtins or sys.modules['builtins']?
goto err;
+ }
}
else {
/* No globals -- use standard builtins, and fake globals */
diff --git a/Python/lock.c b/Python/lock.c
index 28a12ad1835..b125ad0c9e3 100644
--- a/Python/lock.c
+++ b/Python/lock.c
@@ -119,6 +119,9 @@ _PyMutex_LockTimed(PyMutex *m, PyTime_t timeout, _PyLockFlags flags)
return PY_LOCK_INTR;
}
}
+ else if (ret == Py_PARK_INTR && (flags & _PY_FAIL_IF_INTERRUPTED)) {
+ return PY_LOCK_INTR;
+ }
else if (ret == Py_PARK_TIMEOUT) {
assert(timeout >= 0);
return PY_LOCK_FAILURE;
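
This change makes _PyMutex_LockTimed() report PY_LOCK_INTR when the park itself is interrupted and _PY_FAIL_IF_INTERRUPTED is set, instead of silently retrying. A sketch of an interruptible acquisition, mirroring how the new PyThread_acquire_lock_timed() below combines the flags (internal API, Py_BUILD_CORE only; the retry policy is left to the caller):

/* Sketch: interruptible acquisition of an internal PyMutex. */
#include "Python.h"
#include "pycore_lock.h"    // PyMutex, _PyMutex_LockTimed(), _PyLockFlags

static int
acquire_interruptible(PyMutex *m, PyTime_t timeout_ns)
{
    _PyLockFlags flags = _Py_LOCK_DONT_DETACH;
    flags |= _PY_FAIL_IF_INTERRUPTED;
    PyLockStatus st = _PyMutex_LockTimed(m, timeout_ns, flags);
    switch (st) {
    case PY_LOCK_ACQUIRED:
        return 1;
    case PY_LOCK_INTR:
        // A signal arrived while parked; the caller can run pending calls
        // (signal handlers) and decide whether to retry.
        return -1;
    default:
        return 0;   // PY_LOCK_FAILURE: timed out
    }
}
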
diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c
index e1209209660..b4220e2c627 100644
--- a/Python/optimizer_bytecodes.c
+++ b/Python/optimizer_bytecodes.c
@@ -840,6 +840,17 @@ dummy_func(void) {
value = sym_new_unknown(ctx);
}
+ op(_GET_ITER, (iterable -- iter, index_or_null)) {
+ if (sym_matches_type(iterable, &PyTuple_Type) || sym_matches_type(iterable, &PyList_Type)) {
+ iter = iterable;
+ index_or_null = sym_new_not_null(ctx);
+ }
+ else {
+ iter = sym_new_not_null(ctx);
+ index_or_null = sym_new_unknown(ctx);
+ }
+ }
+
op(_FOR_ITER_GEN_FRAME, (unused, unused -- unused, unused, gen_frame: _Py_UOpsAbstractFrame*)) {
gen_frame = NULL;
/* We are about to hit the end of the trace */
diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h
index db86edcc785..960c6838004 100644
--- a/Python/optimizer_cases.c.h
+++ b/Python/optimizer_cases.c.h
@@ -1562,10 +1562,18 @@
}
case _GET_ITER: {
+ JitOptSymbol *iterable;
JitOptSymbol *iter;
JitOptSymbol *index_or_null;
- iter = sym_new_not_null(ctx);
- index_or_null = sym_new_not_null(ctx);
+ iterable = stack_pointer[-1];
+ if (sym_matches_type(iterable, &PyTuple_Type) || sym_matches_type(iterable, &PyList_Type)) {
+ iter = iterable;
+ index_or_null = sym_new_not_null(ctx);
+ }
+ else {
+ iter = sym_new_not_null(ctx);
+ index_or_null = sym_new_unknown(ctx);
+ }
stack_pointer[-1] = iter;
stack_pointer[0] = index_or_null;
stack_pointer += 1;
diff --git a/Python/parking_lot.c b/Python/parking_lot.c
index 8edf4323594..e896dea0271 100644
--- a/Python/parking_lot.c
+++ b/Python/parking_lot.c
@@ -112,17 +112,27 @@ _PySemaphore_PlatformWait(_PySemaphore *sema, PyTime_t timeout)
}
}
- // NOTE: we wait on the sigint event even in non-main threads to match the
- // behavior of the other platforms. Non-main threads will ignore the
- // Py_PARK_INTR result.
- HANDLE sigint_event = _PyOS_SigintEvent();
- HANDLE handles[2] = { sema->platform_sem, sigint_event };
- DWORD count = sigint_event != NULL ? 2 : 1;
+ HANDLE handles[2] = { sema->platform_sem, NULL };
+ HANDLE sigint_event = NULL;
+ DWORD count = 1;
+ if (_Py_IsMainThread()) {
+ // gh-135099: Wait on the SIGINT event only in the main thread. Other
+ // threads would ignore the result anyways, and accessing
+ // `_PyOS_SigintEvent()` from non-main threads may race with
+ // interpreter shutdown, which closes the event handle. Note that
+ // non-main interpreters will ignore the result.
+ sigint_event = _PyOS_SigintEvent();
+ if (sigint_event != NULL) {
+ handles[1] = sigint_event;
+ count = 2;
+ }
+ }
wait = WaitForMultipleObjects(count, handles, FALSE, millis);
if (wait == WAIT_OBJECT_0) {
res = Py_PARK_OK;
}
else if (wait == WAIT_OBJECT_0 + 1) {
+ assert(sigint_event != NULL);
ResetEvent(sigint_event);
res = Py_PARK_INTR;
}
diff --git a/Python/perf_jit_trampoline.c b/Python/perf_jit_trampoline.c
index 5c7cb5b0a99..2ca18c23593 100644
--- a/Python/perf_jit_trampoline.c
+++ b/Python/perf_jit_trampoline.c
@@ -869,7 +869,11 @@ static void elf_init_ehframe(ELFObjectContext* ctx) {
*/
#ifdef __x86_64__
/* x86_64 calling convention unwinding rules */
+# if defined(__CET__) && (__CET__ & 1)
+ DWRF_U8(DWRF_CFA_advance_loc | 8); // Advance location by 8 bytes when CET protection is enabled
+# else
DWRF_U8(DWRF_CFA_advance_loc | 4); // Advance location by 4 bytes
+# endif
DWRF_U8(DWRF_CFA_def_cfa_offset); // Redefine CFA offset
DWRF_UV(16); // New offset: SP + 16
DWRF_U8(DWRF_CFA_advance_loc | 6); // Advance location by 6 bytes
diff --git a/Python/stackrefs.c b/Python/stackrefs.c
index b2a1369031a..ecc0012ef17 100644
--- a/Python/stackrefs.c
+++ b/Python/stackrefs.c
@@ -40,6 +40,7 @@ make_table_entry(PyObject *obj, const char *filename, int linenumber)
PyObject *
_Py_stackref_get_object(_PyStackRef ref)
{
+ assert(!PyStackRef_IsError(ref));
if (ref.index == 0) {
return NULL;
}
@@ -64,6 +65,7 @@ PyStackRef_Is(_PyStackRef a, _PyStackRef b)
PyObject *
_Py_stackref_close(_PyStackRef ref, const char *filename, int linenumber)
{
+ assert(!PyStackRef_IsError(ref));
PyInterpreterState *interp = PyInterpreterState_Get();
if (ref.index >= interp->next_stackref) {
_Py_FatalErrorFormat(__func__, "Invalid StackRef with ID %" PRIu64 " at %s:%d\n", (void *)ref.index, filename, linenumber);
@@ -128,6 +130,7 @@ _Py_stackref_create(PyObject *obj, const char *filename, int linenumber)
void
_Py_stackref_record_borrow(_PyStackRef ref, const char *filename, int linenumber)
{
+ assert(!PyStackRef_IsError(ref));
if (ref.index < INITIAL_STACKREF_INDEX) {
return;
}
@@ -152,6 +155,7 @@ _Py_stackref_record_borrow(_PyStackRef ref, const char *filename, int linenumber
void
_Py_stackref_associate(PyInterpreterState *interp, PyObject *obj, _PyStackRef ref)
{
+ assert(!PyStackRef_IsError(ref));
assert(ref.index < INITIAL_STACKREF_INDEX);
TableEntry *entry = make_table_entry(obj, "builtin-object", 0);
if (entry == NULL) {
diff --git a/Python/thread.c b/Python/thread.c
index 4ff5f11a348..18c4af7f634 100644
--- a/Python/thread.c
+++ b/Python/thread.c
@@ -39,7 +39,8 @@
const long long PY_TIMEOUT_MAX = PY_TIMEOUT_MAX_VALUE;
-static void PyThread__init_thread(void); /* Forward */
+/* Forward declaration */
+static void PyThread__init_thread(void);
#define initialized _PyRuntime.threads.initialized
@@ -71,6 +72,79 @@ PyThread_init_thread(void)
#endif
+/*
+ * Lock support.
+ */
+
+PyThread_type_lock
+PyThread_allocate_lock(void)
+{
+ if (!initialized) {
+ PyThread_init_thread();
+ }
+
+ PyMutex *lock = (PyMutex *)PyMem_RawMalloc(sizeof(PyMutex));
+ if (lock) {
+ *lock = (PyMutex){0};
+ }
+
+ return (PyThread_type_lock)lock;
+}
+
+void
+PyThread_free_lock(PyThread_type_lock lock)
+{
+ PyMem_RawFree(lock);
+}
+
+PyLockStatus
+PyThread_acquire_lock_timed(PyThread_type_lock lock, PY_TIMEOUT_T microseconds,
+ int intr_flag)
+{
+ PyTime_t timeout; // relative timeout
+ if (microseconds >= 0) {
+ // bpo-41710: PyThread_acquire_lock_timed() cannot report timeout
+ // overflow to the caller, so clamp the timeout to
+ // [PyTime_MIN, PyTime_MAX].
+ //
+ // PyTime_MAX nanoseconds is around 292.3 years.
+ //
+ // _thread.Lock.acquire() and _thread.RLock.acquire() raise an
+ // OverflowError if microseconds is greater than PY_TIMEOUT_MAX.
+ timeout = _PyTime_FromMicrosecondsClamp(microseconds);
+ }
+ else {
+ timeout = -1;
+ }
+
+ _PyLockFlags flags = _Py_LOCK_DONT_DETACH;
+ if (intr_flag) {
+ flags |= _PY_FAIL_IF_INTERRUPTED;
+ }
+
+ return _PyMutex_LockTimed((PyMutex *)lock, timeout, flags);
+}
+
+void
+PyThread_release_lock(PyThread_type_lock lock)
+{
+ PyMutex_Unlock((PyMutex *)lock);
+}
+
+int
+_PyThread_at_fork_reinit(PyThread_type_lock *lock)
+{
+ _PyMutex_at_fork_reinit((PyMutex *)lock);
+ return 0;
+}
+
+int
+PyThread_acquire_lock(PyThread_type_lock lock, int waitflag)
+{
+ return PyThread_acquire_lock_timed(lock, waitflag ? -1 : 0, /*intr_flag=*/0);
+}
+
+
/* return the current thread stack size */
size_t
PyThread_get_stacksize(void)
@@ -261,11 +335,7 @@ PyThread_GetInfo(void)
#ifdef HAVE_PTHREAD_STUBS
value = Py_NewRef(Py_None);
#elif defined(_POSIX_THREADS)
-#ifdef USE_SEMAPHORES
- value = PyUnicode_FromString("semaphore");
-#else
- value = PyUnicode_FromString("mutex+cond");
-#endif
+ value = PyUnicode_FromString("pymutex");
if (value == NULL) {
Py_DECREF(threadinfo);
return NULL;
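
With this commit, PyThread_type_lock is simply a heap-allocated PyMutex, and the platform-specific lock implementations below are deleted; behaviour at the public C API is unchanged. A short usage reminder (sketch; the one-second timeout is arbitrary):

/* Sketch: the PyThread_* lock API, now backed by a heap-allocated PyMutex. */
#include "Python.h"
#include "pythread.h"

static int
try_with_timeout(void)
{
    PyThread_type_lock lock = PyThread_allocate_lock();
    if (lock == NULL) {
        return -1;
    }
    // Wait up to one second (microseconds), interruptible by signals.
    PyLockStatus st = PyThread_acquire_lock_timed(lock, 1000000, /*intr_flag=*/1);
    if (st == PY_LOCK_ACQUIRED) {
        PyThread_release_lock(lock);
    }
    // PY_LOCK_INTR: a signal interrupted the wait; PY_LOCK_FAILURE: timeout.
    PyThread_free_lock(lock);
    return (st == PY_LOCK_ACQUIRED) ? 0 : -1;
}
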
diff --git a/Python/thread_nt.h b/Python/thread_nt.h
index e078b98be3c..9a29d14ef67 100644
--- a/Python/thread_nt.h
+++ b/Python/thread_nt.h
@@ -300,98 +300,6 @@ PyThread_hang_thread(void)
}
}
-/*
- * Lock support. It has to be implemented as semaphores.
- * I [Dag] tried to implement it with mutex but I could find a way to
- * tell whether a thread already own the lock or not.
- */
-PyThread_type_lock
-PyThread_allocate_lock(void)
-{
- PNRMUTEX mutex;
-
- if (!initialized)
- PyThread_init_thread();
-
- mutex = AllocNonRecursiveMutex() ;
-
- PyThread_type_lock aLock = (PyThread_type_lock) mutex;
- assert(aLock);
-
- return aLock;
-}
-
-void
-PyThread_free_lock(PyThread_type_lock aLock)
-{
- FreeNonRecursiveMutex(aLock) ;
-}
-
-// WaitForSingleObject() accepts timeout in milliseconds in the range
-// [0; 0xFFFFFFFE] (DWORD type). INFINITE value (0xFFFFFFFF) means no
-// timeout. 0xFFFFFFFE milliseconds is around 49.7 days.
-const DWORD TIMEOUT_MS_MAX = 0xFFFFFFFE;
-
-/*
- * Return 1 on success if the lock was acquired
- *
- * and 0 if the lock was not acquired. This means a 0 is returned
- * if the lock has already been acquired by this thread!
- */
-PyLockStatus
-PyThread_acquire_lock_timed(PyThread_type_lock aLock,
- PY_TIMEOUT_T microseconds, int intr_flag)
-{
- assert(aLock);
-
- /* Fow now, intr_flag does nothing on Windows, and lock acquires are
- * uninterruptible. */
- PyLockStatus success;
- PY_TIMEOUT_T milliseconds;
-
- if (microseconds >= 0) {
- milliseconds = microseconds / 1000;
- // Round milliseconds away from zero
- if (microseconds % 1000 > 0) {
- milliseconds++;
- }
- if (milliseconds > (PY_TIMEOUT_T)TIMEOUT_MS_MAX) {
- // bpo-41710: PyThread_acquire_lock_timed() cannot report timeout
- // overflow to the caller, so clamp the timeout to
- // [0, TIMEOUT_MS_MAX] milliseconds.
- //
- // _thread.Lock.acquire() and _thread.RLock.acquire() raise an
- // OverflowError if microseconds is greater than PY_TIMEOUT_MAX.
- milliseconds = TIMEOUT_MS_MAX;
- }
- assert(milliseconds != INFINITE);
- }
- else {
- milliseconds = INFINITE;
- }
-
- if (EnterNonRecursiveMutex((PNRMUTEX)aLock,
- (DWORD)milliseconds) == WAIT_OBJECT_0) {
- success = PY_LOCK_ACQUIRED;
- }
- else {
- success = PY_LOCK_FAILURE;
- }
-
- return success;
-}
-int
-PyThread_acquire_lock(PyThread_type_lock aLock, int waitflag)
-{
- return PyThread_acquire_lock_timed(aLock, waitflag ? -1 : 0, 0);
-}
-
-void
-PyThread_release_lock(PyThread_type_lock aLock)
-{
- assert(aLock);
- (void)LeaveNonRecursiveMutex((PNRMUTEX) aLock);
-}
/* minimum/maximum thread stack sizes supported */
#define THREAD_MIN_STACKSIZE 0x8000 /* 32 KiB */
diff --git a/Python/thread_pthread.h b/Python/thread_pthread.h
index da405824244..13992f95723 100644
--- a/Python/thread_pthread.h
+++ b/Python/thread_pthread.h
@@ -99,16 +99,6 @@
#undef HAVE_SEM_CLOCKWAIT
#endif
-/* Whether or not to use semaphores directly rather than emulating them with
- * mutexes and condition variables:
- */
-#if (defined(_POSIX_SEMAPHORES) && !defined(HAVE_BROKEN_POSIX_SEMAPHORES) && \
- (defined(HAVE_SEM_TIMEDWAIT) || defined(HAVE_SEM_CLOCKWAIT)))
-# define USE_SEMAPHORES
-#else
-# undef USE_SEMAPHORES
-#endif
-
/* On platforms that don't use standard POSIX threads pthread_sigmask()
* isn't present. DEC threads uses sigprocmask() instead as do most
@@ -442,388 +432,6 @@ PyThread_hang_thread(void)
}
}
-#ifdef USE_SEMAPHORES
-
-/*
- * Lock support.
- */
-
-PyThread_type_lock
-PyThread_allocate_lock(void)
-{
- sem_t *lock;
- int status, error = 0;
-
- if (!initialized)
- PyThread_init_thread();
-
- lock = (sem_t *)PyMem_RawMalloc(sizeof(sem_t));
-
- if (lock) {
- status = sem_init(lock,0,1);
- CHECK_STATUS("sem_init");
-
- if (error) {
- PyMem_RawFree((void *)lock);
- lock = NULL;
- }
- }
-
- return (PyThread_type_lock)lock;
-}
-
-void
-PyThread_free_lock(PyThread_type_lock lock)
-{
- sem_t *thelock = (sem_t *)lock;
- int status, error = 0;
-
- (void) error; /* silence unused-but-set-variable warning */
-
- if (!thelock)
- return;
-
- status = sem_destroy(thelock);
- CHECK_STATUS("sem_destroy");
-
- PyMem_RawFree((void *)thelock);
-}
-
-/*
- * As of February 2002, Cygwin thread implementations mistakenly report error
- * codes in the return value of the sem_ calls (like the pthread_ functions).
- * Correct implementations return -1 and put the code in errno. This supports
- * either.
- */
-static int
-fix_status(int status)
-{
- return (status == -1) ? errno : status;
-}
-
-PyLockStatus
-PyThread_acquire_lock_timed(PyThread_type_lock lock, PY_TIMEOUT_T microseconds,
- int intr_flag)
-{
- PyLockStatus success;
- sem_t *thelock = (sem_t *)lock;
- int status, error = 0;
-
- (void) error; /* silence unused-but-set-variable warning */
-
- PyTime_t timeout; // relative timeout
- if (microseconds >= 0) {
- // bpo-41710: PyThread_acquire_lock_timed() cannot report timeout
- // overflow to the caller, so clamp the timeout to
- // [PyTime_MIN, PyTime_MAX].
- //
- // PyTime_MAX nanoseconds is around 292.3 years.
- //
- // _thread.Lock.acquire() and _thread.RLock.acquire() raise an
- // OverflowError if microseconds is greater than PY_TIMEOUT_MAX.
- timeout = _PyTime_FromMicrosecondsClamp(microseconds);
- }
- else {
- timeout = -1;
- }
-
-#ifdef HAVE_SEM_CLOCKWAIT
- struct timespec abs_timeout;
- // Local scope for deadline
- {
- PyTime_t now;
- // silently ignore error: cannot report error to the caller
- (void)PyTime_MonotonicRaw(&now);
- PyTime_t deadline = _PyTime_Add(now, timeout);
- _PyTime_AsTimespec_clamp(deadline, &abs_timeout);
- }
-#else
- PyTime_t deadline = 0;
- if (timeout > 0 && !intr_flag) {
- deadline = _PyDeadline_Init(timeout);
- }
-#endif
-
- while (1) {
- if (timeout > 0) {
-#ifdef HAVE_SEM_CLOCKWAIT
- status = fix_status(sem_clockwait(thelock, CLOCK_MONOTONIC,
- &abs_timeout));
-#else
- PyTime_t now;
- // silently ignore error: cannot report error to the caller
- (void)PyTime_TimeRaw(&now);
- PyTime_t abs_time = _PyTime_Add(now, timeout);
-
- struct timespec ts;
- _PyTime_AsTimespec_clamp(abs_time, &ts);
- status = fix_status(sem_timedwait(thelock, &ts));
-#endif
- }
- else if (timeout == 0) {
- status = fix_status(sem_trywait(thelock));
- }
- else {
- status = fix_status(sem_wait(thelock));
- }
-
- /* Retry if interrupted by a signal, unless the caller wants to be
- notified. */
- if (intr_flag || status != EINTR) {
- break;
- }
-
- // sem_clockwait() uses an absolute timeout, there is no need
- // to recompute the relative timeout.
-#ifndef HAVE_SEM_CLOCKWAIT
- if (timeout > 0) {
- /* wait interrupted by a signal (EINTR): recompute the timeout */
- timeout = _PyDeadline_Get(deadline);
- if (timeout < 0) {
- status = ETIMEDOUT;
- break;
- }
- }
-#endif
- }
-
- /* Don't check the status if we're stopping because of an interrupt. */
- if (!(intr_flag && status == EINTR)) {
- if (timeout > 0) {
- if (status != ETIMEDOUT) {
-#ifdef HAVE_SEM_CLOCKWAIT
- CHECK_STATUS("sem_clockwait");
-#else
- CHECK_STATUS("sem_timedwait");
-#endif
- }
- }
- else if (timeout == 0) {
- if (status != EAGAIN) {
- CHECK_STATUS("sem_trywait");
- }
- }
- else {
- CHECK_STATUS("sem_wait");
- }
- }
-
- if (status == 0) {
- success = PY_LOCK_ACQUIRED;
- } else if (intr_flag && status == EINTR) {
- success = PY_LOCK_INTR;
- } else {
- success = PY_LOCK_FAILURE;
- }
-
- return success;
-}
-
-void
-PyThread_release_lock(PyThread_type_lock lock)
-{
- sem_t *thelock = (sem_t *)lock;
- int status, error = 0;
-
- (void) error; /* silence unused-but-set-variable warning */
-
- status = sem_post(thelock);
- CHECK_STATUS("sem_post");
-}
-
-#else /* USE_SEMAPHORES */
-
-/*
- * Lock support.
- */
-PyThread_type_lock
-PyThread_allocate_lock(void)
-{
- pthread_lock *lock;
- int status, error = 0;
-
- if (!initialized)
- PyThread_init_thread();
-
- lock = (pthread_lock *) PyMem_RawCalloc(1, sizeof(pthread_lock));
- if (lock) {
- lock->locked = 0;
-
- status = pthread_mutex_init(&lock->mut, NULL);
- CHECK_STATUS_PTHREAD("pthread_mutex_init");
- /* Mark the pthread mutex underlying a Python mutex as
- pure happens-before. We can't simply mark the
- Python-level mutex as a mutex because it can be
- acquired and released in different threads, which
- will cause errors. */
- _Py_ANNOTATE_PURE_HAPPENS_BEFORE_MUTEX(&lock->mut);
-
- status = _PyThread_cond_init(&lock->lock_released);
- CHECK_STATUS_PTHREAD("pthread_cond_init");
-
- if (error) {
- PyMem_RawFree((void *)lock);
- lock = 0;
- }
- }
-
- return (PyThread_type_lock) lock;
-}
-
-void
-PyThread_free_lock(PyThread_type_lock lock)
-{
- pthread_lock *thelock = (pthread_lock *)lock;
- int status, error = 0;
-
- (void) error; /* silence unused-but-set-variable warning */
-
- /* some pthread-like implementations tie the mutex to the cond
- * and must have the cond destroyed first.
- */
- status = pthread_cond_destroy( &thelock->lock_released );
- CHECK_STATUS_PTHREAD("pthread_cond_destroy");
-
- status = pthread_mutex_destroy( &thelock->mut );
- CHECK_STATUS_PTHREAD("pthread_mutex_destroy");
-
- PyMem_RawFree((void *)thelock);
-}
-
-PyLockStatus
-PyThread_acquire_lock_timed(PyThread_type_lock lock, PY_TIMEOUT_T microseconds,
- int intr_flag)
-{
- PyLockStatus success = PY_LOCK_FAILURE;
- pthread_lock *thelock = (pthread_lock *)lock;
- int status, error = 0;
-
- if (microseconds == 0) {
- status = pthread_mutex_trylock( &thelock->mut );
- if (status != EBUSY) {
- CHECK_STATUS_PTHREAD("pthread_mutex_trylock[1]");
- }
- }
- else {
- status = pthread_mutex_lock( &thelock->mut );
- CHECK_STATUS_PTHREAD("pthread_mutex_lock[1]");
- }
- if (status != 0) {
- goto done;
- }
-
- if (thelock->locked == 0) {
- success = PY_LOCK_ACQUIRED;
- goto unlock;
- }
- if (microseconds == 0) {
- goto unlock;
- }
-
- struct timespec abs_timeout;
- if (microseconds > 0) {
- _PyThread_cond_after(microseconds, &abs_timeout);
- }
- // Continue trying until we get the lock
-
- // mut must be locked by me -- part of the condition protocol
- while (1) {
- if (microseconds > 0) {
- status = pthread_cond_timedwait(&thelock->lock_released,
- &thelock->mut, &abs_timeout);
- if (status == 1) {
- break;
- }
- if (status == ETIMEDOUT) {
- break;
- }
- CHECK_STATUS_PTHREAD("pthread_cond_timedwait");
- }
- else {
- status = pthread_cond_wait(
- &thelock->lock_released,
- &thelock->mut);
- CHECK_STATUS_PTHREAD("pthread_cond_wait");
- }
-
- if (intr_flag && status == 0 && thelock->locked) {
- // We were woken up, but didn't get the lock. We probably received
- // a signal. Return PY_LOCK_INTR to allow the caller to handle
- // it and retry.
- success = PY_LOCK_INTR;
- break;
- }
-
- if (status == 0 && !thelock->locked) {
- success = PY_LOCK_ACQUIRED;
- break;
- }
-
- // Wait got interrupted by a signal: retry
- }
-
-unlock:
- if (success == PY_LOCK_ACQUIRED) {
- thelock->locked = 1;
- }
- status = pthread_mutex_unlock( &thelock->mut );
- CHECK_STATUS_PTHREAD("pthread_mutex_unlock[1]");
-
-done:
- if (error) {
- success = PY_LOCK_FAILURE;
- }
- return success;
-}
-
-void
-PyThread_release_lock(PyThread_type_lock lock)
-{
- pthread_lock *thelock = (pthread_lock *)lock;
- int status, error = 0;
-
- (void) error; /* silence unused-but-set-variable warning */
-
- status = pthread_mutex_lock( &thelock->mut );
- CHECK_STATUS_PTHREAD("pthread_mutex_lock[3]");
-
- thelock->locked = 0;
-
- /* wake up someone (anyone, if any) waiting on the lock */
- status = pthread_cond_signal( &thelock->lock_released );
- CHECK_STATUS_PTHREAD("pthread_cond_signal");
-
- status = pthread_mutex_unlock( &thelock->mut );
- CHECK_STATUS_PTHREAD("pthread_mutex_unlock[3]");
-}
-
-#endif /* USE_SEMAPHORES */
-
-int
-_PyThread_at_fork_reinit(PyThread_type_lock *lock)
-{
- PyThread_type_lock new_lock = PyThread_allocate_lock();
- if (new_lock == NULL) {
- return -1;
- }
-
- /* bpo-6721, bpo-40089: The old lock can be in an inconsistent state.
- fork() can be called in the middle of an operation on the lock done by
- another thread. So don't call PyThread_free_lock(*lock).
-
- Leak memory on purpose. Don't release the memory either since the
- address of a mutex is relevant. Putting two mutexes at the same address
- can lead to problems. */
-
- *lock = new_lock;
- return 0;
-}
-
-int
-PyThread_acquire_lock(PyThread_type_lock lock, int waitflag)
-{
- return PyThread_acquire_lock_timed(lock, waitflag ? -1 : 0, /*intr_flag=*/0);
-}
/* set the thread stack size.
* Return 0 if size is valid, -1 if size is invalid,
diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py
index 3070559db8a..1447f365336 100644
--- a/Tools/cases_generator/analyzer.py
+++ b/Tools/cases_generator/analyzer.py
@@ -681,6 +681,8 @@ NON_ESCAPING_FUNCTIONS = (
"PyStackRef_UntagInt",
"PyStackRef_IncrementTaggedIntNoOverflow",
"PyStackRef_IsNullOrInt",
+ "PyStackRef_IsError",
+ "PyStackRef_IsValid",
)
diff --git a/Tools/cases_generator/stack.py b/Tools/cases_generator/stack.py
index 6b681775f48..df168afa888 100644
--- a/Tools/cases_generator/stack.py
+++ b/Tools/cases_generator/stack.py
@@ -496,7 +496,7 @@ class Storage:
f"Expected '{undefined}' to be defined before '{out.name}'"
else:
undefined = out.name
- while len(self.outputs) > self.peeks and not self.needs_defining(self.outputs[0]):
+ while len(self.outputs) > self.peeks and not self.needs_defining(self.outputs[self.peeks]):
out = self.outputs.pop(self.peeks)
self.stack.push(out)
diff --git a/Tools/msi/freethreaded/freethreaded_files.wxs b/Tools/msi/freethreaded/freethreaded_files.wxs
index 86d9a8b83f6..0707e77b5e9 100644
--- a/Tools/msi/freethreaded/freethreaded_files.wxs
+++ b/Tools/msi/freethreaded/freethreaded_files.wxs
@@ -103,7 +103,7 @@
</ComponentGroup>
</Fragment>
- <?define exts=pyexpat;select;unicodedata;winsound;_bz2;_elementtree;_socket;_ssl;_ctypes;_hashlib;_multiprocessing;_lzma;_decimal;_overlapped;_sqlite3;_asyncio;_queue;_uuid;_wmi;_zoneinfo;_zstd;_testcapi;_ctypes_test;_testbuffer;_testimportmultiple;_testmultiphase;_testsinglephase;_testconsole;_testinternalcapi;_testclinic;_testclinic_limited;_tkinter ?>
+ <?define exts=pyexpat;select;unicodedata;winsound;_bz2;_elementtree;_socket;_ssl;_ctypes;_hashlib;_multiprocessing;_lzma;_decimal;_overlapped;_sqlite3;_asyncio;_queue;_remote_debugging;_uuid;_wmi;_zoneinfo;_zstd;_testcapi;_ctypes_test;_testbuffer;_testimportmultiple;_testmultiphase;_testsinglephase;_testconsole;_testinternalcapi;_testclinic;_testclinic_limited;_tkinter ?>
<Fragment>
<DirectoryRef Id="Lib_venv_scripts_nt__freethreaded" />
diff --git a/Tools/msi/lib/lib_files.wxs b/Tools/msi/lib/lib_files.wxs
index 8439518bcbd..4d44299f783 100644
--- a/Tools/msi/lib/lib_files.wxs
+++ b/Tools/msi/lib/lib_files.wxs
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<Wix xmlns="http://schemas.microsoft.com/wix/2006/wi">
- <?define exts=pyexpat;select;unicodedata;winsound;_bz2;_elementtree;_socket;_ssl;_ctypes;_hashlib;_multiprocessing;_lzma;_decimal;_overlapped;_sqlite3;_asyncio;_queue;_uuid;_wmi;_zoneinfo;_zstd ?>
+ <?define exts=pyexpat;select;unicodedata;winsound;_bz2;_elementtree;_socket;_ssl;_ctypes;_hashlib;_multiprocessing;_lzma;_decimal;_overlapped;_sqlite3;_asyncio;_queue;_remote_debugging;_uuid;_wmi;_zoneinfo;_zstd ?>
<Fragment>
<DirectoryRef Id="Lib_venv_scripts_nt" />
diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py
index 2be85a163b4..09c5651f24a 100644
--- a/Tools/peg_generator/pegen/c_generator.py
+++ b/Tools/peg_generator/pegen/c_generator.py
@@ -44,7 +44,7 @@ EXTENSION_PREFIX = """\
# define MAXSTACK 4000
# endif
#else
-# define MAXSTACK 4000
+# define MAXSTACK 6000
#endif
"""
diff --git a/configure b/configure
index c9ce9b4a735..029bf527da4 100755
--- a/configure
+++ b/configure
@@ -19459,6 +19459,12 @@ then :
printf "%s\n" "#define HAVE_GETLOGIN 1" >>confdefs.h
fi
+ac_fn_c_check_func "$LINENO" "getlogin_r" "ac_cv_func_getlogin_r"
+if test "x$ac_cv_func_getlogin_r" = xyes
+then :
+ printf "%s\n" "#define HAVE_GETLOGIN_R 1" >>confdefs.h
+
+fi
ac_fn_c_check_func "$LINENO" "getpeername" "ac_cv_func_getpeername"
if test "x$ac_cv_func_getpeername" = xyes
then :
@@ -23843,6 +23849,33 @@ fi
+ac_fn_check_decl "$LINENO" "MAXLOGNAME" "ac_cv_have_decl_MAXLOGNAME" "#include <sys/params.h>
+" "$ac_c_undeclared_builtin_options" "CFLAGS"
+if test "x$ac_cv_have_decl_MAXLOGNAME" = xyes
+then :
+
+printf "%s\n" "#define HAVE_MAXLOGNAME 1" >>confdefs.h
+
+fi
+
+ac_fn_check_decl "$LINENO" "UT_NAMESIZE" "ac_cv_have_decl_UT_NAMESIZE" "#include <utmp.h>
+" "$ac_c_undeclared_builtin_options" "CFLAGS"
+if test "x$ac_cv_have_decl_UT_NAMESIZE" = xyes
+then :
+ ac_have_decl=1
+else case e in #(
+ e) ac_have_decl=0 ;;
+esac
+fi
+printf "%s\n" "#define HAVE_DECL_UT_NAMESIZE $ac_have_decl" >>confdefs.h
+if test $ac_have_decl = 1
+then :
+
+printf "%s\n" "#define HAVE_UT_NAMESIZE 1" >>confdefs.h
+
+fi
+
+
# check for openpty, login_tty, and forkpty
@@ -32557,6 +32590,14 @@ LIBHACL_CFLAGS="${LIBHACL_FLAG_I} ${LIBHACL_FLAG_D} \$(PY_STDMODULE_CFLAGS) \$(C
LIBHACL_LDFLAGS= # for now, no specific linker flags are needed
+if test "$UNIVERSAL_ARCHS" = "universal2" -o \
+ \( "$build_cpu" = "aarch64" -a "$build_vendor" = "apple" \)
+then
+ use_hacl_universal2_impl=yes
+else
+ use_hacl_universal2_impl=no
+fi
+
# The SIMD files use aligned_alloc, which is not available on older versions of
# Android.
# The *mmintrin.h headers are x86-family-specific, so can't be used on WASI.
@@ -32610,7 +32651,7 @@ printf "%s\n" "#define HACL_CAN_COMPILE_SIMD128 1" >>confdefs.h
# isn't great, so it's disabled on ARM64.
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for HACL* SIMD128 implementation" >&5
printf %s "checking for HACL* SIMD128 implementation... " >&6; }
- if test "$UNIVERSAL_ARCHS" == "universal2"; then
+ if test "$use_hacl_universal2_impl" = "yes"; then
LIBHACL_BLAKE2_SIMD128_OBJS="Modules/_hacl/Hacl_Hash_Blake2s_Simd128_universal2.o"
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: universal2" >&5
printf "%s\n" "universal2" >&6; }
@@ -32687,7 +32728,7 @@ printf "%s\n" "#define HACL_CAN_COMPILE_SIMD256 1" >>confdefs.h
# wrapped implementation if we're building for universal2.
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking for HACL* SIMD256 implementation" >&5
printf %s "checking for HACL* SIMD256 implementation... " >&6; }
- if test "$UNIVERSAL_ARCHS" == "universal2"; then
+ if test "$use_hacl_universal2_impl" = "yes"; then
LIBHACL_BLAKE2_SIMD256_OBJS="Modules/_hacl/Hacl_Hash_Blake2b_Simd256_universal2.o"
{ printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: universal2" >&5
printf "%s\n" "universal2" >&6; }
diff --git a/configure.ac b/configure.ac
index eb52365d95b..371b2e8ed73 100644
--- a/configure.ac
+++ b/configure.ac
@@ -5219,7 +5219,7 @@ AC_CHECK_FUNCS([ \
faccessat fchmod fchmodat fchown fchownat fdopendir fdwalk fexecve \
fork fork1 fpathconf fstatat ftime ftruncate futimens futimes futimesat \
gai_strerror getegid geteuid getgid getgrent getgrgid getgrgid_r \
- getgrnam_r getgrouplist gethostname getitimer getloadavg getlogin \
+ getgrnam_r getgrouplist gethostname getitimer getloadavg getlogin getlogin_r \
getpeername getpgid getpid getppid getpriority _getpty \
getpwent getpwnam_r getpwuid getpwuid_r getresgid getresuid getrusage getsid getspent \
getspnam getuid getwd grantpt if_nameindex initgroups kill killpg lchown linkat \
@@ -5538,6 +5538,18 @@ PY_CHECK_FUNC([setgroups], [
#endif
])
+AC_CHECK_DECL([MAXLOGNAME],
+ [AC_DEFINE([HAVE_MAXLOGNAME], [1],
+ [Define if you have the 'MAXLOGNAME' constant.])],
+ [],
+ [@%:@include <sys/params.h>])
+
+AC_CHECK_DECLS([UT_NAMESIZE],
+ [AC_DEFINE([HAVE_UT_NAMESIZE], [1],
+ [Define if you have the 'HAVE_UT_NAMESIZE' constant.])],
+ [],
+ [@%:@include <utmp.h>])
+
# check for openpty, login_tty, and forkpty
AC_CHECK_FUNCS([openpty], [],
@@ -8001,6 +8013,15 @@ AC_SUBST([LIBHACL_CFLAGS])
LIBHACL_LDFLAGS= # for now, no specific linker flags are needed
AC_SUBST([LIBHACL_LDFLAGS])
+dnl Check if universal2 HACL* implementation should be used.
+if test "$UNIVERSAL_ARCHS" = "universal2" -o \
+ \( "$build_cpu" = "aarch64" -a "$build_vendor" = "apple" \)
+then
+ use_hacl_universal2_impl=yes
+else
+ use_hacl_universal2_impl=no
+fi
+
# The SIMD files use aligned_alloc, which is not available on older versions of
# Android.
# The *mmintrin.h headers are x86-family-specific, so can't be used on WASI.
@@ -8017,7 +8038,7 @@ then
# available on x86_64. However, performance of the HACL SIMD128 implementation
# isn't great, so it's disabled on ARM64.
AC_MSG_CHECKING([for HACL* SIMD128 implementation])
- if test "$UNIVERSAL_ARCHS" == "universal2"; then
+ if test "$use_hacl_universal2_impl" = "yes"; then
[LIBHACL_BLAKE2_SIMD128_OBJS="Modules/_hacl/Hacl_Hash_Blake2s_Simd128_universal2.o"]
AC_MSG_RESULT([universal2])
else
@@ -8049,7 +8070,7 @@ then
# implementation requires symbols that aren't available on ARM64. Use a
# wrapped implementation if we're building for universal2.
AC_MSG_CHECKING([for HACL* SIMD256 implementation])
- if test "$UNIVERSAL_ARCHS" == "universal2"; then
+ if test "$use_hacl_universal2_impl" = "yes"; then
[LIBHACL_BLAKE2_SIMD256_OBJS="Modules/_hacl/Hacl_Hash_Blake2b_Simd256_universal2.o"]
AC_MSG_RESULT([universal2])
else
diff --git a/iOS/testbed/__main__.py b/iOS/testbed/__main__.py
index c05497ede3a..1146bf3b988 100644
--- a/iOS/testbed/__main__.py
+++ b/iOS/testbed/__main__.py
@@ -127,7 +127,7 @@ async def async_check_output(*args, **kwargs):
async def select_simulator_device():
# List the testing simulators, in JSON format
raw_json = await async_check_output(
- "xcrun", "simctl", "--set", "testing", "list", "-j"
+ "xcrun", "simctl", "list", "-j"
)
json_data = json.loads(raw_json)
diff --git a/pyconfig.h.in b/pyconfig.h.in
index 3dbbda157df..65a2c55217c 100644
--- a/pyconfig.h.in
+++ b/pyconfig.h.in
@@ -267,6 +267,10 @@
*/
#undef HAVE_DECL_TZNAME
+/* Define to 1 if you have the declaration of 'UT_NAMESIZE', and to 0 if you
+ don't. */
+#undef HAVE_DECL_UT_NAMESIZE
+
/* Define to 1 if you have the device macros. */
#undef HAVE_DEVICE_MACROS
@@ -539,6 +543,9 @@
/* Define to 1 if you have the 'getlogin' function. */
#undef HAVE_GETLOGIN
+/* Define to 1 if you have the 'getlogin_r' function. */
+#undef HAVE_GETLOGIN_R
+
/* Define to 1 if you have the 'getnameinfo' function. */
#undef HAVE_GETNAMEINFO
@@ -807,6 +814,9 @@
/* Define this if you have the makedev macro. */
#undef HAVE_MAKEDEV
+/* Define if you have the 'MAXLOGNAME' constant. */
+#undef HAVE_MAXLOGNAME
+
/* Define to 1 if you have the 'mbrtowc' function. */
#undef HAVE_MBRTOWC
@@ -1575,6 +1585,9 @@
/* Define to 1 if you have the <utmp.h> header file. */
#undef HAVE_UTMP_H
+/* Define if you have the 'HAVE_UT_NAMESIZE' constant. */
+#undef HAVE_UT_NAMESIZE
+
/* Define to 1 if you have the 'uuid_create' function. */
#undef HAVE_UUID_CREATE