Diffstat (limited to 'Tools/build')
 -rw-r--r--  Tools/build/.ruff.toml                      | 12
 -rw-r--r--  Tools/build/check_warnings.py               | 31
 -rw-r--r--  Tools/build/compute-changes.py              | 24
 -rw-r--r--  Tools/build/deepfreeze.py                   | 51
 -rw-r--r--  Tools/build/generate-build-details.py       | 24
 -rw-r--r--  Tools/build/generate_sbom.py                | 22
 -rw-r--r--  Tools/build/generate_stdlib_module_names.py |  1
 -rwxr-xr-x  Tools/build/generate_token.py               |  6
 -rw-r--r--  Tools/build/mypy.ini                        | 13
 -rw-r--r--  Tools/build/umarshal.py                     | 11
 -rw-r--r--  Tools/build/update_file.py                  | 24
 -rwxr-xr-x  Tools/build/verify_ensurepip_wheels.py      |  6
 12 files changed, 147 insertions(+), 78 deletions(-)
diff --git a/Tools/build/.ruff.toml b/Tools/build/.ruff.toml
index c084c06144a..dcbf2936290 100644
--- a/Tools/build/.ruff.toml
+++ b/Tools/build/.ruff.toml
@@ -1,5 +1,13 @@
extend = "../../.ruff.toml" # Inherit the project-wide settings
+[per-file-target-version]
+"deepfreeze.py" = "py311" # requires `code.co_exceptiontable`
+"stable_abi.py" = "py311" # requires 'tomllib'
+
+[format]
+preview = true
+docstring-code-format = true
+
[lint]
select = [
"C4", # flake8-comprehensions
@@ -24,10 +32,6 @@ ignore = [
"UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
]
-[per-file-target-version]
-"deepfreeze.py" = "py310"
-"stable_abi.py" = "py311" # requires 'tomllib'
-
[lint.per-file-ignores]
"{check_extension_modules,freeze_modules}.py" = [
"UP031", # Use format specifiers instead of percent format
diff --git a/Tools/build/check_warnings.py b/Tools/build/check_warnings.py
index 7a8721087b6..3f49d8e7f2e 100644
--- a/Tools/build/check_warnings.py
+++ b/Tools/build/check_warnings.py
@@ -83,17 +83,13 @@ def extract_warnings_from_compiler_output(
for i, line in enumerate(compiler_output.splitlines(), start=1):
if match := compiled_regex.match(line):
try:
- compiler_warnings.append(
- {
- "file": match.group("file").removeprefix(path_prefix),
- "line": match.group("line"),
- "column": match.group("column"),
- "message": match.group("message"),
- "option": match.group("option")
- .lstrip("[")
- .rstrip("]"),
- }
- )
+ compiler_warnings.append({
+ "file": match.group("file").removeprefix(path_prefix),
+ "line": match.group("line"),
+ "column": match.group("column"),
+ "message": match.group("message"),
+ "option": match.group("option").lstrip("[").rstrip("]"),
+ })
except AttributeError:
print(
f"Error parsing compiler output. "
@@ -151,7 +147,6 @@ def get_unexpected_warnings(
"""
unexpected_warnings = {}
for file in files_with_warnings.keys():
-
rule = is_file_ignored(file, ignore_rules)
if rule:
@@ -201,13 +196,11 @@ def get_unexpected_improvements(
if rule.file_path not in files_with_warnings.keys():
unexpected_improvements.append((rule.file_path, rule.count, 0))
elif len(files_with_warnings[rule.file_path]) < rule.count:
- unexpected_improvements.append(
- (
- rule.file_path,
- rule.count,
- len(files_with_warnings[rule.file_path]),
- )
- )
+ unexpected_improvements.append((
+ rule.file_path,
+ rule.count,
+ len(files_with_warnings[rule.file_path]),
+ ))
if unexpected_improvements:
print("Unexpected improvements:")
diff --git a/Tools/build/compute-changes.py b/Tools/build/compute-changes.py
index b3be7df2dba..b5993d29b92 100644
--- a/Tools/build/compute-changes.py
+++ b/Tools/build/compute-changes.py
@@ -56,12 +56,10 @@ class Outputs:
def compute_changes() -> None:
- target_branch, head_branch = git_branches()
- if target_branch and head_branch:
+ target_branch, head_ref = git_refs()
+ if os.environ.get("GITHUB_EVENT_NAME", "") == "pull_request":
# Getting changed files only makes sense on a pull request
- files = get_changed_files(
- f"origin/{target_branch}", f"origin/{head_branch}"
- )
+ files = get_changed_files(target_branch, head_ref)
outputs = process_changed_files(files)
else:
# Otherwise, just run the tests
@@ -89,15 +87,15 @@ def compute_changes() -> None:
write_github_output(outputs)
-def git_branches() -> tuple[str, str]:
- target_branch = os.environ.get("GITHUB_BASE_REF", "")
- target_branch = target_branch.removeprefix("refs/heads/")
- print(f"target branch: {target_branch!r}")
+def git_refs() -> tuple[str, str]:
+ target_ref = os.environ.get("CCF_TARGET_REF", "")
+ target_ref = target_ref.removeprefix("refs/heads/")
+ print(f"target ref: {target_ref!r}")
- head_branch = os.environ.get("GITHUB_HEAD_REF", "")
- head_branch = head_branch.removeprefix("refs/heads/")
- print(f"head branch: {head_branch!r}")
- return target_branch, head_branch
+ head_ref = os.environ.get("CCF_HEAD_REF", "")
+ head_ref = head_ref.removeprefix("refs/heads/")
+ print(f"head ref: {head_ref!r}")
+ return f"origin/{target_ref}", head_ref
def get_changed_files(
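
With `git_refs()` returning the already-prefixed `origin/<target>` ref, the pull-request check now keys off `GITHUB_EVENT_NAME` rather than the presence of both branch variables. The body of `get_changed_files()` is not part of this hunk; a rough sketch of what it plausibly does, assuming it shells out to git:

    import subprocess
    from pathlib import Path

    def get_changed_files(ref_a: str, ref_b: str) -> set[Path]:
        # Hypothetical implementation: files that differ between the two refs.
        output = subprocess.check_output(
            ["git", "diff", "--name-only", f"{ref_a}...{ref_b}", "--"],
            text=True,
        )
        return {Path(name) for name in output.splitlines() if name.strip()}
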
diff --git a/Tools/build/deepfreeze.py b/Tools/build/deepfreeze.py
index 23f58447937..2b9f03aebb6 100644
--- a/Tools/build/deepfreeze.py
+++ b/Tools/build/deepfreeze.py
@@ -2,9 +2,12 @@
The script may be executed by _bootstrap_python interpreter.
Shared library extension modules are not available in that case.
-On Windows, and in cross-compilation cases, it is executed
-by Python 3.10, and 3.11 features are not available.
+Requires Python 3.11+ to be executed,
+because it relies on `code.co_qualname` and `code.co_exceptiontable`.
"""
+
+from __future__ import annotations
+
import argparse
import builtins
import collections
@@ -13,10 +16,14 @@ import os
import re
import time
import types
-from typing import TextIO
import umarshal
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from collections.abc import Iterator
+ from typing import Any, TextIO
+
ROOT = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
verbose = False
@@ -45,8 +52,8 @@ CO_FAST_FREE = 0x80
next_code_version = 1
-def get_localsplus(code: types.CodeType):
- a = collections.defaultdict(int)
+def get_localsplus(code: types.CodeType) -> tuple[tuple[str, ...], bytes]:
+ a: collections.defaultdict[str, int] = collections.defaultdict(int)
for name in code.co_varnames:
a[name] |= CO_FAST_LOCAL
for name in code.co_cellvars:
@@ -136,7 +143,7 @@ class Printer:
return identifiers, strings
@contextlib.contextmanager
- def indent(self) -> None:
+ def indent(self) -> Iterator[None]:
save_level = self.level
try:
self.level += 1
@@ -148,7 +155,7 @@ class Printer:
self.file.writelines((" "*self.level, arg, "\n"))
@contextlib.contextmanager
- def block(self, prefix: str, suffix: str = "") -> None:
+ def block(self, prefix: str, suffix: str = "") -> Iterator[None]:
self.write(prefix + " {")
with self.indent():
yield
@@ -250,9 +257,17 @@ class Printer:
co_names = self.generate(name + "_names", code.co_names)
co_filename = self.generate(name + "_filename", code.co_filename)
co_name = self.generate(name + "_name", code.co_name)
- co_qualname = self.generate(name + "_qualname", code.co_qualname)
co_linetable = self.generate(name + "_linetable", code.co_linetable)
- co_exceptiontable = self.generate(name + "_exceptiontable", code.co_exceptiontable)
+ # We use 3.10 for type checking, but this module requires 3.11
+ # TODO: bump python version for this script.
+ co_qualname = self.generate(
+ name + "_qualname",
+ code.co_qualname, # type: ignore[attr-defined]
+ )
+ co_exceptiontable = self.generate(
+ name + "_exceptiontable",
+ code.co_exceptiontable, # type: ignore[attr-defined]
+ )
# These fields are not directly accessible
localsplusnames, localspluskinds = get_localsplus(code)
co_localsplusnames = self.generate(name + "_localsplusnames", localsplusnames)
@@ -379,13 +394,13 @@ class Printer:
self.write(f".cval = {{ {z.real}, {z.imag} }},")
return f"&{name}.ob_base"
- def generate_frozenset(self, name: str, fs: frozenset[object]) -> str:
+ def generate_frozenset(self, name: str, fs: frozenset[Any]) -> str:
try:
- fs = sorted(fs)
+ fs_sorted = sorted(fs)
except TypeError:
# frozen set with incompatible types, fallback to repr()
- fs = sorted(fs, key=repr)
- ret = self.generate_tuple(name, tuple(fs))
+ fs_sorted = sorted(fs, key=repr)
+ ret = self.generate_tuple(name, tuple(fs_sorted))
self.write("// TODO: The above tuple should be a frozenset")
return ret
@@ -402,7 +417,7 @@ class Printer:
# print(f"Cache hit {key!r:.40}: {self.cache[key]!r:.40}")
return self.cache[key]
self.misses += 1
- if isinstance(obj, (types.CodeType, umarshal.Code)) :
+ if isinstance(obj, types.CodeType) :
val = self.generate_code(name, obj)
elif isinstance(obj, tuple):
val = self.generate_tuple(name, obj)
@@ -458,7 +473,7 @@ def decode_frozen_data(source: str) -> types.CodeType:
if re.match(FROZEN_DATA_LINE, line):
values.extend([int(x) for x in line.split(",") if x.strip()])
data = bytes(values)
- return umarshal.loads(data)
+ return umarshal.loads(data) # type: ignore[no-any-return]
def generate(args: list[str], output: TextIO) -> None:
@@ -494,12 +509,12 @@ group.add_argument('args', nargs="*", default=(),
help="Input file and module name (required) in file:modname format")
@contextlib.contextmanager
-def report_time(label: str):
- t0 = time.time()
+def report_time(label: str) -> Iterator[None]:
+ t0 = time.perf_counter()
try:
yield
finally:
- t1 = time.time()
+ t1 = time.perf_counter()
if verbose:
print(f"{label}: {t1-t0:.3f} sec")
diff --git a/Tools/build/generate-build-details.py b/Tools/build/generate-build-details.py
index 0da6c2948d6..8cd23e2f54f 100644
--- a/Tools/build/generate-build-details.py
+++ b/Tools/build/generate-build-details.py
@@ -3,6 +3,8 @@
# Script initially imported from:
# https://github.com/FFY00/python-instrospection/blob/main/python_introspection/scripts/generate-build-details.py
+from __future__ import annotations
+
import argparse
import collections
import importlib.machinery
@@ -11,19 +13,23 @@ import os
import sys
import sysconfig
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from typing import Any
+
-def version_info_to_dict(obj): # (object) -> dict[str, Any]
+def version_info_to_dict(obj: sys._version_info) -> dict[str, Any]:
field_names = ('major', 'minor', 'micro', 'releaselevel', 'serial')
return {field: getattr(obj, field) for field in field_names}
-def get_dict_key(container, key): # (dict[str, Any], str) -> dict[str, Any]
+def get_dict_key(container: dict[str, Any], key: str) -> dict[str, Any]:
for part in key.split('.'):
container = container[part]
return container
-def generate_data(schema_version):
+def generate_data(schema_version: str) -> collections.defaultdict[str, Any]:
"""Generate the build-details.json data (PEP 739).
:param schema_version: The schema version of the data we want to generate.
@@ -32,7 +38,9 @@ def generate_data(schema_version):
if schema_version != '1.0':
raise ValueError(f'Unsupported schema_version: {schema_version}')
- data = collections.defaultdict(lambda: collections.defaultdict(dict))
+ data: collections.defaultdict[str, Any] = collections.defaultdict(
+ lambda: collections.defaultdict(dict),
+ )
data['schema_version'] = schema_version
@@ -67,7 +75,7 @@ def generate_data(schema_version):
PY3LIBRARY = sysconfig.get_config_var('PY3LIBRARY')
LIBPYTHON = sysconfig.get_config_var('LIBPYTHON')
LIBPC = sysconfig.get_config_var('LIBPC')
- INCLUDEDIR = sysconfig.get_config_var('INCLUDEDIR')
+ INCLUDEPY = sysconfig.get_config_var('INCLUDEPY')
if os.name == 'posix':
# On POSIX, LIBRARY is always the static library, while LDLIBRARY is the
@@ -115,14 +123,14 @@ def generate_data(schema_version):
if has_static_library:
data['libpython']['static'] = os.path.join(LIBDIR, LIBRARY)
- data['c_api']['include'] = INCLUDEDIR
+ data['c_api']['headers'] = INCLUDEPY
if LIBPC:
data['c_api']['pkgconfig_path'] = LIBPC
return data
-def make_paths_relative(data, config_path=None): # (dict[str, Any], str | None) -> None
+def make_paths_relative(data: dict[str, Any], config_path: str | None = None) -> None:
# Make base_prefix relative to the config_path directory
if config_path:
data['base_prefix'] = os.path.relpath(data['base_prefix'], os.path.dirname(config_path))
@@ -152,7 +160,7 @@ def make_paths_relative(data, config_path=None): # (dict[str, Any], str | None)
container[child] = new_path
-def main(): # () -> None
+def main() -> None:
parser = argparse.ArgumentParser(exit_on_error=False)
parser.add_argument('location')
parser.add_argument(
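
`generate_data()` now advertises its nested-defaultdict return type, and `get_dict_key()` walks dotted keys into that structure. A small sketch of how the two fit together (the keys match this hunk, the value is illustrative):

    import collections
    from typing import Any

    def get_dict_key(container: dict[str, Any], key: str) -> dict[str, Any]:
        for part in key.split('.'):
            container = container[part]
        return container

    data: collections.defaultdict[str, Any] = collections.defaultdict(
        lambda: collections.defaultdict(dict),
    )
    data['c_api']['headers'] = '/usr/include/python3.X'  # illustrative value
    print(get_dict_key(data, 'c_api')['headers'])
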
diff --git a/Tools/build/generate_sbom.py b/Tools/build/generate_sbom.py
index db01426e972..968397728b2 100644
--- a/Tools/build/generate_sbom.py
+++ b/Tools/build/generate_sbom.py
@@ -4,10 +4,13 @@ import glob
import hashlib
import json
import os
+import random
import re
import subprocess
import sys
+import time
import typing
+import urllib.error
import urllib.request
from pathlib import Path, PurePosixPath, PureWindowsPath
@@ -161,6 +164,23 @@ def get_externals() -> list[str]:
return externals
+def download_with_retries(download_location: str,
+ max_retries: int = 7,
+ base_delay: float = 2.25,
+ max_jitter: float = 1.0) -> typing.Any:
+ """Download a file with exponential backoff retry."""
+ for attempt in range(max_retries + 1):
+ try:
+ resp = urllib.request.urlopen(download_location)
+ except (urllib.error.URLError, ConnectionError) as ex:
+ if attempt == max_retries:
+ msg = f"Download from {download_location} failed."
+ raise OSError(msg) from ex
+ time.sleep(base_delay**attempt + random.uniform(0, max_jitter))
+ else:
+ return resp
+
+
def check_sbom_packages(sbom_data: dict[str, typing.Any]) -> None:
"""Make a bunch of assertions about the SBOM package data to ensure it's consistent."""
@@ -175,7 +195,7 @@ def check_sbom_packages(sbom_data: dict[str, typing.Any]) -> None:
# and that the download URL is valid.
if "checksums" not in package or "CI" in os.environ:
download_location = package["downloadLocation"]
- resp = urllib.request.urlopen(download_location)
+ resp = download_with_retries(download_location)
error_if(resp.status != 200, f"Couldn't access URL: {download_location}'")
package["checksums"] = [{
diff --git a/Tools/build/generate_stdlib_module_names.py b/Tools/build/generate_stdlib_module_names.py
index 9873890837f..88414cdbb37 100644
--- a/Tools/build/generate_stdlib_module_names.py
+++ b/Tools/build/generate_stdlib_module_names.py
@@ -34,7 +34,6 @@ IGNORE = {
'_testlimitedcapi',
'_testmultiphase',
'_testsinglephase',
- '_testexternalinspection',
'_xxtestfuzz',
'idlelib.idle_test',
'test',
diff --git a/Tools/build/generate_token.py b/Tools/build/generate_token.py
index a64806763f3..9ee5ec86e75 100755
--- a/Tools/build/generate_token.py
+++ b/Tools/build/generate_token.py
@@ -278,13 +278,13 @@ EXACT_TOKEN_TYPES = {
%s
}
-def ISTERMINAL(x):
+def ISTERMINAL(x: int) -> bool:
return x < NT_OFFSET
-def ISNONTERMINAL(x):
+def ISNONTERMINAL(x: int) -> bool:
return x >= NT_OFFSET
-def ISEOF(x):
+def ISEOF(x: int) -> bool:
return x == ENDMARKER
'''
diff --git a/Tools/build/mypy.ini b/Tools/build/mypy.ini
index 06224163884..123dc895f90 100644
--- a/Tools/build/mypy.ini
+++ b/Tools/build/mypy.ini
@@ -1,7 +1,16 @@
[mypy]
+
+# Please, when adding new files here, also add them to:
+# .github/workflows/mypy.yml
files =
Tools/build/compute-changes.py,
- Tools/build/generate_sbom.py
+ Tools/build/deepfreeze.py,
+ Tools/build/generate-build-details.py,
+ Tools/build/generate_sbom.py,
+ Tools/build/verify_ensurepip_wheels.py,
+ Tools/build/update_file.py,
+ Tools/build/umarshal.py
+
pretty = True
# Make sure Python can still be built
@@ -10,6 +19,8 @@ python_version = 3.10
# ...And be strict:
strict = True
+strict_bytes = True
+local_partial_types = True
extra_checks = True
enable_error_code = ignore-without-code,redundant-expr,truthy-bool,possibly-undefined
warn_unreachable = True
diff --git a/Tools/build/umarshal.py b/Tools/build/umarshal.py
index 679fa7caf9f..865cffc2440 100644
--- a/Tools/build/umarshal.py
+++ b/Tools/build/umarshal.py
@@ -145,12 +145,12 @@ class Reader:
def r_float_bin(self) -> float:
buf = self.r_string(8)
import struct # Lazy import to avoid breaking UNIX build
- return struct.unpack("d", buf)[0]
+ return struct.unpack("d", buf)[0] # type: ignore[no-any-return]
def r_float_str(self) -> float:
n = self.r_byte()
buf = self.r_string(n)
- return ast.literal_eval(buf.decode("ascii"))
+ return ast.literal_eval(buf.decode("ascii")) # type: ignore[no-any-return]
def r_ref_reserve(self, flag: int) -> int:
if flag:
@@ -306,7 +306,7 @@ def loads(data: bytes) -> Any:
return r.r_object()
-def main():
+def main() -> None:
# Test
import marshal
import pprint
@@ -314,8 +314,9 @@ def main():
data = marshal.dumps(sample)
retval = loads(data)
assert retval == sample, retval
- sample = main.__code__
- data = marshal.dumps(sample)
+
+ sample2 = main.__code__
+ data = marshal.dumps(sample2)
retval = loads(data)
assert isinstance(retval, Code), retval
pprint.pprint(retval.__dict__)
diff --git a/Tools/build/update_file.py b/Tools/build/update_file.py
index b4182c1d0cb..b4a5fb6e778 100644
--- a/Tools/build/update_file.py
+++ b/Tools/build/update_file.py
@@ -6,14 +6,27 @@ This avoids wholesale rebuilds when a code (re)generation phase does not
actually change the in-tree generated code.
"""
+from __future__ import annotations
+
import contextlib
import os
import os.path
import sys
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ import typing
+ from collections.abc import Iterator
+ from io import TextIOWrapper
+
+ _Outcome: typing.TypeAlias = typing.Literal['created', 'updated', 'same']
+
@contextlib.contextmanager
-def updating_file_with_tmpfile(filename, tmpfile=None):
+def updating_file_with_tmpfile(
+ filename: str,
+ tmpfile: str | None = None,
+) -> Iterator[tuple[TextIOWrapper, TextIOWrapper]]:
"""A context manager for updating a file via a temp file.
The context manager provides two open files: the source file open
@@ -46,13 +59,18 @@ def updating_file_with_tmpfile(filename, tmpfile=None):
update_file_with_tmpfile(filename, tmpfile)
-def update_file_with_tmpfile(filename, tmpfile, *, create=False):
+def update_file_with_tmpfile(
+ filename: str,
+ tmpfile: str,
+ *,
+ create: bool = False,
+) -> _Outcome:
try:
targetfile = open(filename, 'rb')
except FileNotFoundError:
if not create:
raise # re-raise
- outcome = 'created'
+ outcome: _Outcome = 'created'
os.replace(tmpfile, filename)
else:
with targetfile:
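
With the annotations in place, the contract of `updating_file_with_tmpfile()` is explicit: it yields the existing file opened for reading and a temp file opened for writing, then hands off to `update_file_with_tmpfile()`, which replaces the real file only when the regenerated content differs. A usage sketch (the filename is a placeholder):

    with updating_file_with_tmpfile("Include/example_generated.h") as (infile, outfile):
        for line in infile:
            # Regenerate or transform each line; copied unchanged here.
            outfile.write(line)
    # Per the module docstring, an identical result leaves the original file
    # untouched, so no-op regeneration does not trigger rebuilds.
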
diff --git a/Tools/build/verify_ensurepip_wheels.py b/Tools/build/verify_ensurepip_wheels.py
index a37da2f7075..46c42916d93 100755
--- a/Tools/build/verify_ensurepip_wheels.py
+++ b/Tools/build/verify_ensurepip_wheels.py
@@ -20,13 +20,13 @@ ENSURE_PIP_INIT_PY_TEXT = (ENSURE_PIP_ROOT / "__init__.py").read_text(encoding="
GITHUB_ACTIONS = os.getenv("GITHUB_ACTIONS") == "true"
-def print_notice(file_path: str, message: str) -> None:
+def print_notice(file_path: str | Path, message: str) -> None:
if GITHUB_ACTIONS:
message = f"::notice file={file_path}::{message}"
print(message, end="\n\n")
-def print_error(file_path: str, message: str) -> None:
+def print_error(file_path: str | Path, message: str) -> None:
if GITHUB_ACTIONS:
message = f"::error file={file_path}::{message}"
print(message, end="\n\n")
@@ -67,6 +67,7 @@ def verify_wheel(package_name: str) -> bool:
return False
release_files = json.loads(raw_text)["releases"][package_version]
+ expected_digest = ""
for release_info in release_files:
if package_path.name != release_info["filename"]:
continue
@@ -95,6 +96,7 @@ def verify_wheel(package_name: str) -> bool:
return True
+
if __name__ == "__main__":
exit_status = int(not verify_wheel("pip"))
raise SystemExit(exit_status)
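
Widening the annotations to `str | Path` reflects how callers pass `Path` objects straight into the GitHub Actions workflow-command strings. A sketch of the emitted annotation (path and message are illustrative):

    from pathlib import Path

    print_error(
        Path("Lib/ensurepip/_bundled/pip-XX.X-py3-none-any.whl"),
        "Failed to verify the checksum against PyPI.",
    )
    # With GITHUB_ACTIONS=true, this prints:
    #   ::error file=Lib/ensurepip/_bundled/pip-XX.X-py3-none-any.whl::Failed to verify the checksum against PyPI.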