Diffstat (limited to 'Lib')
-rw-r--r--  Lib/argparse.py  61
-rw-r--r--  Lib/asyncio/graph.py  6
-rw-r--r--  Lib/compression/bz2.py (renamed from Lib/compression/bz2/__init__.py)  0
-rw-r--r--  Lib/compression/gzip.py (renamed from Lib/compression/gzip/__init__.py)  0
-rw-r--r--  Lib/compression/lzma.py (renamed from Lib/compression/lzma/__init__.py)  0
-rw-r--r--  Lib/compression/zlib.py (renamed from Lib/compression/zlib/__init__.py)  0
-rw-r--r--  Lib/compression/zstd/__init__.py  52
-rw-r--r--  Lib/compression/zstd/_zstdfile.py  66
-rw-r--r--  Lib/getpass.py  11
-rw-r--r--  Lib/glob.py  30
-rw-r--r--  Lib/logging/__init__.py  32
-rw-r--r--  Lib/pathlib/__init__.py  7
-rw-r--r--  Lib/pathlib/_os.py  28
-rw-r--r--  Lib/pathlib/types.py  51
-rw-r--r--  Lib/pdb.py  3
-rw-r--r--  Lib/tarfile.py  2
-rw-r--r--  Lib/test/_test_multiprocessing.py  21
-rw-r--r--  Lib/test/support/__init__.py  4
-rw-r--r--  Lib/test/support/strace_helper.py  2
-rw-r--r--  Lib/test/test_argparse.py  131
-rw-r--r--  Lib/test/test_asyncio/test_tools.py  8
-rw-r--r--  Lib/test/test_codeccallbacks.py  39
-rw-r--r--  Lib/test/test_codecs.py  52
-rw-r--r--  Lib/test/test_crossinterp.py  34
-rw-r--r--  Lib/test/test_dict.py  2
-rw-r--r--  Lib/test/test_fcntl.py  46
-rw-r--r--  Lib/test/test_fractions.py  208
-rw-r--r--  Lib/test/test_free_threading/test_functools.py  75
-rw-r--r--  Lib/test/test_genericalias.py  6
-rw-r--r--  Lib/test/test_ioctl.py  3
-rw-r--r--  Lib/test/test_logging.py  23
-rw-r--r--  Lib/test/test_pathlib/support/lexical_path.py  11
-rw-r--r--  Lib/test/test_pathlib/support/local_path.py  10
-rw-r--r--  Lib/test/test_pathlib/support/zip_path.py  45
-rw-r--r--  Lib/test/test_pathlib/test_join_windows.py  17
-rw-r--r--  Lib/test/test_pathlib/test_pathlib.py  12
-rw-r--r--  Lib/test/test_sys.py  35
-rw-r--r--  Lib/test/test_typing.py  160
-rw-r--r--  Lib/test/test_urllib.py  2
-rw-r--r--  Lib/test/test_wave.py  26
-rw-r--r--  Lib/test/test_zipfile/test_core.py  2
-rw-r--r--  Lib/test/test_zstd.py  25
-rw-r--r--  Lib/typing.py  72
-rw-r--r--  Lib/venv/__init__.py  7
-rw-r--r--  Lib/wave.py  29
-rw-r--r--  Lib/zipfile/__init__.py  16
46 files changed, 886 insertions, 586 deletions
diff --git a/Lib/argparse.py b/Lib/argparse.py
index f13ac82dbc5..f688c38d0d1 100644
--- a/Lib/argparse.py
+++ b/Lib/argparse.py
@@ -167,7 +167,6 @@ class HelpFormatter(object):
indent_increment=2,
max_help_position=24,
width=None,
- prefix_chars='-',
color=False,
):
# default setting for width
@@ -176,16 +175,7 @@ class HelpFormatter(object):
width = shutil.get_terminal_size().columns
width -= 2
- from _colorize import can_colorize, decolor, get_theme
-
- if color and can_colorize():
- self._theme = get_theme(force_color=True).argparse
- self._decolor = decolor
- else:
- self._theme = get_theme(force_no_color=True).argparse
- self._decolor = lambda text: text
-
- self._prefix_chars = prefix_chars
+ self._set_color(color)
self._prog = prog
self._indent_increment = indent_increment
self._max_help_position = min(max_help_position,
@@ -202,6 +192,16 @@ class HelpFormatter(object):
self._whitespace_matcher = _re.compile(r'\s+', _re.ASCII)
self._long_break_matcher = _re.compile(r'\n\n\n+')
+ def _set_color(self, color):
+ from _colorize import can_colorize, decolor, get_theme
+
+ if color and can_colorize():
+ self._theme = get_theme(force_color=True).argparse
+ self._decolor = decolor
+ else:
+ self._theme = get_theme(force_no_color=True).argparse
+ self._decolor = lambda text: text
+
# ===============================
# Section and indentation methods
# ===============================
@@ -415,14 +415,7 @@ class HelpFormatter(object):
return ' '.join(self._get_actions_usage_parts(actions, groups))
def _is_long_option(self, string):
- return len(string) >= 2 and string[1] in self._prefix_chars
-
- def _is_short_option(self, string):
- return (
- not self._is_long_option(string)
- and len(string) >= 1
- and string[0] in self._prefix_chars
- )
+ return len(string) > 2
def _get_actions_usage_parts(self, actions, groups):
# find group indices and identify actions in groups
@@ -471,25 +464,22 @@ class HelpFormatter(object):
# produce the first way to invoke the option in brackets
else:
option_string = action.option_strings[0]
+ if self._is_long_option(option_string):
+ option_color = t.summary_long_option
+ else:
+ option_color = t.summary_short_option
# if the Optional doesn't take a value, format is:
# -s or --long
if action.nargs == 0:
part = action.format_usage()
- if self._is_long_option(part):
- part = f"{t.summary_long_option}{part}{t.reset}"
- elif self._is_short_option(part):
- part = f"{t.summary_short_option}{part}{t.reset}"
+ part = f"{option_color}{part}{t.reset}"
# if the Optional takes a value, format is:
# -s ARGS or --long ARGS
else:
default = self._get_default_metavar_for_optional(action)
args_string = self._format_args(action, default)
- if self._is_long_option(option_string):
- option_color = t.summary_long_option
- elif self._is_short_option(option_string):
- option_color = t.summary_short_option
part = (
f"{option_color}{option_string} "
f"{t.summary_label}{args_string}{t.reset}"
@@ -606,10 +596,8 @@ class HelpFormatter(object):
for s in strings:
if self._is_long_option(s):
parts.append(f"{t.long_option}{s}{t.reset}")
- elif self._is_short_option(s):
- parts.append(f"{t.short_option}{s}{t.reset}")
else:
- parts.append(s)
+ parts.append(f"{t.short_option}{s}{t.reset}")
return parts
# if the Optional doesn't take a value, format is:
@@ -2723,16 +2711,9 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
return formatter.format_help()
def _get_formatter(self):
- if isinstance(self.formatter_class, type) and issubclass(
- self.formatter_class, HelpFormatter
- ):
- return self.formatter_class(
- prog=self.prog,
- prefix_chars=self.prefix_chars,
- color=self.color,
- )
- else:
- return self.formatter_class(prog=self.prog)
+ formatter = self.formatter_class(prog=self.prog)
+ formatter._set_color(self.color)
+ return formatter
# =====================
# Help-printing methods
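With the change to _get_formatter() above, formatter_class no longer has to be a HelpFormatter subclass that accepts prefix_chars and color; any callable taking just prog works, and color is applied afterwards via _set_color(). A minimal sketch (names are illustrative and mirror the tests added to test_argparse.py further down):

    import argparse

    # A formatter factory that only accepts `prog`; _get_formatter() then
    # calls formatter._set_color(self.color) on the returned instance.
    def custom_formatter(prog):
        return argparse.RawTextHelpFormatter(prog, max_help_position=50)

    parser = argparse.ArgumentParser(prog='PROG',
                                     formatter_class=custom_formatter,
                                     color=True)
    parser.add_argument('--spam', help='spam help')
    print(parser.format_help())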
diff --git a/Lib/asyncio/graph.py b/Lib/asyncio/graph.py
index d8df7c9919a..b5bfeb1630a 100644
--- a/Lib/asyncio/graph.py
+++ b/Lib/asyncio/graph.py
@@ -1,6 +1,7 @@
"""Introspection utils for tasks call graphs."""
import dataclasses
+import io
import sys
import types
@@ -16,9 +17,6 @@ __all__ = (
'FutureCallGraph',
)
-if False: # for type checkers
- from typing import TextIO
-
# Sadly, we can't re-use the traceback module's datastructures as those
# are tailored for error reporting, whereas we need to represent an
# async call graph.
@@ -270,7 +268,7 @@ def print_call_graph(
future: futures.Future | None = None,
/,
*,
- file: TextIO | None = None,
+ file: io.Writer[str] | None = None,
depth: int = 1,
limit: int | None = None,
) -> None:
diff --git a/Lib/compression/bz2/__init__.py b/Lib/compression/bz2.py
index 16815d6cd20..16815d6cd20 100644
--- a/Lib/compression/bz2/__init__.py
+++ b/Lib/compression/bz2.py
diff --git a/Lib/compression/gzip/__init__.py b/Lib/compression/gzip.py
index 552f48f948a..552f48f948a 100644
--- a/Lib/compression/gzip/__init__.py
+++ b/Lib/compression/gzip.py
diff --git a/Lib/compression/lzma/__init__.py b/Lib/compression/lzma.py
index b4bc7ccb1db..b4bc7ccb1db 100644
--- a/Lib/compression/lzma/__init__.py
+++ b/Lib/compression/lzma.py
diff --git a/Lib/compression/zlib/__init__.py b/Lib/compression/zlib.py
index 3aa7e2db90e..3aa7e2db90e 100644
--- a/Lib/compression/zlib/__init__.py
+++ b/Lib/compression/zlib.py
diff --git a/Lib/compression/zstd/__init__.py b/Lib/compression/zstd/__init__.py
index e7b2f427164..84b25914b0a 100644
--- a/Lib/compression/zstd/__init__.py
+++ b/Lib/compression/zstd/__init__.py
@@ -2,28 +2,28 @@
__all__ = (
# compression.zstd
- "COMPRESSION_LEVEL_DEFAULT",
- "compress",
- "CompressionParameter",
- "decompress",
- "DecompressionParameter",
- "finalize_dict",
- "get_frame_info",
- "Strategy",
- "train_dict",
+ 'COMPRESSION_LEVEL_DEFAULT',
+ 'compress',
+ 'CompressionParameter',
+ 'decompress',
+ 'DecompressionParameter',
+ 'finalize_dict',
+ 'get_frame_info',
+ 'Strategy',
+ 'train_dict',
# compression.zstd._zstdfile
- "open",
- "ZstdFile",
+ 'open',
+ 'ZstdFile',
# _zstd
- "get_frame_size",
- "zstd_version",
- "zstd_version_info",
- "ZstdCompressor",
- "ZstdDecompressor",
- "ZstdDict",
- "ZstdError",
+ 'get_frame_size',
+ 'zstd_version',
+ 'zstd_version_info',
+ 'ZstdCompressor',
+ 'ZstdDecompressor',
+ 'ZstdDict',
+ 'ZstdError',
)
import _zstd
@@ -43,6 +43,7 @@ COMPRESSION_LEVEL_DEFAULT = _zstd.ZSTD_CLEVEL_DEFAULT
class FrameInfo:
"""Information about a Zstandard frame."""
+
__slots__ = 'decompressed_size', 'dictionary_id'
def __init__(self, decompressed_size, dictionary_id):
@@ -125,13 +126,13 @@ def finalize_dict(zstd_dict, /, samples, dict_size, level):
chunks = b''.join(samples)
chunk_sizes = tuple(_nbytes(sample) for sample in samples)
if not chunks:
- raise ValueError("The samples are empty content, can't finalize the"
+ raise ValueError("The samples are empty content, can't finalize the "
"dictionary.")
- dict_content = _zstd.finalize_dict(zstd_dict.dict_content,
- chunks, chunk_sizes,
- dict_size, level)
+ dict_content = _zstd.finalize_dict(zstd_dict.dict_content, chunks,
+ chunk_sizes, dict_size, level)
return ZstdDict(dict_content)
+
def compress(data, level=None, options=None, zstd_dict=None):
"""Return Zstandard compressed *data* as bytes.
@@ -147,6 +148,7 @@ def compress(data, level=None, options=None, zstd_dict=None):
comp = ZstdCompressor(level=level, options=options, zstd_dict=zstd_dict)
return comp.compress(data, mode=ZstdCompressor.FLUSH_FRAME)
+
def decompress(data, zstd_dict=None, options=None):
"""Decompress one or more frames of Zstandard compressed *data*.
@@ -162,12 +164,12 @@ def decompress(data, zstd_dict=None, options=None):
decomp = ZstdDecompressor(options=options, zstd_dict=zstd_dict)
results.append(decomp.decompress(data))
if not decomp.eof:
- raise ZstdError("Compressed data ended before the "
- "end-of-stream marker was reached")
+ raise ZstdError('Compressed data ended before the '
+ 'end-of-stream marker was reached')
data = decomp.unused_data
if not data:
break
- return b"".join(results)
+ return b''.join(results)
class CompressionParameter(enum.IntEnum):
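For reference, the module-level helpers touched above round-trip data like this (a minimal sketch against the compression.zstd API shown in this file):

    from compression import zstd

    # compress() emits a complete frame; decompress() requires the
    # end-of-stream marker, otherwise it raises ZstdError (see above).
    blob = zstd.compress(b'hello ' * 100)
    assert zstd.decompress(blob) == b'hello ' * 100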
diff --git a/Lib/compression/zstd/_zstdfile.py b/Lib/compression/zstd/_zstdfile.py
index 0086c13d3c1..8770e576f50 100644
--- a/Lib/compression/zstd/_zstdfile.py
+++ b/Lib/compression/zstd/_zstdfile.py
@@ -4,7 +4,7 @@ from _zstd import (ZstdCompressor, ZstdDecompressor, ZstdError,
ZSTD_DStreamOutSize)
from compression._common import _streams
-__all__ = ("ZstdFile", "open")
+__all__ = ('ZstdFile', 'open')
_MODE_CLOSED = 0
_MODE_READ = 1
@@ -31,15 +31,15 @@ class ZstdFile(_streams.BaseStream):
FLUSH_BLOCK = ZstdCompressor.FLUSH_BLOCK
FLUSH_FRAME = ZstdCompressor.FLUSH_FRAME
- def __init__(self, file, /, mode="r", *,
+ def __init__(self, file, /, mode='r', *,
level=None, options=None, zstd_dict=None):
"""Open a Zstandard compressed file in binary mode.
*file* can be either an file-like object, or a file name to open.
- *mode* can be "r" for reading (default), "w" for (over)writing, "x" for
- creating exclusively, or "a" for appending. These can equivalently be
- given as "rb", "wb", "xb" and "ab" respectively.
+ *mode* can be 'r' for reading (default), 'w' for (over)writing, 'x' for
+ creating exclusively, or 'a' for appending. These can equivalently be
+ given as 'rb', 'wb', 'xb' and 'ab' respectively.
*level* is an optional int specifying the compression level to use,
or COMPRESSION_LEVEL_DEFAULT if not given.
@@ -57,33 +57,33 @@ class ZstdFile(_streams.BaseStream):
self._buffer = None
if not isinstance(mode, str):
- raise ValueError("mode must be a str")
+ raise ValueError('mode must be a str')
if options is not None and not isinstance(options, dict):
- raise TypeError("options must be a dict or None")
- mode = mode.removesuffix("b") # handle rb, wb, xb, ab
- if mode == "r":
+ raise TypeError('options must be a dict or None')
+ mode = mode.removesuffix('b') # handle rb, wb, xb, ab
+ if mode == 'r':
if level is not None:
- raise TypeError("level is illegal in read mode")
+ raise TypeError('level is illegal in read mode')
self._mode = _MODE_READ
- elif mode in {"w", "a", "x"}:
+ elif mode in {'w', 'a', 'x'}:
if level is not None and not isinstance(level, int):
- raise TypeError("level must be int or None")
+ raise TypeError('level must be int or None')
self._mode = _MODE_WRITE
self._compressor = ZstdCompressor(level=level, options=options,
zstd_dict=zstd_dict)
self._pos = 0
else:
- raise ValueError(f"Invalid mode: {mode!r}")
+ raise ValueError(f'Invalid mode: {mode!r}')
if isinstance(file, (str, bytes, PathLike)):
self._fp = io.open(file, f'{mode}b')
self._close_fp = True
- elif ((mode == 'r' and hasattr(file, "read"))
- or (mode != 'r' and hasattr(file, "write"))):
+ elif ((mode == 'r' and hasattr(file, 'read'))
+ or (mode != 'r' and hasattr(file, 'write'))):
self._fp = file
else:
- raise TypeError("file must be a file-like object "
- "or a str, bytes, or PathLike object")
+ raise TypeError('file must be a file-like object '
+ 'or a str, bytes, or PathLike object')
if self._mode == _MODE_READ:
raw = _streams.DecompressReader(
@@ -151,22 +151,22 @@ class ZstdFile(_streams.BaseStream):
return
self._check_not_closed()
if mode not in {self.FLUSH_BLOCK, self.FLUSH_FRAME}:
- raise ValueError("Invalid mode argument, expected either "
- "ZstdFile.FLUSH_FRAME or "
- "ZstdFile.FLUSH_BLOCK")
+ raise ValueError('Invalid mode argument, expected either '
+ 'ZstdFile.FLUSH_FRAME or '
+ 'ZstdFile.FLUSH_BLOCK')
if self._compressor.last_mode == mode:
return
# Flush zstd block/frame, and write.
data = self._compressor.flush(mode)
self._fp.write(data)
- if hasattr(self._fp, "flush"):
+ if hasattr(self._fp, 'flush'):
self._fp.flush()
def read(self, size=-1):
"""Read up to size uncompressed bytes from the file.
If size is negative or omitted, read until EOF is reached.
- Returns b"" if the file is already at EOF.
+ Returns b'' if the file is already at EOF.
"""
if size is None:
size = -1
@@ -178,7 +178,7 @@ class ZstdFile(_streams.BaseStream):
making multiple reads from the underlying stream. Reads up to a
buffer's worth of data if size is negative.
- Returns b"" if the file is at EOF.
+ Returns b'' if the file is at EOF.
"""
self._check_can_read()
if size < 0:
@@ -293,7 +293,7 @@ class ZstdFile(_streams.BaseStream):
return self._mode == _MODE_WRITE
-def open(file, /, mode="rb", *, level=None, options=None, zstd_dict=None,
+def open(file, /, mode='rb', *, level=None, options=None, zstd_dict=None,
encoding=None, errors=None, newline=None):
"""Open a Zstandard compressed file in binary or text mode.
@@ -301,8 +301,8 @@ def open(file, /, mode="rb", *, level=None, options=None, zstd_dict=None,
in which case the named file is opened, or it can be an existing file object
to read from or write to.
- The mode parameter can be "r", "rb" (default), "w", "wb", "x", "xb", "a",
- "ab" for binary mode, or "rt", "wt", "xt", "at" for text mode.
+ The mode parameter can be 'r', 'rb' (default), 'w', 'wb', 'x', 'xb', 'a',
+ 'ab' for binary mode, or 'rt', 'wt', 'xt', 'at' for text mode.
The level, options, and zstd_dict parameters specify the settings the same
as ZstdFile.
@@ -323,19 +323,19 @@ def open(file, /, mode="rb", *, level=None, options=None, zstd_dict=None,
behavior, and line ending(s).
"""
- text_mode = "t" in mode
- mode = mode.replace("t", "")
+ text_mode = 't' in mode
+ mode = mode.replace('t', '')
if text_mode:
- if "b" in mode:
- raise ValueError(f"Invalid mode: {mode!r}")
+ if 'b' in mode:
+ raise ValueError(f'Invalid mode: {mode!r}')
else:
if encoding is not None:
- raise ValueError("Argument 'encoding' not supported in binary mode")
+ raise ValueError('Argument "encoding" not supported in binary mode')
if errors is not None:
- raise ValueError("Argument 'errors' not supported in binary mode")
+ raise ValueError('Argument "errors" not supported in binary mode')
if newline is not None:
- raise ValueError("Argument 'newline' not supported in binary mode")
+ raise ValueError('Argument "newline" not supported in binary mode')
binary_file = ZstdFile(file, mode, level=level, options=options,
zstd_dict=zstd_dict)
diff --git a/Lib/getpass.py b/Lib/getpass.py
index f571425e541..1dd40e25e09 100644
--- a/Lib/getpass.py
+++ b/Lib/getpass.py
@@ -119,9 +119,9 @@ def win_getpass(prompt='Password: ', stream=None, *, echo_char=None):
raise KeyboardInterrupt
if c == '\b':
if echo_char and pw:
- msvcrt.putch('\b')
- msvcrt.putch(' ')
- msvcrt.putch('\b')
+ msvcrt.putwch('\b')
+ msvcrt.putwch(' ')
+ msvcrt.putwch('\b')
pw = pw[:-1]
else:
pw = pw + c
@@ -132,14 +132,15 @@ def win_getpass(prompt='Password: ', stream=None, *, echo_char=None):
return pw
-def fallback_getpass(prompt='Password: ', stream=None):
+def fallback_getpass(prompt='Password: ', stream=None, *, echo_char=None):
+ _check_echo_char(echo_char)
import warnings
warnings.warn("Can not control echo on the terminal.", GetPassWarning,
stacklevel=2)
if not stream:
stream = sys.stderr
print("Warning: Password input may be echoed.", file=stream)
- return _raw_input(prompt, stream)
+ return _raw_input(prompt, stream, echo_char=echo_char)
def _check_echo_char(echo_char):
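fallback_getpass() now validates and forwards echo_char like the other implementations, so masking works (or fails loudly) no matter which backend is picked. A usage sketch, assuming the echo_char keyword of getpass.getpass() that this patch builds on:

    import getpass

    # Each typed character is echoed as '*'; the backspace handling fixed
    # above (putwch) erases the mask character on Windows.
    password = getpass.getpass('Password: ', echo_char='*')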
diff --git a/Lib/glob.py b/Lib/glob.py
index 341524282ba..1e48fe43167 100644
--- a/Lib/glob.py
+++ b/Lib/glob.py
@@ -358,6 +358,12 @@ class _GlobberBase:
"""
raise NotImplementedError
+ @staticmethod
+ def stringify_path(path):
+ """Converts the path to a string object
+ """
+ raise NotImplementedError
+
# High-level methods
def compile(self, pat, altsep=None):
@@ -466,8 +472,9 @@ class _GlobberBase:
select_next = self.selector(parts)
def select_recursive(path, exists=False):
- match_pos = len(str(path))
- if match is None or match(str(path), match_pos):
+ path_str = self.stringify_path(path)
+ match_pos = len(path_str)
+ if match is None or match(path_str, match_pos):
yield from select_next(path, exists)
stack = [path]
while stack:
@@ -489,7 +496,7 @@ class _GlobberBase:
pass
if is_dir or not dir_only:
- entry_path_str = str(entry_path)
+ entry_path_str = self.stringify_path(entry_path)
if dir_only:
entry_path = self.concat_path(entry_path, self.sep)
if match is None or match(entry_path_str, match_pos):
@@ -529,19 +536,6 @@ class _StringGlobber(_GlobberBase):
entries = list(scandir_it)
return ((entry, entry.name, entry.path) for entry in entries)
-
-class _PathGlobber(_GlobberBase):
- """Provides shell-style pattern matching and globbing for pathlib paths.
- """
-
@staticmethod
- def lexists(path):
- return path.info.exists(follow_symlinks=False)
-
- @staticmethod
- def scandir(path):
- return ((child.info, child.name, child) for child in path.iterdir())
-
- @staticmethod
- def concat_path(path, text):
- return path.with_segments(str(path) + text)
+ def stringify_path(path):
+ return path # Already a string.
diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py
index 283a1055182..f2d1a02629d 100644
--- a/Lib/logging/__init__.py
+++ b/Lib/logging/__init__.py
@@ -2057,6 +2057,15 @@ def basicConfig(**kwargs):
created FileHandler, causing it to be used when the file is
opened in text mode. If not specified, the default value is
`backslashreplace`.
+ formatter If specified, set this formatter instance for all involved
+ handlers.
+ If not specified, the default is to create and use an instance of
+ `logging.Formatter` based on arguments 'format', 'datefmt' and
+ 'style'.
+ When 'formatter' is specified together with any of the three
+ arguments 'format', 'datefmt' and 'style', a `ValueError`
+ is raised to signal that these arguments would lose meaning
+ otherwise.
Note that you could specify a stream created using open(filename, mode)
rather than passing the filename and mode in. However, it should be
@@ -2079,6 +2088,9 @@ def basicConfig(**kwargs):
.. versionchanged:: 3.9
Added the ``encoding`` and ``errors`` parameters.
+
+ .. versionchanged:: 3.15
+ Added the ``formatter`` parameter.
"""
# Add thread safety in case someone mistakenly calls
# basicConfig() from multiple threads
@@ -2114,13 +2126,19 @@ def basicConfig(**kwargs):
stream = kwargs.pop("stream", None)
h = StreamHandler(stream)
handlers = [h]
- dfs = kwargs.pop("datefmt", None)
- style = kwargs.pop("style", '%')
- if style not in _STYLES:
- raise ValueError('Style must be one of: %s' % ','.join(
- _STYLES.keys()))
- fs = kwargs.pop("format", _STYLES[style][1])
- fmt = Formatter(fs, dfs, style)
+ fmt = kwargs.pop("formatter", None)
+ if fmt is None:
+ dfs = kwargs.pop("datefmt", None)
+ style = kwargs.pop("style", '%')
+ if style not in _STYLES:
+ raise ValueError('Style must be one of: %s' % ','.join(
+ _STYLES.keys()))
+ fs = kwargs.pop("format", _STYLES[style][1])
+ fmt = Formatter(fs, dfs, style)
+ else:
+ for forbidden_key in ("datefmt", "format", "style"):
+ if forbidden_key in kwargs:
+ raise ValueError(f"{forbidden_key!r} should not be specified together with 'formatter'")
for h in handlers:
if h.formatter is None:
h.setFormatter(fmt)
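The docstring added above spells out the new behavior: a prebuilt Formatter is attached to every handler that does not have one yet, and mixing it with format, datefmt or style raises ValueError. A short sketch of the assumed usage:

    import logging

    # Pass a ready-made Formatter instead of format/datefmt/style.
    fmt = logging.Formatter('%(asctime)s %(levelname)s %(name)s: %(message)s')
    logging.basicConfig(level=logging.INFO, formatter=fmt)
    logging.getLogger(__name__).info('hello')

    # logging.basicConfig(formatter=fmt, format='%(message)s')  # ValueError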
diff --git a/Lib/pathlib/__init__.py b/Lib/pathlib/__init__.py
index 12cf9f579cb..2dc1f7f7126 100644
--- a/Lib/pathlib/__init__.py
+++ b/Lib/pathlib/__init__.py
@@ -28,8 +28,9 @@ except ImportError:
from pathlib._os import (
PathInfo, DirEntryInfo,
+ magic_open, vfspath,
ensure_different_files, ensure_distinct_paths,
- copyfile2, copyfileobj, magic_open, copy_info,
+ copyfile2, copyfileobj, copy_info,
)
@@ -1164,12 +1165,12 @@ class Path(PurePath):
# os.symlink() incorrectly creates a file-symlink on Windows. Avoid
# this by passing *target_is_dir* to os.symlink() on Windows.
def _copy_from_symlink(self, source, preserve_metadata=False):
- os.symlink(str(source.readlink()), self, source.info.is_dir())
+ os.symlink(vfspath(source.readlink()), self, source.info.is_dir())
if preserve_metadata:
copy_info(source.info, self, follow_symlinks=False)
else:
def _copy_from_symlink(self, source, preserve_metadata=False):
- os.symlink(str(source.readlink()), self)
+ os.symlink(vfspath(source.readlink()), self)
if preserve_metadata:
copy_info(source.info, self, follow_symlinks=False)
diff --git a/Lib/pathlib/_os.py b/Lib/pathlib/_os.py
index 039836941dd..62a4adb555e 100644
--- a/Lib/pathlib/_os.py
+++ b/Lib/pathlib/_os.py
@@ -210,6 +210,26 @@ def magic_open(path, mode='r', buffering=-1, encoding=None, errors=None,
raise TypeError(f"{cls.__name__} can't be opened with mode {mode!r}")
+def vfspath(path):
+ """
+ Return the string representation of a virtual path object.
+ """
+ try:
+ return os.fsdecode(path)
+ except TypeError:
+ pass
+
+ path_type = type(path)
+ try:
+ return path_type.__vfspath__(path)
+ except AttributeError:
+ if hasattr(path_type, '__vfspath__'):
+ raise
+
+ raise TypeError("expected str, bytes, os.PathLike or JoinablePath "
+ "object, not " + path_type.__name__)
+
+
def ensure_distinct_paths(source, target):
"""
Raise OSError(EINVAL) if the other path is within this path.
@@ -225,8 +245,8 @@ def ensure_distinct_paths(source, target):
err = OSError(EINVAL, "Source path is a parent of target path")
else:
return
- err.filename = str(source)
- err.filename2 = str(target)
+ err.filename = vfspath(source)
+ err.filename2 = vfspath(target)
raise err
@@ -247,8 +267,8 @@ def ensure_different_files(source, target):
except (OSError, ValueError):
return
err = OSError(EINVAL, "Source and target are the same file")
- err.filename = str(source)
- err.filename2 = str(target)
+ err.filename = vfspath(source)
+ err.filename2 = vfspath(target)
raise err
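vfspath() prefers os.fsdecode() and only falls back to a __vfspath__() protocol method, raising TypeError otherwise. A minimal sketch of that fallback order (FakeVirtualPath is a hypothetical class, and pathlib._os is a private module, so this only runs on a build carrying this patch):

    from pathlib._os import vfspath

    class FakeVirtualPath:
        # Not os.fsdecode()-able, so vfspath() uses the protocol method.
        def __vfspath__(self):
            return 'archive.zip/member.txt'

    vfspath('/tmp/x')             # '/tmp/x' via os.fsdecode()
    vfspath(FakeVirtualPath())    # 'archive.zip/member.txt' via __vfspath__()
    # vfspath(object())           # TypeError: not str, bytes, PathLike or JoinablePath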
diff --git a/Lib/pathlib/types.py b/Lib/pathlib/types.py
index d8f5c34a1a7..42b80221608 100644
--- a/Lib/pathlib/types.py
+++ b/Lib/pathlib/types.py
@@ -11,9 +11,10 @@ Protocols for supporting classes in pathlib.
from abc import ABC, abstractmethod
-from glob import _PathGlobber
+from glob import _GlobberBase
from io import text_encoding
-from pathlib._os import magic_open, ensure_distinct_paths, ensure_different_files, copyfileobj
+from pathlib._os import (magic_open, vfspath, ensure_distinct_paths,
+ ensure_different_files, copyfileobj)
from pathlib import PurePath, Path
from typing import Optional, Protocol, runtime_checkable
@@ -60,6 +61,25 @@ class PathInfo(Protocol):
def is_symlink(self) -> bool: ...
+class _PathGlobber(_GlobberBase):
+ """Provides shell-style pattern matching and globbing for ReadablePath.
+ """
+
+ @staticmethod
+ def lexists(path):
+ return path.info.exists(follow_symlinks=False)
+
+ @staticmethod
+ def scandir(path):
+ return ((child.info, child.name, child) for child in path.iterdir())
+
+ @staticmethod
+ def concat_path(path, text):
+ return path.with_segments(vfspath(path) + text)
+
+ stringify_path = staticmethod(vfspath)
+
+
class _JoinablePath(ABC):
"""Abstract base class for pure path objects.
@@ -86,20 +106,19 @@ class _JoinablePath(ABC):
raise NotImplementedError
@abstractmethod
- def __str__(self):
- """Return the string representation of the path, suitable for
- passing to system calls."""
+ def __vfspath__(self):
+ """Return the string representation of the path."""
raise NotImplementedError
@property
def anchor(self):
"""The concatenation of the drive and root, or ''."""
- return _explode_path(str(self), self.parser.split)[0]
+ return _explode_path(vfspath(self), self.parser.split)[0]
@property
def name(self):
"""The final path component, if any."""
- return self.parser.split(str(self))[1]
+ return self.parser.split(vfspath(self))[1]
@property
def suffix(self):
@@ -135,7 +154,7 @@ class _JoinablePath(ABC):
split = self.parser.split
if split(name)[0]:
raise ValueError(f"Invalid name {name!r}")
- path = str(self)
+ path = vfspath(self)
path = path.removesuffix(split(path)[1]) + name
return self.with_segments(path)
@@ -168,7 +187,7 @@ class _JoinablePath(ABC):
def parts(self):
"""An object providing sequence-like access to the
components in the filesystem path."""
- anchor, parts = _explode_path(str(self), self.parser.split)
+ anchor, parts = _explode_path(vfspath(self), self.parser.split)
if anchor:
parts.append(anchor)
return tuple(reversed(parts))
@@ -179,24 +198,24 @@ class _JoinablePath(ABC):
paths) or a totally different path (if one of the arguments is
anchored).
"""
- return self.with_segments(str(self), *pathsegments)
+ return self.with_segments(vfspath(self), *pathsegments)
def __truediv__(self, key):
try:
- return self.with_segments(str(self), key)
+ return self.with_segments(vfspath(self), key)
except TypeError:
return NotImplemented
def __rtruediv__(self, key):
try:
- return self.with_segments(key, str(self))
+ return self.with_segments(key, vfspath(self))
except TypeError:
return NotImplemented
@property
def parent(self):
"""The logical parent of the path."""
- path = str(self)
+ path = vfspath(self)
parent = self.parser.split(path)[0]
if path != parent:
return self.with_segments(parent)
@@ -206,7 +225,7 @@ class _JoinablePath(ABC):
def parents(self):
"""A sequence of this path's logical parents."""
split = self.parser.split
- path = str(self)
+ path = vfspath(self)
parent = split(path)[0]
parents = []
while path != parent:
@@ -223,7 +242,7 @@ class _JoinablePath(ABC):
case_sensitive = self.parser.normcase('Aa') == 'Aa'
globber = _PathGlobber(self.parser.sep, case_sensitive, recursive=True)
match = globber.compile(pattern, altsep=self.parser.altsep)
- return match(str(self)) is not None
+ return match(vfspath(self)) is not None
class _ReadablePath(_JoinablePath):
@@ -412,7 +431,7 @@ class _WritablePath(_JoinablePath):
while stack:
src, dst = stack.pop()
if not follow_symlinks and src.info.is_symlink():
- dst.symlink_to(str(src.readlink()), src.info.is_dir())
+ dst.symlink_to(vfspath(src.readlink()), src.info.is_dir())
elif src.info.is_dir():
children = src.iterdir()
dst.mkdir()
diff --git a/Lib/pdb.py b/Lib/pdb.py
index f89d104fcdd..544c701bbd2 100644
--- a/Lib/pdb.py
+++ b/Lib/pdb.py
@@ -3383,8 +3383,7 @@ def _connect(
f"\nLocal pdb module's protocol version: {attach_ver}"
)
else:
- remote_pdb.rcLines.extend(commands.splitlines())
- remote_pdb.set_trace(frame=frame)
+ remote_pdb.set_trace(frame=frame, commands=commands.splitlines())
def attach(pid, commands=()):
diff --git a/Lib/tarfile.py b/Lib/tarfile.py
index c0f5a609b9f..13889d76802 100644
--- a/Lib/tarfile.py
+++ b/Lib/tarfile.py
@@ -2065,7 +2065,7 @@ class TarFile(object):
"gz": "gzopen", # gzip compressed tar
"bz2": "bz2open", # bzip2 compressed tar
"xz": "xzopen", # lzma compressed tar
- "zst": "zstopen" # zstd compressed tar
+ "zst": "zstopen", # zstd compressed tar
}
#--------------------------------------------------------------------------
diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py
index 1b690cb88bf..6a20a1eb03e 100644
--- a/Lib/test/_test_multiprocessing.py
+++ b/Lib/test/_test_multiprocessing.py
@@ -513,9 +513,14 @@ class _TestProcess(BaseTestCase):
time.sleep(100)
@classmethod
- def _sleep_no_int_handler(cls):
+ def _sleep_some_event(cls, event):
+ event.set()
+ time.sleep(100)
+
+ @classmethod
+ def _sleep_no_int_handler(cls, event):
signal.signal(signal.SIGINT, signal.SIG_DFL)
- cls._sleep_some()
+ cls._sleep_some_event(event)
@classmethod
def _test_sleep(cls, delay):
@@ -525,7 +530,10 @@ class _TestProcess(BaseTestCase):
if self.TYPE == 'threads':
self.skipTest('test not appropriate for {}'.format(self.TYPE))
- p = self.Process(target=target or self._sleep_some)
+ event = self.Event()
+ if not target:
+ target = self._sleep_some_event
+ p = self.Process(target=target, args=(event,))
p.daemon = True
p.start()
@@ -543,8 +551,11 @@ class _TestProcess(BaseTestCase):
self.assertTimingAlmostEqual(join.elapsed, 0.0)
self.assertEqual(p.is_alive(), True)
- # XXX maybe terminating too soon causes the problems on Gentoo...
- time.sleep(1)
+ timeout = support.SHORT_TIMEOUT
+ if not event.wait(timeout):
+ p.terminate()
+ p.join()
+ self.fail(f"event not signaled in {timeout} seconds")
meth(p)
diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py
index c74c3a31909..9b6e80fdad9 100644
--- a/Lib/test/support/__init__.py
+++ b/Lib/test/support/__init__.py
@@ -696,9 +696,11 @@ def sortdict(dict):
return "{%s}" % withcommas
-def run_code(code: str) -> dict[str, object]:
+def run_code(code: str, extra_names: dict[str, object] | None = None) -> dict[str, object]:
"""Run a piece of code after dedenting it, and return its global namespace."""
ns = {}
+ if extra_names:
+ ns.update(extra_names)
exec(textwrap.dedent(code), ns)
return ns
diff --git a/Lib/test/support/strace_helper.py b/Lib/test/support/strace_helper.py
index 1a9d2b520b7..cf95f7bdc7d 100644
--- a/Lib/test/support/strace_helper.py
+++ b/Lib/test/support/strace_helper.py
@@ -38,7 +38,7 @@ class StraceResult:
This assumes the program under inspection doesn't print any non-utf8
strings which would mix into the strace output."""
- decoded_events = self.event_bytes.decode('utf-8')
+ decoded_events = self.event_bytes.decode('utf-8', 'surrogateescape')
matches = [
_syscall_regex.match(event)
for event in decoded_events.splitlines()
diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py
index 5a6be1180c1..58853ba4eb3 100644
--- a/Lib/test/test_argparse.py
+++ b/Lib/test/test_argparse.py
@@ -5469,11 +5469,60 @@ class TestHelpMetavarTypeFormatter(HelpTestCase):
version = ''
-class TestHelpUsageLongSubparserCommand(TestCase):
- """Test that subparser commands are formatted correctly in help"""
+class TestHelpCustomHelpFormatter(TestCase):
maxDiff = None
- def test_parent_help(self):
+ def test_custom_formatter_function(self):
+ def custom_formatter(prog):
+ return argparse.RawTextHelpFormatter(prog, indent_increment=5)
+
+ parser = argparse.ArgumentParser(
+ prog='PROG',
+ prefix_chars='-+',
+ formatter_class=custom_formatter
+ )
+ parser.add_argument('+f', '++foo', help="foo help")
+ parser.add_argument('spam', help="spam help")
+
+ parser_help = parser.format_help()
+ self.assertEqual(parser_help, textwrap.dedent('''\
+ usage: PROG [-h] [+f FOO] spam
+
+ positional arguments:
+ spam spam help
+
+ options:
+ -h, --help show this help message and exit
+ +f, ++foo FOO foo help
+ '''))
+
+ def test_custom_formatter_class(self):
+ class CustomFormatter(argparse.RawTextHelpFormatter):
+ def __init__(self, prog):
+ super().__init__(prog, indent_increment=5)
+
+ parser = argparse.ArgumentParser(
+ prog='PROG',
+ prefix_chars='-+',
+ formatter_class=CustomFormatter
+ )
+ parser.add_argument('+f', '++foo', help="foo help")
+ parser.add_argument('spam', help="spam help")
+
+ parser_help = parser.format_help()
+ self.assertEqual(parser_help, textwrap.dedent('''\
+ usage: PROG [-h] [+f FOO] spam
+
+ positional arguments:
+ spam spam help
+
+ options:
+ -h, --help show this help message and exit
+ +f, ++foo FOO foo help
+ '''))
+
+ def test_usage_long_subparser_command(self):
+ """Test that subparser commands are formatted correctly in help"""
def custom_formatter(prog):
return argparse.RawTextHelpFormatter(prog, max_help_position=50)
@@ -6973,7 +7022,7 @@ class TestProgName(TestCase):
def check_usage(self, expected, *args, **kwargs):
res = script_helper.assert_python_ok('-Xutf8', *args, '-h', **kwargs)
- self.assertEqual(res.out.splitlines()[0].decode(),
+ self.assertEqual(os.fsdecode(res.out.splitlines()[0]),
f'usage: {expected} [-h]')
def test_script(self, compiled=False):
@@ -7053,6 +7102,7 @@ class TestTranslations(TestTranslationsBase):
class TestColorized(TestCase):
+ maxDiff = None
def setUp(self):
super().setUp()
@@ -7211,6 +7261,79 @@ class TestColorized(TestCase):
),
)
+ def test_custom_formatter_function(self):
+ def custom_formatter(prog):
+ return argparse.RawTextHelpFormatter(prog, indent_increment=5)
+
+ parser = argparse.ArgumentParser(
+ prog="PROG",
+ prefix_chars="-+",
+ formatter_class=custom_formatter,
+ color=True,
+ )
+ parser.add_argument('+f', '++foo', help="foo help")
+ parser.add_argument('spam', help="spam help")
+
+ prog = self.theme.prog
+ heading = self.theme.heading
+ short = self.theme.summary_short_option
+ label = self.theme.summary_label
+ pos = self.theme.summary_action
+ long_b = self.theme.long_option
+ short_b = self.theme.short_option
+ label_b = self.theme.label
+ pos_b = self.theme.action
+ reset = self.theme.reset
+
+ parser_help = parser.format_help()
+ self.assertEqual(parser_help, textwrap.dedent(f'''\
+ {heading}usage: {reset}{prog}PROG{reset} [{short}-h{reset}] [{short}+f {label}FOO{reset}] {pos}spam{reset}
+
+ {heading}positional arguments:{reset}
+ {pos_b}spam{reset} spam help
+
+ {heading}options:{reset}
+ {short_b}-h{reset}, {long_b}--help{reset} show this help message and exit
+ {short_b}+f{reset}, {long_b}++foo{reset} {label_b}FOO{reset} foo help
+ '''))
+
+ def test_custom_formatter_class(self):
+ class CustomFormatter(argparse.RawTextHelpFormatter):
+ def __init__(self, prog):
+ super().__init__(prog, indent_increment=5)
+
+ parser = argparse.ArgumentParser(
+ prog="PROG",
+ prefix_chars="-+",
+ formatter_class=CustomFormatter,
+ color=True,
+ )
+ parser.add_argument('+f', '++foo', help="foo help")
+ parser.add_argument('spam', help="spam help")
+
+ prog = self.theme.prog
+ heading = self.theme.heading
+ short = self.theme.summary_short_option
+ label = self.theme.summary_label
+ pos = self.theme.summary_action
+ long_b = self.theme.long_option
+ short_b = self.theme.short_option
+ label_b = self.theme.label
+ pos_b = self.theme.action
+ reset = self.theme.reset
+
+ parser_help = parser.format_help()
+ self.assertEqual(parser_help, textwrap.dedent(f'''\
+ {heading}usage: {reset}{prog}PROG{reset} [{short}-h{reset}] [{short}+f {label}FOO{reset}] {pos}spam{reset}
+
+ {heading}positional arguments:{reset}
+ {pos_b}spam{reset} spam help
+
+ {heading}options:{reset}
+ {short_b}-h{reset}, {long_b}--help{reset} show this help message and exit
+ {short_b}+f{reset}, {long_b}++foo{reset} {label_b}FOO{reset} foo help
+ '''))
+
def tearDownModule():
# Remove global references to avoid looking like we have refleaks.
diff --git a/Lib/test/test_asyncio/test_tools.py b/Lib/test/test_asyncio/test_tools.py
index 0413e236c27..ba36e759ccd 100644
--- a/Lib/test/test_asyncio/test_tools.py
+++ b/Lib/test/test_asyncio/test_tools.py
@@ -791,21 +791,21 @@ class TestAsyncioToolsBasic(unittest.TestCase):
class TestAsyncioToolsEdgeCases(unittest.TestCase):
def test_task_awaits_self(self):
- """A task directly awaits itself – should raise a cycle."""
+ """A task directly awaits itself - should raise a cycle."""
input_ = [(1, [(1, "Self-Awaiter", [[["loopback"], 1]])])]
with self.assertRaises(tools.CycleFoundException) as ctx:
tools.build_async_tree(input_)
self.assertIn([1, 1], ctx.exception.cycles)
def test_task_with_missing_awaiter_id(self):
- """Awaiter ID not in task list – should not crash, just show 'Unknown'."""
+ """Awaiter ID not in task list - should not crash, just show 'Unknown'."""
input_ = [(1, [(1, "Task-A", [[["coro"], 999]])])] # 999 not defined
table = tools.build_task_table(input_)
self.assertEqual(len(table), 1)
self.assertEqual(table[0][4], "Unknown")
def test_duplicate_coroutine_frames(self):
- """Same coroutine frame repeated under a parent – should deduplicate."""
+ """Same coroutine frame repeated under a parent - should deduplicate."""
input_ = [
(
1,
@@ -829,7 +829,7 @@ class TestAsyncioToolsEdgeCases(unittest.TestCase):
self.assertIn("Task-1", flat)
def test_task_with_no_name(self):
- """Task with no name in id2name – should still render with fallback."""
+ """Task with no name in id2name - should still render with fallback."""
input_ = [(1, [(1, "root", [[["f1"], 2]]), (2, None, [])])]
# If name is None, fallback to string should not crash
tree = tools.build_async_tree(input_)
diff --git a/Lib/test/test_codeccallbacks.py b/Lib/test/test_codeccallbacks.py
index 86e5e5c1474..a767f67a02c 100644
--- a/Lib/test/test_codeccallbacks.py
+++ b/Lib/test/test_codeccallbacks.py
@@ -2,6 +2,7 @@ from _codecs import _unregister_error as _codecs_unregister_error
import codecs
import html.entities
import itertools
+import re
import sys
import unicodedata
import unittest
@@ -1125,7 +1126,7 @@ class CodecCallbackTest(unittest.TestCase):
text = 'abc<def>ghi'*n
text.translate(charmap)
- def test_mutatingdecodehandler(self):
+ def test_mutating_decode_handler(self):
baddata = [
("ascii", b"\xff"),
("utf-7", b"++"),
@@ -1160,6 +1161,42 @@ class CodecCallbackTest(unittest.TestCase):
for (encoding, data) in baddata:
self.assertEqual(data.decode(encoding, "test.mutating"), "\u4242")
+ def test_mutating_decode_handler_unicode_escape(self):
+ decode = codecs.unicode_escape_decode
+ def mutating(exc):
+ if isinstance(exc, UnicodeDecodeError):
+ r = data.get(exc.object[:exc.end])
+ if r is not None:
+ exc.object = r[0] + exc.object[exc.end:]
+ return ('\u0404', r[1])
+ raise AssertionError("don't know how to handle %r" % exc)
+
+ codecs.register_error('test.mutating2', mutating)
+ data = {
+ br'\x0': (b'\\', 0),
+ br'\x3': (b'xxx\\', 3),
+ br'\x5': (b'x\\', 1),
+ }
+ def check(input, expected, msg):
+ with self.assertWarns(DeprecationWarning) as cm:
+ self.assertEqual(decode(input, 'test.mutating2'), (expected, len(input)))
+ self.assertIn(msg, str(cm.warning))
+
+ check(br'\x0n\z', '\u0404\n\\z', r'"\z" is an invalid escape sequence')
+ check(br'\x0n\501', '\u0404\n\u0141', r'"\501" is an invalid octal escape sequence')
+ check(br'\x0z', '\u0404\\z', r'"\z" is an invalid escape sequence')
+
+ check(br'\x3n\zr', '\u0404\n\\zr', r'"\z" is an invalid escape sequence')
+ check(br'\x3zr', '\u0404\\zr', r'"\z" is an invalid escape sequence')
+ check(br'\x3z5', '\u0404\\z5', r'"\z" is an invalid escape sequence')
+ check(memoryview(br'\x3z5x')[:-1], '\u0404\\z5', r'"\z" is an invalid escape sequence')
+ check(memoryview(br'\x3z5xy')[:-2], '\u0404\\z5', r'"\z" is an invalid escape sequence')
+
+ check(br'\x5n\z', '\u0404\n\\z', r'"\z" is an invalid escape sequence')
+ check(br'\x5n\501', '\u0404\n\u0141', r'"\501" is an invalid octal escape sequence')
+ check(br'\x5z', '\u0404\\z', r'"\z" is an invalid escape sequence')
+ check(memoryview(br'\x5zy')[:-1], '\u0404\\z', r'"\z" is an invalid escape sequence')
+
# issue32583
def test_crashing_decode_handler(self):
# better generating one more character to fill the extra space slot
diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py
index 94fcf98e757..d42270da15e 100644
--- a/Lib/test/test_codecs.py
+++ b/Lib/test/test_codecs.py
@@ -1196,23 +1196,39 @@ class EscapeDecodeTest(unittest.TestCase):
check(br"[\1010]", b"[A0]")
check(br"[\x41]", b"[A]")
check(br"[\x410]", b"[A0]")
+
+ def test_warnings(self):
+ decode = codecs.escape_decode
+ check = coding_checker(self, decode)
for i in range(97, 123):
b = bytes([i])
if b not in b'abfnrtvx':
- with self.assertWarns(DeprecationWarning):
+ with self.assertWarnsRegex(DeprecationWarning,
+ r'"\\%c" is an invalid escape sequence' % i):
check(b"\\" + b, b"\\" + b)
- with self.assertWarns(DeprecationWarning):
+ with self.assertWarnsRegex(DeprecationWarning,
+ r'"\\%c" is an invalid escape sequence' % (i-32)):
check(b"\\" + b.upper(), b"\\" + b.upper())
- with self.assertWarns(DeprecationWarning):
+ with self.assertWarnsRegex(DeprecationWarning,
+ r'"\\8" is an invalid escape sequence'):
check(br"\8", b"\\8")
with self.assertWarns(DeprecationWarning):
check(br"\9", b"\\9")
- with self.assertWarns(DeprecationWarning):
+ with self.assertWarnsRegex(DeprecationWarning,
+ r'"\\\xfa" is an invalid escape sequence') as cm:
check(b"\\\xfa", b"\\\xfa")
for i in range(0o400, 0o1000):
- with self.assertWarns(DeprecationWarning):
+ with self.assertWarnsRegex(DeprecationWarning,
+ r'"\\%o" is an invalid octal escape sequence' % i):
check(rb'\%o' % i, bytes([i & 0o377]))
+ with self.assertWarnsRegex(DeprecationWarning,
+ r'"\\z" is an invalid escape sequence'):
+ self.assertEqual(decode(br'\x\z', 'ignore'), (b'\\z', 4))
+ with self.assertWarnsRegex(DeprecationWarning,
+ r'"\\501" is an invalid octal escape sequence'):
+ self.assertEqual(decode(br'\x\501', 'ignore'), (b'A', 6))
+
def test_errors(self):
decode = codecs.escape_decode
self.assertRaises(ValueError, decode, br"\x")
@@ -2661,24 +2677,40 @@ class UnicodeEscapeTest(ReadTest, unittest.TestCase):
check(br"[\x410]", "[A0]")
check(br"\u20ac", "\u20ac")
check(br"\U0001d120", "\U0001d120")
+
+ def test_decode_warnings(self):
+ decode = codecs.unicode_escape_decode
+ check = coding_checker(self, decode)
for i in range(97, 123):
b = bytes([i])
if b not in b'abfnrtuvx':
- with self.assertWarns(DeprecationWarning):
+ with self.assertWarnsRegex(DeprecationWarning,
+ r'"\\%c" is an invalid escape sequence' % i):
check(b"\\" + b, "\\" + chr(i))
if b.upper() not in b'UN':
- with self.assertWarns(DeprecationWarning):
+ with self.assertWarnsRegex(DeprecationWarning,
+ r'"\\%c" is an invalid escape sequence' % (i-32)):
check(b"\\" + b.upper(), "\\" + chr(i-32))
- with self.assertWarns(DeprecationWarning):
+ with self.assertWarnsRegex(DeprecationWarning,
+ r'"\\8" is an invalid escape sequence'):
check(br"\8", "\\8")
with self.assertWarns(DeprecationWarning):
check(br"\9", "\\9")
- with self.assertWarns(DeprecationWarning):
+ with self.assertWarnsRegex(DeprecationWarning,
+ r'"\\\xfa" is an invalid escape sequence') as cm:
check(b"\\\xfa", "\\\xfa")
for i in range(0o400, 0o1000):
- with self.assertWarns(DeprecationWarning):
+ with self.assertWarnsRegex(DeprecationWarning,
+ r'"\\%o" is an invalid octal escape sequence' % i):
check(rb'\%o' % i, chr(i))
+ with self.assertWarnsRegex(DeprecationWarning,
+ r'"\\z" is an invalid escape sequence'):
+ self.assertEqual(decode(br'\x\z', 'ignore'), ('\\z', 4))
+ with self.assertWarnsRegex(DeprecationWarning,
+ r'"\\501" is an invalid octal escape sequence'):
+ self.assertEqual(decode(br'\x\501', 'ignore'), ('\u0141', 6))
+
def test_decode_errors(self):
decode = codecs.unicode_escape_decode
for c, d in (b'x', 2), (b'u', 4), (b'U', 4):
diff --git a/Lib/test/test_crossinterp.py b/Lib/test/test_crossinterp.py
index b366a29645e..cddacbc9970 100644
--- a/Lib/test/test_crossinterp.py
+++ b/Lib/test/test_crossinterp.py
@@ -758,6 +758,40 @@ class CodeTests(_GetXIDataTests):
])
+class ShareableFuncTests(_GetXIDataTests):
+
+ MODE = 'func'
+
+ def test_stateless(self):
+ self.assert_roundtrip_not_equal([
+ *defs.STATELESS_FUNCTIONS,
+ # Generators can be stateless too.
+ *defs.FUNCTION_LIKE,
+ ])
+
+ def test_not_stateless(self):
+ self.assert_not_shareable([
+ *(f for f in defs.FUNCTIONS
+ if f not in defs.STATELESS_FUNCTIONS),
+ ])
+
+ def test_other_objects(self):
+ self.assert_not_shareable([
+ None,
+ True,
+ False,
+ Ellipsis,
+ NotImplemented,
+ 9999,
+ 'spam',
+ b'spam',
+ (),
+ [],
+ {},
+ object(),
+ ])
+
+
class PureShareableScriptTests(_GetXIDataTests):
MODE = 'script-pure'
diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py
index 69f1a098920..10a6d071b0f 100644
--- a/Lib/test/test_dict.py
+++ b/Lib/test/test_dict.py
@@ -1039,10 +1039,8 @@ class DictTest(unittest.TestCase):
a = C()
a.x = 1
d = a.__dict__
- before_resize = sys.getsizeof(d)
d[2] = 2 # split table is resized to a generic combined table
- self.assertGreater(sys.getsizeof(d), before_resize)
self.assertEqual(list(d), ['x', 2])
def test_iterator_pickling(self):
diff --git a/Lib/test/test_fcntl.py b/Lib/test/test_fcntl.py
index b84c98ef3a2..e0e6782258f 100644
--- a/Lib/test/test_fcntl.py
+++ b/Lib/test/test_fcntl.py
@@ -228,6 +228,52 @@ class TestFcntl(unittest.TestCase):
os.close(test_pipe_r)
os.close(test_pipe_w)
+ def _check_fcntl_not_mutate_len(self, nbytes=None):
+ self.f = open(TESTFN, 'wb')
+ buf = struct.pack('ii', fcntl.F_OWNER_PID, os.getpid())
+ if nbytes is not None:
+ buf += b' ' * (nbytes - len(buf))
+ else:
+ nbytes = len(buf)
+ save_buf = bytes(buf)
+ r = fcntl.fcntl(self.f, fcntl.F_SETOWN_EX, buf)
+ self.assertIsInstance(r, bytes)
+ self.assertEqual(len(r), len(save_buf))
+ self.assertEqual(buf, save_buf)
+ type, pid = memoryview(r).cast('i')[:2]
+ self.assertEqual(type, fcntl.F_OWNER_PID)
+ self.assertEqual(pid, os.getpid())
+
+ buf = b' ' * nbytes
+ r = fcntl.fcntl(self.f, fcntl.F_GETOWN_EX, buf)
+ self.assertIsInstance(r, bytes)
+ self.assertEqual(len(r), len(save_buf))
+ self.assertEqual(buf, b' ' * nbytes)
+ type, pid = memoryview(r).cast('i')[:2]
+ self.assertEqual(type, fcntl.F_OWNER_PID)
+ self.assertEqual(pid, os.getpid())
+
+ buf = memoryview(b' ' * nbytes)
+ r = fcntl.fcntl(self.f, fcntl.F_GETOWN_EX, buf)
+ self.assertIsInstance(r, bytes)
+ self.assertEqual(len(r), len(save_buf))
+ self.assertEqual(bytes(buf), b' ' * nbytes)
+ type, pid = memoryview(r).cast('i')[:2]
+ self.assertEqual(type, fcntl.F_OWNER_PID)
+ self.assertEqual(pid, os.getpid())
+
+ @unittest.skipUnless(
+ hasattr(fcntl, "F_SETOWN_EX") and hasattr(fcntl, "F_GETOWN_EX"),
+ "requires F_SETOWN_EX and F_GETOWN_EX")
+ def test_fcntl_small_buffer(self):
+ self._check_fcntl_not_mutate_len()
+
+ @unittest.skipUnless(
+ hasattr(fcntl, "F_SETOWN_EX") and hasattr(fcntl, "F_GETOWN_EX"),
+ "requires F_SETOWN_EX and F_GETOWN_EX")
+ def test_fcntl_large_buffer(self):
+ self._check_fcntl_not_mutate_len(2024)
+
if __name__ == '__main__':
unittest.main()
diff --git a/Lib/test/test_fractions.py b/Lib/test/test_fractions.py
index 84faa636064..96b3f305194 100644
--- a/Lib/test/test_fractions.py
+++ b/Lib/test/test_fractions.py
@@ -1,7 +1,7 @@
"""Tests for Lib/fractions.py."""
from decimal import Decimal
-from test.support import requires_IEEE_754
+from test.support import requires_IEEE_754, adjust_int_max_str_digits
import math
import numbers
import operator
@@ -395,12 +395,14 @@ class FractionTest(unittest.TestCase):
def testFromString(self):
self.assertEqual((5, 1), _components(F("5")))
+ self.assertEqual((5, 1), _components(F("005")))
self.assertEqual((3, 2), _components(F("3/2")))
self.assertEqual((3, 2), _components(F("3 / 2")))
self.assertEqual((3, 2), _components(F(" \n +3/2")))
self.assertEqual((-3, 2), _components(F("-3/2 ")))
- self.assertEqual((13, 2), _components(F(" 013/02 \n ")))
+ self.assertEqual((13, 2), _components(F(" 0013/002 \n ")))
self.assertEqual((16, 5), _components(F(" 3.2 ")))
+ self.assertEqual((16, 5), _components(F("003.2")))
self.assertEqual((-16, 5), _components(F(" -3.2 ")))
self.assertEqual((-3, 1), _components(F(" -3. ")))
self.assertEqual((3, 5), _components(F(" .6 ")))
@@ -419,116 +421,102 @@ class FractionTest(unittest.TestCase):
self.assertRaisesMessage(
ZeroDivisionError, "Fraction(3, 0)",
F, "3/0")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '3/'",
- F, "3/")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '/2'",
- F, "/2")
- self.assertRaisesMessage(
- # Denominators don't need a sign.
- ValueError, "Invalid literal for Fraction: '3/+2'",
- F, "3/+2")
- self.assertRaisesMessage(
- # Imitate float's parsing.
- ValueError, "Invalid literal for Fraction: '+ 3/2'",
- F, "+ 3/2")
- self.assertRaisesMessage(
- # Avoid treating '.' as a regex special character.
- ValueError, "Invalid literal for Fraction: '3a2'",
- F, "3a2")
- self.assertRaisesMessage(
- # Don't accept combinations of decimals and rationals.
- ValueError, "Invalid literal for Fraction: '3/7.2'",
- F, "3/7.2")
- self.assertRaisesMessage(
- # Don't accept combinations of decimals and rationals.
- ValueError, "Invalid literal for Fraction: '3.2/7'",
- F, "3.2/7")
- self.assertRaisesMessage(
- # Allow 3. and .3, but not .
- ValueError, "Invalid literal for Fraction: '.'",
- F, ".")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '_'",
- F, "_")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '_1'",
- F, "_1")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '1__2'",
- F, "1__2")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '/_'",
- F, "/_")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '1_/'",
- F, "1_/")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '_1/'",
- F, "_1/")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '1__2/'",
- F, "1__2/")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '1/_'",
- F, "1/_")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '1/_1'",
- F, "1/_1")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '1/1__2'",
- F, "1/1__2")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '1._111'",
- F, "1._111")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '1.1__1'",
- F, "1.1__1")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '1.1e+_1'",
- F, "1.1e+_1")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '1.1e+1__1'",
- F, "1.1e+1__1")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '123.dd'",
- F, "123.dd")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '123.5_dd'",
- F, "123.5_dd")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: 'dd.5'",
- F, "dd.5")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '7_dd'",
- F, "7_dd")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '1/dd'",
- F, "1/dd")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '1/123_dd'",
- F, "1/123_dd")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '789edd'",
- F, "789edd")
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '789e2_dd'",
- F, "789e2_dd")
+
+ def check_invalid(s):
+ msg = "Invalid literal for Fraction: " + repr(s)
+ self.assertRaisesMessage(ValueError, msg, F, s)
+
+ check_invalid("3/")
+ check_invalid("/2")
+ # Denominators don't need a sign.
+ check_invalid("3/+2")
+ check_invalid("3/-2")
+ # Imitate float's parsing.
+ check_invalid("+ 3/2")
+ check_invalid("- 3/2")
+ # Avoid treating '.' as a regex special character.
+ check_invalid("3a2")
+ # Don't accept combinations of decimals and rationals.
+ check_invalid("3/7.2")
+ check_invalid("3.2/7")
+ # No space around dot.
+ check_invalid("3 .2")
+ check_invalid("3. 2")
+ # No space around e.
+ check_invalid("3.2 e1")
+ check_invalid("3.2e 1")
+ # Fractional part don't need a sign.
+ check_invalid("3.+2")
+ check_invalid("3.-2")
+ # Only accept base 10.
+ check_invalid("0x10")
+ check_invalid("0x10/1")
+ check_invalid("1/0x10")
+ check_invalid("0x10.")
+ check_invalid("0x10.1")
+ check_invalid("1.0x10")
+ check_invalid("1.0e0x10")
+ # Only accept decimal digits.
+ check_invalid("³")
+ check_invalid("³/2")
+ check_invalid("3/²")
+ check_invalid("³.2")
+ check_invalid("3.²")
+ check_invalid("3.2e²")
+ check_invalid("¼")
+ # Allow 3. and .3, but not .
+ check_invalid(".")
+ check_invalid("_")
+ check_invalid("_1")
+ check_invalid("1__2")
+ check_invalid("/_")
+ check_invalid("1_/")
+ check_invalid("_1/")
+ check_invalid("1__2/")
+ check_invalid("1/_")
+ check_invalid("1/_1")
+ check_invalid("1/1__2")
+ check_invalid("1._111")
+ check_invalid("1.1__1")
+ check_invalid("1.1e+_1")
+ check_invalid("1.1e+1__1")
+ check_invalid("123.dd")
+ check_invalid("123.5_dd")
+ check_invalid("dd.5")
+ check_invalid("7_dd")
+ check_invalid("1/dd")
+ check_invalid("1/123_dd")
+ check_invalid("789edd")
+ check_invalid("789e2_dd")
# Test catastrophic backtracking.
val = "9"*50 + "_"
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '" + val + "'",
- F, val)
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '1/" + val + "'",
- F, "1/" + val)
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '1." + val + "'",
- F, "1." + val)
- self.assertRaisesMessage(
- ValueError, "Invalid literal for Fraction: '1.1+e" + val + "'",
- F, "1.1+e" + val)
+ check_invalid(val)
+ check_invalid("1/" + val)
+ check_invalid("1." + val)
+ check_invalid("." + val)
+ check_invalid("1.1+e" + val)
+ check_invalid("1.1e" + val)
+
+ def test_limit_int(self):
+ maxdigits = 5000
+ with adjust_int_max_str_digits(maxdigits):
+ msg = 'Exceeds the limit'
+ val = '1' * maxdigits
+ num = (10**maxdigits - 1)//9
+ self.assertEqual((num, 1), _components(F(val)))
+ self.assertRaisesRegex(ValueError, msg, F, val + '1')
+ self.assertEqual((num, 2), _components(F(val + '/2')))
+ self.assertRaisesRegex(ValueError, msg, F, val + '1/2')
+ self.assertEqual((1, num), _components(F('1/' + val)))
+ self.assertRaisesRegex(ValueError, msg, F, '1/1' + val)
+ self.assertEqual(((10**(maxdigits+1) - 1)//9, 10**maxdigits),
+ _components(F('1.' + val)))
+ self.assertRaisesRegex(ValueError, msg, F, '1.1' + val)
+ self.assertEqual((num, 10**maxdigits), _components(F('.' + val)))
+ self.assertRaisesRegex(ValueError, msg, F, '.1' + val)
+ self.assertRaisesRegex(ValueError, msg, F, '1.1e1' + val)
+ self.assertEqual((11, 10), _components(F('1.1e' + '0' * maxdigits)))
+ self.assertRaisesRegex(ValueError, msg, F, '1.1e' + '0' * (maxdigits+1))
def testImmutable(self):
r = F(7, 3)
diff --git a/Lib/test/test_free_threading/test_functools.py b/Lib/test/test_free_threading/test_functools.py
new file mode 100644
index 00000000000..a442fe056ce
--- /dev/null
+++ b/Lib/test/test_free_threading/test_functools.py
@@ -0,0 +1,75 @@
+import random
+import unittest
+
+from functools import lru_cache
+from threading import Barrier, Thread
+
+from test.support import threading_helper
+
+@threading_helper.requires_working_threading()
+class TestLRUCache(unittest.TestCase):
+
+ def _test_concurrent_operations(self, maxsize):
+ num_threads = 10
+ b = Barrier(num_threads)
+ @lru_cache(maxsize=maxsize)
+ def func(arg=0):
+ return object()
+
+
+ def thread_func():
+ b.wait()
+ for i in range(1000):
+ r = random.randint(0, 1000)
+ if i < 800:
+ func(i)
+ elif i < 900:
+ func.cache_info()
+ else:
+ func.cache_clear()
+
+ threads = []
+ for i in range(num_threads):
+ t = Thread(target=thread_func)
+ threads.append(t)
+
+ with threading_helper.start_threads(threads):
+ pass
+
+ def test_concurrent_operations_unbounded(self):
+ self._test_concurrent_operations(maxsize=None)
+
+ def test_concurrent_operations_bounded(self):
+ self._test_concurrent_operations(maxsize=128)
+
+ def _test_reentrant_cache_clear(self, maxsize):
+ num_threads = 10
+ b = Barrier(num_threads)
+ @lru_cache(maxsize=maxsize)
+ def func(arg=0):
+ func.cache_clear()
+ return object()
+
+
+ def thread_func():
+ b.wait()
+ for i in range(1000):
+ func(random.randint(0, 10000))
+
+ threads = []
+ for i in range(num_threads):
+ t = Thread(target=thread_func)
+ threads.append(t)
+
+ with threading_helper.start_threads(threads):
+ pass
+
+ def test_reentrant_cache_clear_unbounded(self):
+ self._test_reentrant_cache_clear(maxsize=None)
+
+ def test_reentrant_cache_clear_bounded(self):
+ self._test_reentrant_cache_clear(maxsize=128)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/Lib/test/test_genericalias.py b/Lib/test/test_genericalias.py
index 8d21ded4501..ea0dc241e39 100644
--- a/Lib/test/test_genericalias.py
+++ b/Lib/test/test_genericalias.py
@@ -61,6 +61,7 @@ try:
from tkinter import Event
except ImportError:
Event = None
+from string.templatelib import Template, Interpolation
from typing import TypeVar
T = TypeVar('T')
@@ -139,7 +140,10 @@ class BaseTest(unittest.TestCase):
DictReader, DictWriter,
array,
staticmethod,
- classmethod]
+ classmethod,
+ Template,
+ Interpolation,
+ ]
if ctypes is not None:
generic_types.extend((ctypes.Array, ctypes.LibraryLoader, ctypes.py_object))
if ValueProxy is not None:
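Note: this hunk registers string.templatelib.Template and Interpolation among the types expected to support types.GenericAlias subscription. A hedged check of what the test asserts:

    import types
    from string.templatelib import Template, Interpolation

    # Per the test, both classes should now be subscriptable like other
    # stdlib generic containers.
    alias = Template[str]
    print(type(alias) is types.GenericAlias)   # expected: True
    print(Interpolation[int])                  # prints the parameterized alias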
diff --git a/Lib/test/test_ioctl.py b/Lib/test/test_ioctl.py
index 7a986048bda..3c7a58aa2bc 100644
--- a/Lib/test/test_ioctl.py
+++ b/Lib/test/test_ioctl.py
@@ -127,9 +127,8 @@ class IoctlTestsTty(unittest.TestCase):
self._check_ioctl_not_mutate_len(1024)
def test_ioctl_mutate_2048(self):
- # Test with a larger buffer, just for the record.
self._check_ioctl_mutate_len(2048)
- self.assertRaises(ValueError, self._check_ioctl_not_mutate_len, 2048)
+ self._check_ioctl_not_mutate_len(1024)
@unittest.skipUnless(hasattr(os, 'openpty'), "need os.openpty()")
diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py
index 1e5adcc8db1..fa5b1e43816 100644
--- a/Lib/test/test_logging.py
+++ b/Lib/test/test_logging.py
@@ -61,7 +61,7 @@ import warnings
import weakref
from http.server import HTTPServer, BaseHTTPRequestHandler
-from unittest.mock import patch
+from unittest.mock import call, Mock, patch
from urllib.parse import urlparse, parse_qs
from socketserver import (ThreadingUDPServer, DatagramRequestHandler,
ThreadingTCPServer, StreamRequestHandler)
@@ -5655,12 +5655,19 @@ class BasicConfigTest(unittest.TestCase):
assertRaises = self.assertRaises
handlers = [logging.StreamHandler()]
stream = sys.stderr
+ formatter = logging.Formatter()
assertRaises(ValueError, logging.basicConfig, filename='test.log',
stream=stream)
assertRaises(ValueError, logging.basicConfig, filename='test.log',
handlers=handlers)
assertRaises(ValueError, logging.basicConfig, stream=stream,
handlers=handlers)
+ assertRaises(ValueError, logging.basicConfig, formatter=formatter,
+ format='%(message)s')
+ assertRaises(ValueError, logging.basicConfig, formatter=formatter,
+ datefmt='%H:%M:%S')
+ assertRaises(ValueError, logging.basicConfig, formatter=formatter,
+ style='%')
# Issue 23207: test for invalid kwargs
assertRaises(ValueError, logging.basicConfig, loglevel=logging.INFO)
# Should pop both filename and filemode even if filename is None
@@ -5795,6 +5802,20 @@ class BasicConfigTest(unittest.TestCase):
# didn't write anything due to the encoding error
self.assertEqual(data, r'')
+ def test_formatter_given(self):
+ mock_formatter = Mock()
+ mock_handler = Mock(formatter=None)
+ with patch("logging.Formatter") as mock_formatter_init:
+ logging.basicConfig(formatter=mock_formatter, handlers=[mock_handler])
+ self.assertEqual(mock_handler.setFormatter.call_args_list, [call(mock_formatter)])
+ self.assertEqual(mock_formatter_init.call_count, 0)
+
+ def test_formatter_not_given(self):
+ mock_handler = Mock(formatter=None)
+ with patch("logging.Formatter") as mock_formatter_init:
+ logging.basicConfig(handlers=[mock_handler])
+ self.assertEqual(mock_formatter_init.call_count, 1)
+
@support.requires_working_socket()
def test_log_taskName(self):
async def log_record():
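Note: the logging tests above cover a formatter= keyword for logging.basicConfig(): a pre-built Formatter instance may be passed, but it is mutually exclusive with format, datefmt and style. A hedged usage sketch based on what the tests check:

    import logging

    fmt = logging.Formatter("%(asctime)s %(levelname)s %(message)s",
                            datefmt="%H:%M:%S")
    logging.basicConfig(level=logging.INFO, formatter=fmt)
    logging.info("configured with a pre-built Formatter")

    # Combining formatter= with format=/datefmt=/style= is rejected:
    try:
        logging.basicConfig(formatter=fmt, format="%(message)s", force=True)
    except ValueError as exc:
        print(exc)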
diff --git a/Lib/test/test_pathlib/support/lexical_path.py b/Lib/test/test_pathlib/support/lexical_path.py
index f29a521af9b..fd7fbf283a6 100644
--- a/Lib/test/test_pathlib/support/lexical_path.py
+++ b/Lib/test/test_pathlib/support/lexical_path.py
@@ -9,9 +9,10 @@ import posixpath
from . import is_pypi
if is_pypi:
- from pathlib_abc import _JoinablePath
+ from pathlib_abc import vfspath, _JoinablePath
else:
from pathlib.types import _JoinablePath
+ from pathlib._os import vfspath
class LexicalPath(_JoinablePath):
@@ -22,20 +23,20 @@ class LexicalPath(_JoinablePath):
self._segments = pathsegments
def __hash__(self):
- return hash(str(self))
+ return hash(vfspath(self))
def __eq__(self, other):
if not isinstance(other, LexicalPath):
return NotImplemented
- return str(self) == str(other)
+ return vfspath(self) == vfspath(other)
- def __str__(self):
+ def __vfspath__(self):
if not self._segments:
return ''
return self.parser.join(*self._segments)
def __repr__(self):
- return f'{type(self).__name__}({str(self)!r})'
+ return f'{type(self).__name__}({vfspath(self)!r})'
def with_segments(self, *pathsegments):
return type(self)(*pathsegments)
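Note: the pathlib test-support changes replace __str__ with a __vfspath__ protocol method and use a vfspath() helper (pathlib._os in the stdlib, pathlib_abc on PyPI) to obtain a path's string form. A minimal sketch of the protocol as the tests rely on it, assuming vfspath() falls back to the object's __vfspath__(); pathlib._os is a private module, so this is illustrative only:

    import posixpath
    from pathlib._os import vfspath   # internal helper used by these tests

    class MiniPath:
        parser = posixpath

        def __init__(self, *segments):
            self._segments = segments

        def __vfspath__(self):
            # String form of the virtual path, without defining __str__ or __fspath__.
            if not self._segments:
                return ''
            return self.parser.join(*self._segments)

    p = MiniPath('a', 'b', 'c')
    print(vfspath(p))     # 'a/b/c'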
diff --git a/Lib/test/test_pathlib/support/local_path.py b/Lib/test/test_pathlib/support/local_path.py
index d481fd45ead..c1423c545bf 100644
--- a/Lib/test/test_pathlib/support/local_path.py
+++ b/Lib/test/test_pathlib/support/local_path.py
@@ -97,7 +97,7 @@ class LocalPathInfo(PathInfo):
__slots__ = ('_path', '_exists', '_is_dir', '_is_file', '_is_symlink')
def __init__(self, path):
- self._path = str(path)
+ self._path = os.fspath(path)
self._exists = None
self._is_dir = None
self._is_file = None
@@ -139,14 +139,12 @@ class ReadableLocalPath(_ReadablePath, LexicalPath):
Simple implementation of a ReadablePath class for local filesystem paths.
"""
__slots__ = ('info',)
+ __fspath__ = LexicalPath.__vfspath__
def __init__(self, *pathsegments):
super().__init__(*pathsegments)
self.info = LocalPathInfo(self)
- def __fspath__(self):
- return str(self)
-
def __open_rb__(self, buffering=-1):
return open(self, 'rb')
@@ -163,9 +161,7 @@ class WritableLocalPath(_WritablePath, LexicalPath):
"""
__slots__ = ()
-
- def __fspath__(self):
- return str(self)
+ __fspath__ = LexicalPath.__vfspath__
def __open_wb__(self, buffering=-1):
return open(self, 'wb')
diff --git a/Lib/test/test_pathlib/support/zip_path.py b/Lib/test/test_pathlib/support/zip_path.py
index 2905260c9df..21e1d07423a 100644
--- a/Lib/test/test_pathlib/support/zip_path.py
+++ b/Lib/test/test_pathlib/support/zip_path.py
@@ -16,9 +16,10 @@ from stat import S_IFMT, S_ISDIR, S_ISREG, S_ISLNK
from . import is_pypi
if is_pypi:
- from pathlib_abc import PathInfo, _ReadablePath, _WritablePath
+ from pathlib_abc import vfspath, PathInfo, _ReadablePath, _WritablePath
else:
from pathlib.types import PathInfo, _ReadablePath, _WritablePath
+ from pathlib._os import vfspath
class ZipPathGround:
@@ -34,16 +35,16 @@ class ZipPathGround:
root.zip_file.close()
def create_file(self, path, data=b''):
- path.zip_file.writestr(str(path), data)
+ path.zip_file.writestr(vfspath(path), data)
def create_dir(self, path):
- zip_info = zipfile.ZipInfo(str(path) + '/')
+ zip_info = zipfile.ZipInfo(vfspath(path) + '/')
zip_info.external_attr |= stat.S_IFDIR << 16
zip_info.external_attr |= stat.FILE_ATTRIBUTE_DIRECTORY
path.zip_file.writestr(zip_info, '')
def create_symlink(self, path, target):
- zip_info = zipfile.ZipInfo(str(path))
+ zip_info = zipfile.ZipInfo(vfspath(path))
zip_info.external_attr = stat.S_IFLNK << 16
path.zip_file.writestr(zip_info, target.encode())
@@ -62,28 +63,28 @@ class ZipPathGround:
self.create_symlink(p.joinpath('brokenLinkLoop'), 'brokenLinkLoop')
def readtext(self, p):
- with p.zip_file.open(str(p), 'r') as f:
+ with p.zip_file.open(vfspath(p), 'r') as f:
f = io.TextIOWrapper(f, encoding='utf-8')
return f.read()
def readbytes(self, p):
- with p.zip_file.open(str(p), 'r') as f:
+ with p.zip_file.open(vfspath(p), 'r') as f:
return f.read()
readlink = readtext
def isdir(self, p):
- path_str = str(p) + "/"
+ path_str = vfspath(p) + "/"
return path_str in p.zip_file.NameToInfo
def isfile(self, p):
- info = p.zip_file.NameToInfo.get(str(p))
+ info = p.zip_file.NameToInfo.get(vfspath(p))
if info is None:
return False
return not stat.S_ISLNK(info.external_attr >> 16)
def islink(self, p):
- info = p.zip_file.NameToInfo.get(str(p))
+ info = p.zip_file.NameToInfo.get(vfspath(p))
if info is None:
return False
return stat.S_ISLNK(info.external_attr >> 16)
@@ -240,20 +241,20 @@ class ReadableZipPath(_ReadablePath):
zip_file.filelist = ZipFileList(zip_file)
def __hash__(self):
- return hash((str(self), self.zip_file))
+ return hash((vfspath(self), self.zip_file))
def __eq__(self, other):
if not isinstance(other, ReadableZipPath):
return NotImplemented
- return str(self) == str(other) and self.zip_file is other.zip_file
+ return vfspath(self) == vfspath(other) and self.zip_file is other.zip_file
- def __str__(self):
+ def __vfspath__(self):
if not self._segments:
return ''
return self.parser.join(*self._segments)
def __repr__(self):
- return f'{type(self).__name__}({str(self)!r}, zip_file={self.zip_file!r})'
+ return f'{type(self).__name__}({vfspath(self)!r}, zip_file={self.zip_file!r})'
def with_segments(self, *pathsegments):
return type(self)(*pathsegments, zip_file=self.zip_file)
@@ -261,7 +262,7 @@ class ReadableZipPath(_ReadablePath):
@property
def info(self):
tree = self.zip_file.filelist.tree
- return tree.resolve(str(self), follow_symlinks=False)
+ return tree.resolve(vfspath(self), follow_symlinks=False)
def __open_rb__(self, buffering=-1):
info = self.info.resolve()
@@ -301,36 +302,36 @@ class WritableZipPath(_WritablePath):
self.zip_file = zip_file
def __hash__(self):
- return hash((str(self), self.zip_file))
+ return hash((vfspath(self), self.zip_file))
def __eq__(self, other):
if not isinstance(other, WritableZipPath):
return NotImplemented
- return str(self) == str(other) and self.zip_file is other.zip_file
+ return vfspath(self) == vfspath(other) and self.zip_file is other.zip_file
- def __str__(self):
+ def __vfspath__(self):
if not self._segments:
return ''
return self.parser.join(*self._segments)
def __repr__(self):
- return f'{type(self).__name__}({str(self)!r}, zip_file={self.zip_file!r})'
+ return f'{type(self).__name__}({vfspath(self)!r}, zip_file={self.zip_file!r})'
def with_segments(self, *pathsegments):
return type(self)(*pathsegments, zip_file=self.zip_file)
def __open_wb__(self, buffering=-1):
- return self.zip_file.open(str(self), 'w')
+ return self.zip_file.open(vfspath(self), 'w')
def mkdir(self, mode=0o777):
- zinfo = zipfile.ZipInfo(str(self) + '/')
+ zinfo = zipfile.ZipInfo(vfspath(self) + '/')
zinfo.external_attr |= stat.S_IFDIR << 16
zinfo.external_attr |= stat.FILE_ATTRIBUTE_DIRECTORY
self.zip_file.writestr(zinfo, '')
def symlink_to(self, target, target_is_directory=False):
- zinfo = zipfile.ZipInfo(str(self))
+ zinfo = zipfile.ZipInfo(vfspath(self))
zinfo.external_attr = stat.S_IFLNK << 16
if target_is_directory:
zinfo.external_attr |= 0x10
- self.zip_file.writestr(zinfo, str(target))
+ self.zip_file.writestr(zinfo, vfspath(target))
diff --git a/Lib/test/test_pathlib/test_join_windows.py b/Lib/test/test_pathlib/test_join_windows.py
index 2cc634f25ef..f30c80605f7 100644
--- a/Lib/test/test_pathlib/test_join_windows.py
+++ b/Lib/test/test_pathlib/test_join_windows.py
@@ -8,6 +8,11 @@ import unittest
from .support import is_pypi
from .support.lexical_path import LexicalWindowsPath
+if is_pypi:
+ from pathlib_abc import vfspath
+else:
+ from pathlib._os import vfspath
+
class JoinTestBase:
def test_join(self):
@@ -70,17 +75,17 @@ class JoinTestBase:
self.assertEqual(p / './dd:s', P(r'C:/a/b\./dd:s'))
self.assertEqual(p / 'E:d:s', P('E:d:s'))
- def test_str(self):
+ def test_vfspath(self):
p = self.cls(r'a\b\c')
- self.assertEqual(str(p), 'a\\b\\c')
+ self.assertEqual(vfspath(p), 'a\\b\\c')
p = self.cls(r'c:\a\b\c')
- self.assertEqual(str(p), 'c:\\a\\b\\c')
+ self.assertEqual(vfspath(p), 'c:\\a\\b\\c')
p = self.cls('\\\\a\\b\\')
- self.assertEqual(str(p), '\\\\a\\b\\')
+ self.assertEqual(vfspath(p), '\\\\a\\b\\')
p = self.cls(r'\\a\b\c')
- self.assertEqual(str(p), '\\\\a\\b\\c')
+ self.assertEqual(vfspath(p), '\\\\a\\b\\c')
p = self.cls(r'\\a\b\c\d')
- self.assertEqual(str(p), '\\\\a\\b\\c\\d')
+ self.assertEqual(vfspath(p), '\\\\a\\b\\c\\d')
def test_parts(self):
P = self.cls
diff --git a/Lib/test/test_pathlib/test_pathlib.py b/Lib/test/test_pathlib/test_pathlib.py
index 8a313cc4292..37ef9fa1946 100644
--- a/Lib/test/test_pathlib/test_pathlib.py
+++ b/Lib/test/test_pathlib/test_pathlib.py
@@ -20,7 +20,7 @@ from test.support import cpython_only
from test.support import is_emscripten, is_wasi
from test.support import infinite_recursion
from test.support import os_helper
-from test.support.os_helper import TESTFN, FakePath
+from test.support.os_helper import TESTFN, FS_NONASCII, FakePath
try:
import fcntl
except ImportError:
@@ -770,12 +770,16 @@ class PurePathTest(unittest.TestCase):
self.assertEqual(self.make_uri(P('c:/')), 'file:///c:/')
self.assertEqual(self.make_uri(P('c:/a/b.c')), 'file:///c:/a/b.c')
self.assertEqual(self.make_uri(P('c:/a/b%#c')), 'file:///c:/a/b%25%23c')
- self.assertEqual(self.make_uri(P('c:/a/b\xe9')), 'file:///c:/a/b%C3%A9')
self.assertEqual(self.make_uri(P('//some/share/')), 'file://some/share/')
self.assertEqual(self.make_uri(P('//some/share/a/b.c')),
'file://some/share/a/b.c')
- self.assertEqual(self.make_uri(P('//some/share/a/b%#c\xe9')),
- 'file://some/share/a/b%25%23c%C3%A9')
+
+ from urllib.parse import quote_from_bytes
+ QUOTED_FS_NONASCII = quote_from_bytes(os.fsencode(FS_NONASCII))
+ self.assertEqual(self.make_uri(P('c:/a/b' + FS_NONASCII)),
+ 'file:///c:/a/b' + QUOTED_FS_NONASCII)
+ self.assertEqual(self.make_uri(P('//some/share/a/b%#c' + FS_NONASCII)),
+ 'file://some/share/a/b%25%23c' + QUOTED_FS_NONASCII)
@needs_windows
def test_ordering_windows(self):
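Note: the test_pathlib.py hunk swaps the hard-coded '\xe9' character for os_helper.FS_NONASCII and derives the expected percent-encoding from the filesystem encoding instead of assuming UTF-8. The quoting step looks roughly like this (a sketch, not pathlib's exact code):

    import os
    from urllib.parse import quote_from_bytes

    char = "\xe9"                             # stand-in for FS_NONASCII
    quoted = quote_from_bytes(os.fsencode(char))
    print(quoted)                             # '%C3%A9' on a UTF-8 filesystem
    print("file:///c:/a/b" + quoted)          # expected URI suffix in the test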
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index 59ef5c99309..8af2e3488b4 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -1976,12 +1976,13 @@ class TestRemoteExec(unittest.TestCase):
def tearDown(self):
test.support.reap_children()
- def _run_remote_exec_test(self, script_code, python_args=None, env=None, prologue=''):
+ def _run_remote_exec_test(self, script_code, python_args=None, env=None,
+ prologue='',
+ script_path=os_helper.TESTFN + '_remote.py'):
# Create the script that will be remotely executed
- script = os_helper.TESTFN + '_remote.py'
- self.addCleanup(os_helper.unlink, script)
+ self.addCleanup(os_helper.unlink, script_path)
- with open(script, 'w') as f:
+ with open(script_path, 'w') as f:
f.write(script_code)
# Create and run the target process
@@ -2050,7 +2051,7 @@ sock.close()
self.assertEqual(response, b"ready")
# Try remote exec on the target process
- sys.remote_exec(proc.pid, script)
+ sys.remote_exec(proc.pid, script_path)
# Signal script to continue
client_socket.sendall(b"continue")
@@ -2073,14 +2074,32 @@ sock.close()
def test_remote_exec(self):
"""Test basic remote exec functionality"""
- script = '''
-print("Remote script executed successfully!")
-'''
+ script = 'print("Remote script executed successfully!")'
returncode, stdout, stderr = self._run_remote_exec_test(script)
# self.assertEqual(returncode, 0)
self.assertIn(b"Remote script executed successfully!", stdout)
self.assertEqual(stderr, b"")
+ def test_remote_exec_bytes(self):
+ script = 'print("Remote script executed successfully!")'
+ script_path = os.fsencode(os_helper.TESTFN) + b'_bytes_remote.py'
+ returncode, stdout, stderr = self._run_remote_exec_test(script,
+ script_path=script_path)
+ self.assertIn(b"Remote script executed successfully!", stdout)
+ self.assertEqual(stderr, b"")
+
+ @unittest.skipUnless(os_helper.TESTFN_UNDECODABLE, 'requires undecodable path')
+ @unittest.skipIf(sys.platform == 'darwin',
+ 'undecodable paths are not supported on macOS')
+ def test_remote_exec_undecodable(self):
+ script = 'print("Remote script executed successfully!")'
+ script_path = os_helper.TESTFN_UNDECODABLE + b'_undecodable_remote.py'
+ for script_path in [script_path, os.fsdecode(script_path)]:
+ returncode, stdout, stderr = self._run_remote_exec_test(script,
+ script_path=script_path)
+ self.assertIn(b"Remote script executed successfully!", stdout)
+ self.assertEqual(stderr, b"")
+
def test_remote_exec_with_self_process(self):
"""Test remote exec with the target process being the same as the test process"""
diff --git a/Lib/test/test_typing.py b/Lib/test/test_typing.py
index 8c55ba4623e..246be22a0d8 100644
--- a/Lib/test/test_typing.py
+++ b/Lib/test/test_typing.py
@@ -8080,78 +8080,13 @@ class NamedTupleTests(BaseTestCase):
self.assertIs(type(a), Group)
self.assertEqual(a, (1, [2]))
- def test_namedtuple_keyword_usage(self):
- with self.assertWarnsRegex(
- DeprecationWarning,
- "Creating NamedTuple classes using keyword arguments is deprecated"
- ):
- LocalEmployee = NamedTuple("LocalEmployee", name=str, age=int)
-
- nick = LocalEmployee('Nick', 25)
- self.assertIsInstance(nick, tuple)
- self.assertEqual(nick.name, 'Nick')
- self.assertEqual(LocalEmployee.__name__, 'LocalEmployee')
- self.assertEqual(LocalEmployee._fields, ('name', 'age'))
- self.assertEqual(LocalEmployee.__annotations__, dict(name=str, age=int))
-
- with self.assertRaisesRegex(
- TypeError,
- "Either list of fields or keywords can be provided to NamedTuple, not both"
- ):
- NamedTuple('Name', [('x', int)], y=str)
-
- with self.assertRaisesRegex(
- TypeError,
- "Either list of fields or keywords can be provided to NamedTuple, not both"
- ):
- NamedTuple('Name', [], y=str)
-
- with self.assertRaisesRegex(
- TypeError,
- (
- r"Cannot pass `None` as the 'fields' parameter "
- r"and also specify fields using keyword arguments"
- )
- ):
- NamedTuple('Name', None, x=int)
-
- def test_namedtuple_special_keyword_names(self):
- with self.assertWarnsRegex(
- DeprecationWarning,
- "Creating NamedTuple classes using keyword arguments is deprecated"
- ):
- NT = NamedTuple("NT", cls=type, self=object, typename=str, fields=list)
-
- self.assertEqual(NT.__name__, 'NT')
- self.assertEqual(NT._fields, ('cls', 'self', 'typename', 'fields'))
- a = NT(cls=str, self=42, typename='foo', fields=[('bar', tuple)])
- self.assertEqual(a.cls, str)
- self.assertEqual(a.self, 42)
- self.assertEqual(a.typename, 'foo')
- self.assertEqual(a.fields, [('bar', tuple)])
-
def test_empty_namedtuple(self):
- expected_warning = re.escape(
- "Failing to pass a value for the 'fields' parameter is deprecated "
- "and will be disallowed in Python 3.15. "
- "To create a NamedTuple class with 0 fields "
- "using the functional syntax, "
- "pass an empty list, e.g. `NT1 = NamedTuple('NT1', [])`."
- )
- with self.assertWarnsRegex(DeprecationWarning, fr"^{expected_warning}$"):
- NT1 = NamedTuple('NT1')
-
- expected_warning = re.escape(
- "Passing `None` as the 'fields' parameter is deprecated "
- "and will be disallowed in Python 3.15. "
- "To create a NamedTuple class with 0 fields "
- "using the functional syntax, "
- "pass an empty list, e.g. `NT2 = NamedTuple('NT2', [])`."
- )
- with self.assertWarnsRegex(DeprecationWarning, fr"^{expected_warning}$"):
- NT2 = NamedTuple('NT2', None)
+ with self.assertRaisesRegex(TypeError, "missing.*required.*argument"):
+ BAD = NamedTuple('BAD')
- NT3 = NamedTuple('NT2', [])
+ NT1 = NamedTuple('NT1', {})
+ NT2 = NamedTuple('NT2', ())
+ NT3 = NamedTuple('NT3', [])
class CNT(NamedTuple):
pass # empty body
@@ -8166,16 +8101,18 @@ class NamedTupleTests(BaseTestCase):
def test_namedtuple_errors(self):
with self.assertRaises(TypeError):
NamedTuple.__new__()
+ with self.assertRaisesRegex(TypeError, "object is not iterable"):
+ NamedTuple('Name', None)
with self.assertRaisesRegex(
TypeError,
- "missing 1 required positional argument"
+ "missing 2 required positional arguments"
):
NamedTuple()
with self.assertRaisesRegex(
TypeError,
- "takes from 1 to 2 positional arguments but 3 were given"
+ "takes 2 positional arguments but 3 were given"
):
NamedTuple('Emp', [('name', str)], None)
@@ -8187,10 +8124,22 @@ class NamedTupleTests(BaseTestCase):
with self.assertRaisesRegex(
TypeError,
- "missing 1 required positional argument: 'typename'"
+ "got some positional-only arguments passed as keyword arguments"
):
NamedTuple(typename='Emp', name=str, id=int)
+ with self.assertRaisesRegex(
+ TypeError,
+ "got an unexpected keyword argument"
+ ):
+ NamedTuple('Name', [('x', int)], y=str)
+
+ with self.assertRaisesRegex(
+ TypeError,
+ "got an unexpected keyword argument"
+ ):
+ NamedTuple('Name', [], y=str)
+
def test_copy_and_pickle(self):
global Emp # pickle wants to reference the class by name
Emp = NamedTuple('Emp', [('name', str), ('cool', int)])
@@ -8538,6 +8487,36 @@ class TypedDictTests(BaseTestCase):
self.assertEqual(Child.__required_keys__, frozenset(['a']))
self.assertEqual(Child.__optional_keys__, frozenset())
+ def test_inheritance_pep563(self):
+ def _make_td(future, class_name, annos, base, extra_names=None):
+ lines = []
+ if future:
+ lines.append('from __future__ import annotations')
+ lines.append('from typing import TypedDict')
+ lines.append(f'class {class_name}({base}):')
+ for name, anno in annos.items():
+ lines.append(f' {name}: {anno}')
+ code = '\n'.join(lines)
+ ns = run_code(code, extra_names)
+ return ns[class_name]
+
+ for base_future in (True, False):
+ for child_future in (True, False):
+ with self.subTest(base_future=base_future, child_future=child_future):
+ base = _make_td(
+ base_future, "Base", {"base": "int"}, "TypedDict"
+ )
+ self.assertIsNotNone(base.__annotate__)
+ child = _make_td(
+ child_future, "Child", {"child": "int"}, "Base", {"Base": base}
+ )
+ base_anno = ForwardRef("int", module="builtins") if base_future else int
+ child_anno = ForwardRef("int", module="builtins") if child_future else int
+ self.assertEqual(base.__annotations__, {'base': base_anno})
+ self.assertEqual(
+ child.__annotations__, {'child': child_anno, 'base': base_anno}
+ )
+
def test_required_notrequired_keys(self):
self.assertEqual(NontotalMovie.__required_keys__,
frozenset({"title"}))
@@ -8904,39 +8883,27 @@ class TypedDictTests(BaseTestCase):
self.assertEqual(CallTypedDict.__orig_bases__, (TypedDict,))
def test_zero_fields_typeddicts(self):
- T1 = TypedDict("T1", {})
+ T1a = TypedDict("T1a", {})
+ T1b = TypedDict("T1b", [])
+ T1c = TypedDict("T1c", ())
class T2(TypedDict): pass
class T3[tvar](TypedDict): pass
S = TypeVar("S")
class T4(TypedDict, Generic[S]): pass
- expected_warning = re.escape(
- "Failing to pass a value for the 'fields' parameter is deprecated "
- "and will be disallowed in Python 3.15. "
- "To create a TypedDict class with 0 fields "
- "using the functional syntax, "
- "pass an empty dictionary, e.g. `T5 = TypedDict('T5', {})`."
- )
- with self.assertWarnsRegex(DeprecationWarning, fr"^{expected_warning}$"):
- T5 = TypedDict('T5')
-
- expected_warning = re.escape(
- "Passing `None` as the 'fields' parameter is deprecated "
- "and will be disallowed in Python 3.15. "
- "To create a TypedDict class with 0 fields "
- "using the functional syntax, "
- "pass an empty dictionary, e.g. `T6 = TypedDict('T6', {})`."
- )
- with self.assertWarnsRegex(DeprecationWarning, fr"^{expected_warning}$"):
- T6 = TypedDict('T6', None)
-
- for klass in T1, T2, T3, T4, T5, T6:
+ for klass in T1a, T1b, T1c, T2, T3, T4:
with self.subTest(klass=klass.__name__):
self.assertEqual(klass.__annotations__, {})
self.assertEqual(klass.__required_keys__, set())
self.assertEqual(klass.__optional_keys__, set())
self.assertIsInstance(klass(), dict)
+ def test_errors(self):
+ with self.assertRaisesRegex(TypeError, "missing 1 required.*argument"):
+ TypedDict('TD')
+ with self.assertRaisesRegex(TypeError, "object is not iterable"):
+ TypedDict('TD', None)
+
def test_readonly_inheritance(self):
class Base1(TypedDict):
a: ReadOnly[int]
@@ -10731,6 +10698,9 @@ class UnionGenericAliasTests(BaseTestCase):
with self.assertWarns(DeprecationWarning):
self.assertNotEqual(int, typing._UnionGenericAlias)
+ def test_hashable(self):
+ self.assertEqual(hash(typing._UnionGenericAlias), hash(Union))
+
def load_tests(loader, tests, pattern):
import doctest
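Note: the test_typing.py changes above reflect that NamedTuple no longer accepts keyword-style field definitions or an omitted/None fields argument; the deprecation scheduled for 3.15 has been completed. A short sketch of what now works and what now raises:

    from typing import NamedTuple

    Employee = NamedTuple("Employee", [("name", str), ("id", int)])   # still fine
    Empty = NamedTuple("Empty", [])                                   # zero fields: pass []

    try:
        NamedTuple("Employee", name=str, id=int)    # keyword fields: now a TypeError
    except TypeError as exc:
        print(exc)

    try:
        NamedTuple("Empty")                         # missing fields: now a TypeError
    except TypeError as exc:
        print(exc)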
diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py
index c965860fbb1..bc1030eea60 100644
--- a/Lib/test/test_urllib.py
+++ b/Lib/test/test_urllib.py
@@ -109,7 +109,7 @@ class urlopen_FileTests(unittest.TestCase):
finally:
f.close()
self.pathname = os_helper.TESTFN
- self.quoted_pathname = urllib.parse.quote(self.pathname)
+ self.quoted_pathname = urllib.parse.quote(os.fsencode(self.pathname))
self.returned_obj = urllib.request.urlopen("file:%s" % self.quoted_pathname)
def tearDown(self):
diff --git a/Lib/test/test_wave.py b/Lib/test/test_wave.py
index 5e771c8de96..6c3362857fc 100644
--- a/Lib/test/test_wave.py
+++ b/Lib/test/test_wave.py
@@ -136,32 +136,6 @@ class MiscTestCase(unittest.TestCase):
not_exported = {'WAVE_FORMAT_PCM', 'WAVE_FORMAT_EXTENSIBLE', 'KSDATAFORMAT_SUBTYPE_PCM'}
support.check__all__(self, wave, not_exported=not_exported)
- def test_read_deprecations(self):
- filename = support.findfile('pluck-pcm8.wav', subdir='audiodata')
- with wave.open(filename) as reader:
- with self.assertWarns(DeprecationWarning):
- with self.assertRaises(wave.Error):
- reader.getmark('mark')
- with self.assertWarns(DeprecationWarning):
- self.assertIsNone(reader.getmarkers())
-
- def test_write_deprecations(self):
- with io.BytesIO(b'') as tmpfile:
- with wave.open(tmpfile, 'wb') as writer:
- writer.setnchannels(1)
- writer.setsampwidth(1)
- writer.setframerate(1)
- writer.setcomptype('NONE', 'not compressed')
-
- with self.assertWarns(DeprecationWarning):
- with self.assertRaises(wave.Error):
- writer.setmark(0, 0, 'mark')
- with self.assertWarns(DeprecationWarning):
- with self.assertRaises(wave.Error):
- writer.getmark('mark')
- with self.assertWarns(DeprecationWarning):
- self.assertIsNone(writer.getmarkers())
-
class WaveLowLevelTest(unittest.TestCase):
diff --git a/Lib/test/test_zipfile/test_core.py b/Lib/test/test_zipfile/test_core.py
index ae898150658..43056978848 100644
--- a/Lib/test/test_zipfile/test_core.py
+++ b/Lib/test/test_zipfile/test_core.py
@@ -3642,7 +3642,7 @@ class EncodedMetadataTests(unittest.TestCase):
except OSError:
pass
except UnicodeEncodeError:
- self.skipTest(f'cannot encode file name {fn!r}')
+ self.skipTest(f'cannot encode file name {fn!a}')
zipfile.main(["--metadata-encoding=shift_jis", "-e", TESTFN, TESTFN2])
listing = os.listdir(TESTFN2)
diff --git a/Lib/test/test_zstd.py b/Lib/test/test_zstd.py
index 713294c4c27..53ca592ea38 100644
--- a/Lib/test/test_zstd.py
+++ b/Lib/test/test_zstd.py
@@ -288,8 +288,8 @@ class CompressorTestCase(unittest.TestCase):
KEY = 100001234
option = {CompressionParameter.compression_level: 10,
KEY: 200000000}
- pattern = r'Zstd compression parameter.*?"unknown parameter \(key %d\)"' \
- % KEY
+ pattern = (r'Invalid zstd compression parameter.*?'
+ fr'"unknown parameter \(key {KEY}\)"')
with self.assertRaisesRegex(ZstdError, pattern):
ZstdCompressor(options=option)
@@ -420,8 +420,8 @@ class DecompressorTestCase(unittest.TestCase):
KEY = 100001234
options = {DecompressionParameter.window_log_max: DecompressionParameter.window_log_max.bounds()[1],
KEY: 200000000}
- pattern = r'Zstd decompression parameter.*?"unknown parameter \(key %d\)"' \
- % KEY
+ pattern = (r'Invalid zstd decompression parameter.*?'
+ fr'"unknown parameter \(key {KEY}\)"')
with self.assertRaisesRegex(ZstdError, pattern):
ZstdDecompressor(options=options)
@@ -507,7 +507,7 @@ class DecompressorTestCase(unittest.TestCase):
self.assertFalse(d.needs_input)
def test_decompressor_arg(self):
- zd = ZstdDict(b'12345678', True)
+ zd = ZstdDict(b'12345678', is_raw=True)
with self.assertRaises(TypeError):
d = ZstdDecompressor(zstd_dict={})
@@ -1021,6 +1021,10 @@ class DecompressorFlagsTestCase(unittest.TestCase):
class ZstdDictTestCase(unittest.TestCase):
def test_is_raw(self):
+ # must be passed as a keyword argument
+ with self.assertRaises(TypeError):
+ ZstdDict(bytes(8), True)
+
# content < 8
b = b'1234567'
with self.assertRaises(ValueError):
@@ -1068,9 +1072,9 @@ class ZstdDictTestCase(unittest.TestCase):
# corrupted
zd = ZstdDict(dict_content, is_raw=False)
- with self.assertRaisesRegex(ZstdError, r'ZSTD_CDict.*?corrupted'):
+ with self.assertRaisesRegex(ZstdError, r'ZSTD_CDict.*?content\.$'):
ZstdCompressor(zstd_dict=zd.as_digested_dict)
- with self.assertRaisesRegex(ZstdError, r'ZSTD_DDict.*?corrupted'):
+ with self.assertRaisesRegex(ZstdError, r'ZSTD_DDict.*?content\.$'):
ZstdDecompressor(zd)
# wrong type
@@ -1096,7 +1100,7 @@ class ZstdDictTestCase(unittest.TestCase):
TRAINED_DICT = train_dict(SAMPLES, DICT_SIZE1)
- ZstdDict(TRAINED_DICT.dict_content, False)
+ ZstdDict(TRAINED_DICT.dict_content, is_raw=False)
self.assertNotEqual(TRAINED_DICT.dict_id, 0)
self.assertGreater(len(TRAINED_DICT.dict_content), 0)
@@ -1250,7 +1254,7 @@ class ZstdDictTestCase(unittest.TestCase):
def test_as_prefix(self):
# V1
V1 = THIS_FILE_BYTES
- zd = ZstdDict(V1, True)
+ zd = ZstdDict(V1, is_raw=True)
# V2
mid = len(V1) // 2
@@ -1266,7 +1270,7 @@ class ZstdDictTestCase(unittest.TestCase):
self.assertEqual(decompress(dat, zd.as_prefix), V2)
# use wrong prefix
- zd2 = ZstdDict(SAMPLES[0], True)
+ zd2 = ZstdDict(SAMPLES[0], is_raw=True)
try:
decompressed = decompress(dat, zd2.as_prefix)
except ZstdError: # expected
@@ -2426,6 +2430,7 @@ class OpenTestCase(unittest.TestCase):
self.assertEqual(f.write(arr), LENGTH)
self.assertEqual(f.tell(), LENGTH)
+@unittest.skip("it fails for now, see gh-133885")
class FreeThreadingMethodTests(unittest.TestCase):
@unittest.skipUnless(Py_GIL_DISABLED, 'this test can only possibly fail with GIL disabled')
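Note: per the test_zstd.py changes, ZstdDict's second parameter must now be passed as the keyword is_raw=...; a positional value raises TypeError. A hedged sketch (the dictionary content is arbitrary illustrative bytes, at least 8 bytes long):

    from compression.zstd import ZstdDict

    content = bytes(range(16))

    zd = ZstdDict(content, is_raw=True)   # raw content, keyword-only flag

    try:
        ZstdDict(content, True)           # positional is_raw: TypeError
    except TypeError as exc:
        print(exc)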
diff --git a/Lib/typing.py b/Lib/typing.py
index 2baf655256d..98af61be8b0 100644
--- a/Lib/typing.py
+++ b/Lib/typing.py
@@ -1649,6 +1649,9 @@ class _UnionGenericAliasMeta(type):
return True
return NotImplemented
+ def __hash__(self):
+ return hash(Union)
+
class _UnionGenericAlias(metaclass=_UnionGenericAliasMeta):
"""Compatibility hack.
@@ -2968,7 +2971,7 @@ class NamedTupleMeta(type):
return nm_tpl
-def NamedTuple(typename, fields=_sentinel, /, **kwargs):
+def NamedTuple(typename, fields, /):
"""Typed version of namedtuple.
Usage::
@@ -2988,48 +2991,9 @@ def NamedTuple(typename, fields=_sentinel, /, **kwargs):
Employee = NamedTuple('Employee', [('name', str), ('id', int)])
"""
- if fields is _sentinel:
- if kwargs:
- deprecated_thing = "Creating NamedTuple classes using keyword arguments"
- deprecation_msg = (
- "{name} is deprecated and will be disallowed in Python {remove}. "
- "Use the class-based or functional syntax instead."
- )
- else:
- deprecated_thing = "Failing to pass a value for the 'fields' parameter"
- example = f"`{typename} = NamedTuple({typename!r}, [])`"
- deprecation_msg = (
- "{name} is deprecated and will be disallowed in Python {remove}. "
- "To create a NamedTuple class with 0 fields "
- "using the functional syntax, "
- "pass an empty list, e.g. "
- ) + example + "."
- elif fields is None:
- if kwargs:
- raise TypeError(
- "Cannot pass `None` as the 'fields' parameter "
- "and also specify fields using keyword arguments"
- )
- else:
- deprecated_thing = "Passing `None` as the 'fields' parameter"
- example = f"`{typename} = NamedTuple({typename!r}, [])`"
- deprecation_msg = (
- "{name} is deprecated and will be disallowed in Python {remove}. "
- "To create a NamedTuple class with 0 fields "
- "using the functional syntax, "
- "pass an empty list, e.g. "
- ) + example + "."
- elif kwargs:
- raise TypeError("Either list of fields or keywords"
- " can be provided to NamedTuple, not both")
- if fields is _sentinel or fields is None:
- import warnings
- warnings._deprecated(deprecated_thing, message=deprecation_msg, remove=(3, 15))
- fields = kwargs.items()
types = {n: _type_check(t, f"field {n} annotation must be a type")
for n, t in fields}
field_names = [n for n, _ in fields]
-
nt = _make_nmtuple(typename, field_names, _make_eager_annotate(types), module=_caller())
nt.__orig_bases__ = (NamedTuple,)
return nt
@@ -3084,14 +3048,16 @@ class _TypedDictMeta(type):
else:
generic_base = ()
+ ns_annotations = ns.pop('__annotations__', None)
+
tp_dict = type.__new__(_TypedDictMeta, name, (*generic_base, dict), ns)
if not hasattr(tp_dict, '__orig_bases__'):
tp_dict.__orig_bases__ = bases
- if "__annotations__" in ns:
+ if ns_annotations is not None:
own_annotate = None
- own_annotations = ns["__annotations__"]
+ own_annotations = ns_annotations
elif (own_annotate := _lazy_annotationlib.get_annotate_from_class_namespace(ns)) is not None:
own_annotations = _lazy_annotationlib.call_annotate_function(
own_annotate, _lazy_annotationlib.Format.FORWARDREF, owner=tp_dict
@@ -3162,7 +3128,7 @@ class _TypedDictMeta(type):
if base_annotate is None:
continue
base_annos = _lazy_annotationlib.call_annotate_function(
- base.__annotate__, format, owner=base)
+ base_annotate, format, owner=base)
annos.update(base_annos)
if own_annotate is not None:
own = _lazy_annotationlib.call_annotate_function(
@@ -3198,7 +3164,7 @@ class _TypedDictMeta(type):
__instancecheck__ = __subclasscheck__
-def TypedDict(typename, fields=_sentinel, /, *, total=True):
+def TypedDict(typename, fields, /, *, total=True):
"""A simple typed namespace. At runtime it is equivalent to a plain dict.
TypedDict creates a dictionary type such that a type checker will expect all
@@ -3253,24 +3219,6 @@ def TypedDict(typename, fields=_sentinel, /, *, total=True):
username: str # the "username" key can be changed
"""
- if fields is _sentinel or fields is None:
- import warnings
-
- if fields is _sentinel:
- deprecated_thing = "Failing to pass a value for the 'fields' parameter"
- else:
- deprecated_thing = "Passing `None` as the 'fields' parameter"
-
- example = f"`{typename} = TypedDict({typename!r}, {{{{}}}})`"
- deprecation_msg = (
- "{name} is deprecated and will be disallowed in Python {remove}. "
- "To create a TypedDict class with 0 fields "
- "using the functional syntax, "
- "pass an empty dictionary, e.g. "
- ) + example + "."
- warnings._deprecated(deprecated_thing, message=deprecation_msg, remove=(3, 15))
- fields = {}
-
ns = {'__annotations__': dict(fields)}
module = _caller()
if module is not None:
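Note: with the typing.py change above, TypedDict's functional form likewise requires an explicit fields argument (an empty mapping for zero fields). A minimal sketch:

    from typing import TypedDict

    Movie = TypedDict("Movie", {"title": str, "year": int})
    Empty = TypedDict("Empty", {})            # zero fields: pass an empty dict

    try:
        TypedDict("Broken")                   # omitting fields now raises TypeError
    except TypeError as exc:
        print(exc)

    try:
        TypedDict("Broken", None)             # None is no longer accepted either
    except TypeError as exc:
        print(exc)                            # "'NoneType' object is not iterable"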
diff --git a/Lib/venv/__init__.py b/Lib/venv/__init__.py
index 15e15b7a518..dc9c5991df7 100644
--- a/Lib/venv/__init__.py
+++ b/Lib/venv/__init__.py
@@ -313,11 +313,8 @@ class EnvBuilder:
copier(context.executable, path)
if not os.path.islink(path):
os.chmod(path, 0o755)
-
- suffixes = ['python', 'python3', f'python3.{sys.version_info[1]}']
- if sys.version_info[:2] == (3, 14):
- suffixes.append('𝜋thon')
- for suffix in suffixes:
+ for suffix in ('python', 'python3',
+ f'python3.{sys.version_info[1]}'):
path = os.path.join(binpath, suffix)
if not os.path.exists(path):
# Issue 18807: make copies if
diff --git a/Lib/wave.py b/Lib/wave.py
index a34af244c3e..929609fa524 100644
--- a/Lib/wave.py
+++ b/Lib/wave.py
@@ -20,10 +20,6 @@ This returns an instance of a class with the following public methods:
compression type ('not compressed' linear samples)
getparams() -- returns a namedtuple consisting of all of the
above in the above order
- getmarkers() -- returns None (for compatibility with the
- old aifc module)
- getmark(id) -- raises an error since the mark does not
- exist (for compatibility with the old aifc module)
readframes(n) -- returns at most n frames of audio
rewind() -- rewind to the beginning of the audio stream
setpos(pos) -- seek to the specified position
@@ -341,16 +337,6 @@ class Wave_read:
self.getframerate(), self.getnframes(),
self.getcomptype(), self.getcompname())
- def getmarkers(self):
- import warnings
- warnings._deprecated("Wave_read.getmarkers", remove=(3, 15))
- return None
-
- def getmark(self, id):
- import warnings
- warnings._deprecated("Wave_read.getmark", remove=(3, 15))
- raise Error('no marks')
-
def setpos(self, pos):
if pos < 0 or pos > self._nframes:
raise Error('position not in range')
@@ -551,21 +537,6 @@ class Wave_write:
return _wave_params(self._nchannels, self._sampwidth, self._framerate,
self._nframes, self._comptype, self._compname)
- def setmark(self, id, pos, name):
- import warnings
- warnings._deprecated("Wave_write.setmark", remove=(3, 15))
- raise Error('setmark() not supported')
-
- def getmark(self, id):
- import warnings
- warnings._deprecated("Wave_write.getmark", remove=(3, 15))
- raise Error('no marks')
-
- def getmarkers(self):
- import warnings
- warnings._deprecated("Wave_write.getmarkers", remove=(3, 15))
- return None
-
def tell(self):
return self._nframeswritten
diff --git a/Lib/zipfile/__init__.py b/Lib/zipfile/__init__.py
index 88356abe8cb..894b4d37233 100644
--- a/Lib/zipfile/__init__.py
+++ b/Lib/zipfile/__init__.py
@@ -38,8 +38,8 @@ except ImportError:
__all__ = ["BadZipFile", "BadZipfile", "error",
"ZIP_STORED", "ZIP_DEFLATED", "ZIP_BZIP2", "ZIP_LZMA",
- "is_zipfile", "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile",
- "Path"]
+ "ZIP_ZSTANDARD", "is_zipfile", "ZipInfo", "ZipFile", "PyZipFile",
+ "LargeZipFile", "Path"]
class BadZipFile(Exception):
pass
@@ -812,11 +812,11 @@ def _get_compressor(compress_type, compresslevel=None):
if compresslevel is not None:
return bz2.BZ2Compressor(compresslevel)
return bz2.BZ2Compressor()
- # compresslevel is ignored for ZIP_LZMA and ZIP_ZSTANDARD
+ # compresslevel is ignored for ZIP_LZMA
elif compress_type == ZIP_LZMA:
return LZMACompressor()
elif compress_type == ZIP_ZSTANDARD:
- return zstd.ZstdCompressor()
+ return zstd.ZstdCompressor(level=compresslevel)
else:
return None
@@ -1352,7 +1352,8 @@ class ZipFile:
mode: The mode can be either read 'r', write 'w', exclusive create 'x',
or append 'a'.
compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib),
- ZIP_BZIP2 (requires bz2) or ZIP_LZMA (requires lzma).
+ ZIP_BZIP2 (requires bz2), ZIP_LZMA (requires lzma), or
+ ZIP_ZSTANDARD (requires compression.zstd).
allowZip64: if True ZipFile will create files with ZIP64 extensions when
needed, otherwise it will raise an exception when this would
be necessary.
@@ -1361,6 +1362,9 @@ class ZipFile:
When using ZIP_STORED or ZIP_LZMA this keyword has no effect.
When using ZIP_DEFLATED integers 0 through 9 are accepted.
When using ZIP_BZIP2 integers 1 through 9 are accepted.
+           When using ZIP_ZSTANDARD integers -7 through 22 are common,
+ see the CompressionParameter enum in compression.zstd for
+ details.
"""
@@ -2093,6 +2097,8 @@ class ZipFile:
min_version = max(BZIP2_VERSION, min_version)
elif zinfo.compress_type == ZIP_LZMA:
min_version = max(LZMA_VERSION, min_version)
+ elif zinfo.compress_type == ZIP_ZSTANDARD:
+ min_version = max(ZSTANDARD_VERSION, min_version)
extract_version = max(min_version, zinfo.extract_version)
create_version = max(min_version, zinfo.create_version)
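Note: the zipfile changes export ZIP_ZSTANDARD, forward compresslevel to ZstdCompressor(level=...), and document the accepted level range. A hedged usage sketch:

    import zipfile

    # Write a Zstandard-compressed archive (requires the compression.zstd module).
    with zipfile.ZipFile("example.zip", "w",
                         compression=zipfile.ZIP_ZSTANDARD,
                         compresslevel=10) as zf:
        zf.writestr("data.txt", "hello " * 1000)

    with zipfile.ZipFile("example.zip") as zf:
        print(zf.read("data.txt")[:11])       # b'hello hello'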