Diffstat (limited to 'Lib/test')
-rw-r--r--  Lib/test/pickletester.py | 5
-rw-r--r--  Lib/test/support/channels.py (renamed from Lib/test/support/interpreters/channels.py) | 4
-rw-r--r--  Lib/test/support/interpreters/__init__.py | 257
-rw-r--r--  Lib/test/support/interpreters/_crossinterp.py | 102
-rw-r--r--  Lib/test/support/interpreters/queues.py | 288
-rw-r--r--  Lib/test/test__interpchannels.py | 2
-rw-r--r--  Lib/test/test_calendar.py | 2
-rw-r--r--  Lib/test/test_capi/test_opt.py | 15
-rw-r--r--  Lib/test/test_concurrent_futures/test_interpreter_pool.py | 2
-rw-r--r--  Lib/test/test_concurrent_futures/test_shutdown.py | 58
-rw-r--r--  Lib/test/test_cprofile.py | 19
-rw-r--r--  Lib/test/test_free_threading/test_heapq.py | 240
-rw-r--r--  Lib/test/test_generated_cases.py | 38
-rw-r--r--  Lib/test/test_interpreters/test_api.py | 26
-rw-r--r--  Lib/test/test_interpreters/test_channels.py | 16
-rw-r--r--  Lib/test/test_interpreters/test_lifecycle.py | 4
-rw-r--r--  Lib/test/test_interpreters/test_queues.py | 18
-rw-r--r--  Lib/test/test_interpreters/test_stress.py | 2
-rw-r--r--  Lib/test/test_interpreters/utils.py | 2
-rw-r--r--  Lib/test/test_locale.py | 4
-rw-r--r--  Lib/test/test_random.py | 12
-rw-r--r--  Lib/test/test_sqlite3/test_cli.py | 111
-rw-r--r--  Lib/test/test_syntax.py | 7
-rw-r--r--  Lib/test/test_sys.py | 2
-rw-r--r--  Lib/test/test_threading.py | 2
-rw-r--r--  Lib/test/test_types.py | 9
26 files changed, 514 insertions(+), 733 deletions(-)
diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py
index 9d6ae3e4d00..9a3a26a8400 100644
--- a/Lib/test/pickletester.py
+++ b/Lib/test/pickletester.py
@@ -1100,6 +1100,11 @@ class AbstractUnpickleTests:
self.check_unpickling_error((pickle.UnpicklingError, OverflowError),
dumped)
+ def test_large_binstring(self):
+ errmsg = 'BINSTRING pickle has negative byte count'
+ with self.assertRaisesRegex(pickle.UnpicklingError, errmsg):
+ self.loads(b'T\0\0\0\x80')
+
def test_get(self):
pickled = b'((lp100000\ng100000\nt.'
unpickled = self.loads(pickled)
diff --git a/Lib/test/support/interpreters/channels.py b/Lib/test/support/channels.py
index 1724759b75a..b2de24d9d3e 100644
--- a/Lib/test/support/interpreters/channels.py
+++ b/Lib/test/support/channels.py
@@ -2,14 +2,14 @@
import time
import _interpchannels as _channels
-from . import _crossinterp
+from concurrent.interpreters import _crossinterp
# aliases:
from _interpchannels import (
ChannelError, ChannelNotFoundError, ChannelClosedError, # noqa: F401
ChannelEmptyError, ChannelNotEmptyError, # noqa: F401
)
-from ._crossinterp import (
+from concurrent.interpreters._crossinterp import (
UNBOUND_ERROR, UNBOUND_REMOVE,
)
diff --git a/Lib/test/support/interpreters/__init__.py b/Lib/test/support/interpreters/__init__.py
deleted file mode 100644
index 6d1b0690805..00000000000
--- a/Lib/test/support/interpreters/__init__.py
+++ /dev/null
@@ -1,257 +0,0 @@
-"""Subinterpreters High Level Module."""
-
-import threading
-import weakref
-import _interpreters
-
-# aliases:
-from _interpreters import (
- InterpreterError, InterpreterNotFoundError, NotShareableError,
- is_shareable,
-)
-
-
-__all__ = [
- 'get_current', 'get_main', 'create', 'list_all', 'is_shareable',
- 'Interpreter',
- 'InterpreterError', 'InterpreterNotFoundError', 'ExecutionFailed',
- 'NotShareableError',
- 'create_queue', 'Queue', 'QueueEmpty', 'QueueFull',
-]
-
-
-_queuemod = None
-
-def __getattr__(name):
- if name in ('Queue', 'QueueEmpty', 'QueueFull', 'create_queue'):
- global create_queue, Queue, QueueEmpty, QueueFull
- ns = globals()
- from .queues import (
- create as create_queue,
- Queue, QueueEmpty, QueueFull,
- )
- return ns[name]
- else:
- raise AttributeError(name)
-
-
-_EXEC_FAILURE_STR = """
-{superstr}
-
-Uncaught in the interpreter:
-
-{formatted}
-""".strip()
-
-class ExecutionFailed(InterpreterError):
- """An unhandled exception happened during execution.
-
- This is raised from Interpreter.exec() and Interpreter.call().
- """
-
- def __init__(self, excinfo):
- msg = excinfo.formatted
- if not msg:
- if excinfo.type and excinfo.msg:
- msg = f'{excinfo.type.__name__}: {excinfo.msg}'
- else:
- msg = excinfo.type.__name__ or excinfo.msg
- super().__init__(msg)
- self.excinfo = excinfo
-
- def __str__(self):
- try:
- formatted = self.excinfo.errdisplay
- except Exception:
- return super().__str__()
- else:
- return _EXEC_FAILURE_STR.format(
- superstr=super().__str__(),
- formatted=formatted,
- )
-
-
-def create():
- """Return a new (idle) Python interpreter."""
- id = _interpreters.create(reqrefs=True)
- return Interpreter(id, _ownsref=True)
-
-
-def list_all():
- """Return all existing interpreters."""
- return [Interpreter(id, _whence=whence)
- for id, whence in _interpreters.list_all(require_ready=True)]
-
-
-def get_current():
- """Return the currently running interpreter."""
- id, whence = _interpreters.get_current()
- return Interpreter(id, _whence=whence)
-
-
-def get_main():
- """Return the main interpreter."""
- id, whence = _interpreters.get_main()
- assert whence == _interpreters.WHENCE_RUNTIME, repr(whence)
- return Interpreter(id, _whence=whence)
-
-
-_known = weakref.WeakValueDictionary()
-
-class Interpreter:
- """A single Python interpreter.
-
- Attributes:
-
- "id" - the unique process-global ID number for the interpreter
- "whence" - indicates where the interpreter was created
-
- If the interpreter wasn't created by this module
- then any method that modifies the interpreter will fail,
- i.e. .close(), .prepare_main(), .exec(), and .call()
- """
-
- _WHENCE_TO_STR = {
- _interpreters.WHENCE_UNKNOWN: 'unknown',
- _interpreters.WHENCE_RUNTIME: 'runtime init',
- _interpreters.WHENCE_LEGACY_CAPI: 'legacy C-API',
- _interpreters.WHENCE_CAPI: 'C-API',
- _interpreters.WHENCE_XI: 'cross-interpreter C-API',
- _interpreters.WHENCE_STDLIB: '_interpreters module',
- }
-
- def __new__(cls, id, /, _whence=None, _ownsref=None):
- # There is only one instance for any given ID.
- if not isinstance(id, int):
- raise TypeError(f'id must be an int, got {id!r}')
- id = int(id)
- if _whence is None:
- if _ownsref:
- _whence = _interpreters.WHENCE_STDLIB
- else:
- _whence = _interpreters.whence(id)
- assert _whence in cls._WHENCE_TO_STR, repr(_whence)
- if _ownsref is None:
- _ownsref = (_whence == _interpreters.WHENCE_STDLIB)
- try:
- self = _known[id]
- assert hasattr(self, '_ownsref')
- except KeyError:
- self = super().__new__(cls)
- _known[id] = self
- self._id = id
- self._whence = _whence
- self._ownsref = _ownsref
- if _ownsref:
- # This may raise InterpreterNotFoundError:
- _interpreters.incref(id)
- return self
-
- def __repr__(self):
- return f'{type(self).__name__}({self.id})'
-
- def __hash__(self):
- return hash(self._id)
-
- def __del__(self):
- self._decref()
-
- # for pickling:
- def __getnewargs__(self):
- return (self._id,)
-
- # for pickling:
- def __getstate__(self):
- return None
-
- def _decref(self):
- if not self._ownsref:
- return
- self._ownsref = False
- try:
- _interpreters.decref(self._id)
- except InterpreterNotFoundError:
- pass
-
- @property
- def id(self):
- return self._id
-
- @property
- def whence(self):
- return self._WHENCE_TO_STR[self._whence]
-
- def is_running(self):
- """Return whether or not the identified interpreter is running."""
- return _interpreters.is_running(self._id)
-
- # Everything past here is available only to interpreters created by
- # interpreters.create().
-
- def close(self):
- """Finalize and destroy the interpreter.
-
- Attempting to destroy the current interpreter results
- in an InterpreterError.
- """
- return _interpreters.destroy(self._id, restrict=True)
-
- def prepare_main(self, ns=None, /, **kwargs):
- """Bind the given values into the interpreter's __main__.
-
- The values must be shareable.
- """
- ns = dict(ns, **kwargs) if ns is not None else kwargs
- _interpreters.set___main___attrs(self._id, ns, restrict=True)
-
- def exec(self, code, /):
- """Run the given source code in the interpreter.
-
- This is essentially the same as calling the builtin "exec"
- with this interpreter, using the __dict__ of its __main__
- module as both globals and locals.
-
- There is no return value.
-
- If the code raises an unhandled exception then an ExecutionFailed
- exception is raised, which summarizes the unhandled exception.
- The actual exception is discarded because objects cannot be
- shared between interpreters.
-
- This blocks the current Python thread until done. During
- that time, the previous interpreter is allowed to run
- in other threads.
- """
- excinfo = _interpreters.exec(self._id, code, restrict=True)
- if excinfo is not None:
- raise ExecutionFailed(excinfo)
-
- def _call(self, callable, args, kwargs):
- res, excinfo = _interpreters.call(self._id, callable, args, kwargs, restrict=True)
- if excinfo is not None:
- raise ExecutionFailed(excinfo)
- return res
-
- def call(self, callable, /, *args, **kwargs):
- """Call the object in the interpreter with given args/kwargs.
-
- Nearly all callables, args, kwargs, and return values are
- supported. All "shareable" objects are supported, as are
- "stateless" functions (meaning non-closures that do not use
- any globals). This method will fall back to pickle.
-
- If the callable raises an exception then the error display
- (including full traceback) is sent back between the interpreters
- and an ExecutionFailed exception is raised, much like what
- happens with Interpreter.exec().
- """
- return self._call(callable, args, kwargs)
-
- def call_in_thread(self, callable, /, *args, **kwargs):
- """Return a new thread that calls the object in the interpreter.
-
- The return value and any raised exception are discarded.
- """
- t = threading.Thread(target=self._call, args=(callable, args, kwargs))
- t.start()
- return t
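
Note: the helper deleted above is the high-level subinterpreters API that the
rest of this commit re-imports as concurrent.interpreters. A minimal usage
sketch, assuming the stdlib package keeps the create()/Interpreter surface
shown in the deleted code:

    from concurrent import interpreters

    interp = interpreters.create()
    try:
        interp.prepare_main(greeting='hello')       # bind shareable values into __main__
        interp.exec('print(greeting, "from a subinterpreter")')
        print(interp.call(len, (1, 2, 3)))          # -> 3; may fall back to pickle
    except interpreters.ExecutionFailed as exc:
        print('uncaught error in the subinterpreter:', exc)
    finally:
        interp.close()
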
diff --git a/Lib/test/support/interpreters/_crossinterp.py b/Lib/test/support/interpreters/_crossinterp.py
deleted file mode 100644
index 544e197ba4c..00000000000
--- a/Lib/test/support/interpreters/_crossinterp.py
+++ /dev/null
@@ -1,102 +0,0 @@
-"""Common code between queues and channels."""
-
-
-class ItemInterpreterDestroyed(Exception):
- """Raised when trying to get an item whose interpreter was destroyed."""
-
-
-class classonly:
- """A non-data descriptor that makes a value only visible on the class.
-
- This is like the "classmethod" builtin, but does not show up on
- instances of the class. It may be used as a decorator.
- """
-
- def __init__(self, value):
- self.value = value
- self.getter = classmethod(value).__get__
- self.name = None
-
- def __set_name__(self, cls, name):
- if self.name is not None:
- raise TypeError('already used')
- self.name = name
-
- def __get__(self, obj, cls):
- if obj is not None:
- raise AttributeError(self.name)
- # called on the class
- return self.getter(None, cls)
-
-
-class UnboundItem:
- """Represents a cross-interpreter item no longer bound to an interpreter.
-
- An item is unbound when the interpreter that added it to the
- cross-interpreter container is destroyed.
- """
-
- __slots__ = ()
-
- @classonly
- def singleton(cls, kind, module, name='UNBOUND'):
- doc = cls.__doc__.replace('cross-interpreter container', kind)
- doc = doc.replace('cross-interpreter', kind)
- subclass = type(
- f'Unbound{kind.capitalize()}Item',
- (cls,),
- dict(
- _MODULE=module,
- _NAME=name,
- __doc__=doc,
- ),
- )
- return object.__new__(subclass)
-
- _MODULE = __name__
- _NAME = 'UNBOUND'
-
- def __new__(cls):
- raise Exception(f'use {cls._MODULE}.{cls._NAME}')
-
- def __repr__(self):
- return f'{self._MODULE}.{self._NAME}'
-# return f'interpreters.queues.UNBOUND'
-
-
-UNBOUND = object.__new__(UnboundItem)
-UNBOUND_ERROR = object()
-UNBOUND_REMOVE = object()
-
-_UNBOUND_CONSTANT_TO_FLAG = {
- UNBOUND_REMOVE: 1,
- UNBOUND_ERROR: 2,
- UNBOUND: 3,
-}
-_UNBOUND_FLAG_TO_CONSTANT = {v: k
- for k, v in _UNBOUND_CONSTANT_TO_FLAG.items()}
-
-
-def serialize_unbound(unbound):
- op = unbound
- try:
- flag = _UNBOUND_CONSTANT_TO_FLAG[op]
- except KeyError:
- raise NotImplementedError(f'unsupported unbound replacement op {op!r}')
- return flag,
-
-
-def resolve_unbound(flag, exctype_destroyed):
- try:
- op = _UNBOUND_FLAG_TO_CONSTANT[flag]
- except KeyError:
- raise NotImplementedError(f'unsupported unbound replacement op {flag!r}')
- if op is UNBOUND_REMOVE:
- # "remove" not possible here
- raise NotImplementedError
- elif op is UNBOUND_ERROR:
- raise exctype_destroyed("item's original interpreter destroyed")
- elif op is UNBOUND:
- return UNBOUND
- else:
- raise NotImplementedError(repr(op))
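
Note: the module deleted above is what encodes the UNBOUND* sentinels as small
integer flags stored alongside each queue/channel item. A hedged round-trip
sketch, assuming the copy now living at concurrent.interpreters._crossinterp
keeps the same serialize_unbound()/resolve_unbound() helpers (the import
rewrites elsewhere in this commit only show the UNBOUND_ERROR/UNBOUND_REMOVE
constants being pulled from it):

    from concurrent.interpreters import _crossinterp

    flag, = _crossinterp.serialize_unbound(_crossinterp.UNBOUND_ERROR)  # UNBOUND_ERROR -> 2
    try:
        _crossinterp.resolve_unbound(flag, RuntimeError)
    except RuntimeError as exc:
        print(exc)                                   # item's original interpreter destroyed

    print(_crossinterp.resolve_unbound(3, RuntimeError))  # flag 3 -> the UNBOUND sentinel
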
diff --git a/Lib/test/support/interpreters/queues.py b/Lib/test/support/interpreters/queues.py
deleted file mode 100644
index 99987f2f692..00000000000
--- a/Lib/test/support/interpreters/queues.py
+++ /dev/null
@@ -1,288 +0,0 @@
-"""Cross-interpreter Queues High Level Module."""
-
-import queue
-import time
-import weakref
-import _interpqueues as _queues
-from . import _crossinterp
-
-# aliases:
-from _interpqueues import (
- QueueError, QueueNotFoundError,
-)
-from ._crossinterp import (
- UNBOUND_ERROR, UNBOUND_REMOVE,
-)
-
-__all__ = [
- 'UNBOUND', 'UNBOUND_ERROR', 'UNBOUND_REMOVE',
- 'create', 'list_all',
- 'Queue',
- 'QueueError', 'QueueNotFoundError', 'QueueEmpty', 'QueueFull',
- 'ItemInterpreterDestroyed',
-]
-
-
-class QueueEmpty(QueueError, queue.Empty):
- """Raised from get_nowait() when the queue is empty.
-
- It is also raised from get() if it times out.
- """
-
-
-class QueueFull(QueueError, queue.Full):
- """Raised from put_nowait() when the queue is full.
-
- It is also raised from put() if it times out.
- """
-
-
-class ItemInterpreterDestroyed(QueueError,
- _crossinterp.ItemInterpreterDestroyed):
- """Raised from get() and get_nowait()."""
-
-
-_SHARED_ONLY = 0
-_PICKLED = 1
-
-
-UNBOUND = _crossinterp.UnboundItem.singleton('queue', __name__)
-
-
-def _serialize_unbound(unbound):
- if unbound is UNBOUND:
- unbound = _crossinterp.UNBOUND
- return _crossinterp.serialize_unbound(unbound)
-
-
-def _resolve_unbound(flag):
- resolved = _crossinterp.resolve_unbound(flag, ItemInterpreterDestroyed)
- if resolved is _crossinterp.UNBOUND:
- resolved = UNBOUND
- return resolved
-
-
-def create(maxsize=0, *, unbounditems=UNBOUND):
- """Return a new cross-interpreter queue.
-
- The queue may be used to pass data safely between interpreters.
-
- "unbounditems" sets the default for Queue.put(); see that method for
- supported values. The default value is UNBOUND, which replaces
- the unbound item.
- """
- unbound = _serialize_unbound(unbounditems)
- unboundop, = unbound
- qid = _queues.create(maxsize, unboundop, -1)
- self = Queue(qid)
- self._set_unbound(unboundop, unbounditems)
- return self
-
-
-def list_all():
- """Return a list of all open queues."""
- queues = []
- for qid, unboundop, _ in _queues.list_all():
- self = Queue(qid)
- if not hasattr(self, '_unbound'):
- self._set_unbound(unboundop)
- else:
- assert self._unbound[0] == unboundop
- queues.append(self)
- return queues
-
-
-_known_queues = weakref.WeakValueDictionary()
-
-class Queue:
- """A cross-interpreter queue."""
-
- def __new__(cls, id, /):
- # There is only one instance for any given ID.
- if isinstance(id, int):
- id = int(id)
- else:
- raise TypeError(f'id must be an int, got {id!r}')
- try:
- self = _known_queues[id]
- except KeyError:
- self = super().__new__(cls)
- self._id = id
- _known_queues[id] = self
- _queues.bind(id)
- return self
-
- def __del__(self):
- try:
- _queues.release(self._id)
- except QueueNotFoundError:
- pass
- try:
- del _known_queues[self._id]
- except KeyError:
- pass
-
- def __repr__(self):
- return f'{type(self).__name__}({self.id})'
-
- def __hash__(self):
- return hash(self._id)
-
- # for pickling:
- def __getnewargs__(self):
- return (self._id,)
-
- # for pickling:
- def __getstate__(self):
- return None
-
- def _set_unbound(self, op, items=None):
- assert not hasattr(self, '_unbound')
- if items is None:
- items = _resolve_unbound(op)
- unbound = (op, items)
- self._unbound = unbound
- return unbound
-
- @property
- def id(self):
- return self._id
-
- @property
- def unbounditems(self):
- try:
- _, items = self._unbound
- except AttributeError:
- op, _ = _queues.get_queue_defaults(self._id)
- _, items = self._set_unbound(op)
- return items
-
- @property
- def maxsize(self):
- try:
- return self._maxsize
- except AttributeError:
- self._maxsize = _queues.get_maxsize(self._id)
- return self._maxsize
-
- def empty(self):
- return self.qsize() == 0
-
- def full(self):
- return _queues.is_full(self._id)
-
- def qsize(self):
- return _queues.get_count(self._id)
-
- def put(self, obj, timeout=None, *,
- unbounditems=None,
- _delay=10 / 1000, # 10 milliseconds
- ):
- """Add the object to the queue.
-
- This blocks while the queue is full.
-
- For most objects, the object received through Queue.get() will
- be a new one, equivalent to the original and not sharing any
- actual underlying data. The notable exceptions include
- cross-interpreter types (like Queue) and memoryview, where the
- underlying data is actually shared. Furthermore, some types
- can be sent through a queue more efficiently than others. This
- group includes various immutable types like int, str, bytes, and
- tuple (if the items are likewise efficiently shareable). See interpreters.is_shareable().
-
- "unbounditems" controls the behavior of Queue.get() for the given
- object if the current interpreter (calling put()) is later
- destroyed.
-
- If "unbounditems" is None (the default) then it uses the
- queue's default, set with create_queue(),
- which is usually UNBOUND.
-
- If "unbounditems" is UNBOUND_ERROR then get() will raise an
- ItemInterpreterDestroyed exception if the original interpreter
- has been destroyed. This does not otherwise affect the queue;
- the next call to put() will work like normal, returning the next
- item in the queue.
-
- If "unbounditems" is UNBOUND_REMOVE then the item will be removed
- from the queue as soon as the original interpreter is destroyed.
- Be aware that this will introduce an imbalance between put()
- and get() calls.
-
- If "unbounditems" is UNBOUND then it is returned by get() in place
- of the unbound item.
- """
- if unbounditems is None:
- unboundop = -1
- else:
- unboundop, = _serialize_unbound(unbounditems)
- if timeout is not None:
- timeout = int(timeout)
- if timeout < 0:
- raise ValueError(f'timeout value must be non-negative')
- end = time.time() + timeout
- while True:
- try:
- _queues.put(self._id, obj, unboundop)
- except QueueFull as exc:
- if timeout is not None and time.time() >= end:
- raise # re-raise
- time.sleep(_delay)
- else:
- break
-
- def put_nowait(self, obj, *, unbounditems=None):
- if unbounditems is None:
- unboundop = -1
- else:
- unboundop, = _serialize_unbound(unbounditems)
- _queues.put(self._id, obj, unboundop)
-
- def get(self, timeout=None, *,
- _delay=10 / 1000, # 10 milliseconds
- ):
- """Return the next object from the queue.
-
- This blocks while the queue is empty.
-
- If the next item's original interpreter has been destroyed
- then the "next object" is determined by the value of the
- "unbounditems" argument to put().
- """
- if timeout is not None:
- timeout = int(timeout)
- if timeout < 0:
- raise ValueError(f'timeout value must be non-negative')
- end = time.time() + timeout
- while True:
- try:
- obj, unboundop = _queues.get(self._id)
- except QueueEmpty as exc:
- if timeout is not None and time.time() >= end:
- raise # re-raise
- time.sleep(_delay)
- else:
- break
- if unboundop is not None:
- assert obj is None, repr(obj)
- return _resolve_unbound(unboundop)
- return obj
-
- def get_nowait(self):
- """Return the next object from the channel.
-
- If the queue is empty then raise QueueEmpty. Otherwise this
- is the same as get().
- """
- try:
- obj, unboundop = _queues.get(self._id)
- except QueueEmpty as exc:
- raise # re-raise
- if unboundop is not None:
- assert obj is None, repr(obj)
- return _resolve_unbound(unboundop)
- return obj
-
-
-_queues._register_heap_types(Queue, QueueEmpty, QueueFull)
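
Note: the queue module deleted above documents the unbounditems semantics
(UNBOUND, UNBOUND_ERROR, UNBOUND_REMOVE) in Queue.put(). A short sketch of the
same API as the tests now import it, assuming concurrent.interpreters._queues
matches the deleted module:

    from concurrent.interpreters import _queues as queues

    q = queues.create(unbounditems=queues.UNBOUND)     # queue-wide default for unbound items
    q.put(b'spam', unbounditems=queues.UNBOUND_ERROR)  # per-item override
    print(q.qsize())                                   # 1
    print(q.get())                                     # b'spam'
    try:
        q.get_nowait()
    except queues.QueueEmpty:
        print('queue drained')
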
diff --git a/Lib/test/test__interpchannels.py b/Lib/test/test__interpchannels.py
index 88eee03a3de..858d31a73cf 100644
--- a/Lib/test/test__interpchannels.py
+++ b/Lib/test/test__interpchannels.py
@@ -9,7 +9,7 @@ import unittest
from test.support import import_helper, skip_if_sanitizer
_channels = import_helper.import_module('_interpchannels')
-from test.support.interpreters import _crossinterp
+from concurrent.interpreters import _crossinterp
from test.test__interpreters import (
_interpreters,
_run_output,
diff --git a/Lib/test/test_calendar.py b/Lib/test/test_calendar.py
index 7ade4271b7a..bc39c86b8cf 100644
--- a/Lib/test/test_calendar.py
+++ b/Lib/test/test_calendar.py
@@ -417,7 +417,7 @@ class OutputTestCase(unittest.TestCase):
self.check_htmlcalendar_encoding('utf-8', 'utf-8')
def test_output_htmlcalendar_encoding_default(self):
- self.check_htmlcalendar_encoding(None, sys.getdefaultencoding())
+ self.check_htmlcalendar_encoding(None, 'utf-8')
def test_yeardatescalendar(self):
def shrink(cal):
diff --git a/Lib/test/test_capi/test_opt.py b/Lib/test/test_capi/test_opt.py
index 8a3819dabe4..bf22ef2a592 100644
--- a/Lib/test/test_capi/test_opt.py
+++ b/Lib/test/test_capi/test_opt.py
@@ -2275,6 +2275,21 @@ class TestUopsOptimization(unittest.TestCase):
self.assertIn("_UNPACK_SEQUENCE_TWO_TUPLE", uops)
self.assertNotIn("_GUARD_TOS_TUPLE", uops)
+ def test_unary_invert_long_type(self):
+ def testfunc(n):
+ for _ in range(n):
+ a = 9397
+ x = ~a + ~a
+
+ testfunc(TIER2_THRESHOLD)
+
+ ex = get_first_executor(testfunc)
+ self.assertIsNotNone(ex)
+ uops = get_opnames(ex)
+
+ self.assertNotIn("_GUARD_TOS_INT", uops)
+ self.assertNotIn("_GUARD_NOS_INT", uops)
+
def global_identity(x):
return x
diff --git a/Lib/test/test_concurrent_futures/test_interpreter_pool.py b/Lib/test/test_concurrent_futures/test_interpreter_pool.py
index f6c62ae4b20..5fd5684e103 100644
--- a/Lib/test/test_concurrent_futures/test_interpreter_pool.py
+++ b/Lib/test/test_concurrent_futures/test_interpreter_pool.py
@@ -8,10 +8,10 @@ import unittest
from concurrent.futures.interpreter import (
ExecutionFailed, BrokenInterpreterPool,
)
+from concurrent.interpreters import _queues as queues
import _interpreters
from test import support
import test.test_asyncio.utils as testasyncio_utils
-from test.support.interpreters import queues
from .executor import ExecutorTest, mul
from .util import BaseTestCase, InterpreterPoolMixin, setup_module
diff --git a/Lib/test/test_concurrent_futures/test_shutdown.py b/Lib/test/test_concurrent_futures/test_shutdown.py
index 7a4065afd46..99b315b47e2 100644
--- a/Lib/test/test_concurrent_futures/test_shutdown.py
+++ b/Lib/test/test_concurrent_futures/test_shutdown.py
@@ -330,6 +330,64 @@ class ProcessPoolShutdownTest(ExecutorShutdownTest):
# shutdown.
assert all([r == abs(v) for r, v in zip(res, range(-5, 5))])
+ @classmethod
+ def _failing_task_gh_132969(cls, n):
+ raise ValueError("failing task")
+
+ @classmethod
+ def _good_task_gh_132969(cls, n):
+ time.sleep(0.1 * n)
+ return n
+
+ def _run_test_issue_gh_132969(self, max_workers):
+ # max_workers=2 will repro exception
+ # max_workers=4 will repro exception and then hang
+
+ # Repro conditions
+ # max_tasks_per_child=1
+ # a task ends abnormally
+ # shutdown(wait=False) is called
+ start_method = self.get_context().get_start_method()
+ if (start_method == "fork" or
+ (start_method == "forkserver" and sys.platform.startswith("win"))):
+ self.skipTest(f"Skipping test for {start_method = }")
+ executor = futures.ProcessPoolExecutor(
+ max_workers=max_workers,
+ max_tasks_per_child=1,
+ mp_context=self.get_context())
+ f1 = executor.submit(ProcessPoolShutdownTest._good_task_gh_132969, 1)
+ f2 = executor.submit(ProcessPoolShutdownTest._failing_task_gh_132969, 2)
+ f3 = executor.submit(ProcessPoolShutdownTest._good_task_gh_132969, 3)
+ result = 0
+ try:
+ result += f1.result()
+ result += f2.result()
+ result += f3.result()
+ except ValueError:
+ # stop processing results upon first exception
+ pass
+
+ # Ensure that the executor cleans up after called
+ # shutdown with wait=False
+ executor_manager_thread = executor._executor_manager_thread
+ executor.shutdown(wait=False)
+ time.sleep(0.2)
+ executor_manager_thread.join()
+ return result
+
+ def test_shutdown_gh_132969_case_1(self):
+ # gh-132969: test that exception "object of type 'NoneType' has no len()"
+ # is not raised when shutdown(wait=False) is called.
+ result = self._run_test_issue_gh_132969(2)
+ self.assertEqual(result, 1)
+
+ def test_shutdown_gh_132969_case_2(self):
+ # gh-132969: test that process does not hang and
+ # exception "object of type 'NoneType' has no len()" is not raised
+ # when shutdown(wait=False) is called.
+ result = self._run_test_issue_gh_132969(4)
+ self.assertEqual(result, 1)
+
create_executor_tests(globals(), ProcessPoolShutdownTest,
executor_mixins=(ProcessPoolForkMixin,
diff --git a/Lib/test/test_cprofile.py b/Lib/test/test_cprofile.py
index 192c8eab26e..57e818b1c68 100644
--- a/Lib/test/test_cprofile.py
+++ b/Lib/test/test_cprofile.py
@@ -125,21 +125,22 @@ class CProfileTest(ProfileTest):
"""
gh-106152
generator.throw() should trigger a call in cProfile
- In the any() call below, there should be two entries for the generator:
- * one for the call to __next__ which gets a True and terminates any
- * one when the generator is garbage collected which will effectively
- do a throw.
"""
+
+ def gen():
+ yield
+
pr = self.profilerclass()
pr.enable()
- any(a == 1 for a in (1, 2))
+ g = gen()
+ try:
+ g.throw(SyntaxError)
+ except SyntaxError:
+ pass
pr.disable()
pr.create_stats()
- for func, (cc, nc, _, _, _) in pr.stats.items():
- if func[2] == "<genexpr>":
- self.assertEqual(cc, 1)
- self.assertEqual(nc, 1)
+ self.assertTrue(any("throw" in func[2] for func in pr.stats.keys())),
def test_bad_descriptor(self):
# gh-132250
diff --git a/Lib/test/test_free_threading/test_heapq.py b/Lib/test/test_free_threading/test_heapq.py
new file mode 100644
index 00000000000..f75fb264c8a
--- /dev/null
+++ b/Lib/test/test_free_threading/test_heapq.py
@@ -0,0 +1,240 @@
+import unittest
+
+import heapq
+
+from enum import Enum
+from threading import Thread, Barrier
+from random import shuffle, randint
+
+from test.support import threading_helper
+from test import test_heapq
+
+
+NTHREADS = 10
+OBJECT_COUNT = 5_000
+
+
+class Heap(Enum):
+ MIN = 1
+ MAX = 2
+
+
+@threading_helper.requires_working_threading()
+class TestHeapq(unittest.TestCase):
+ def setUp(self):
+ self.test_heapq = test_heapq.TestHeapPython()
+
+ def test_racing_heapify(self):
+ heap = list(range(OBJECT_COUNT))
+ shuffle(heap)
+
+ self.run_concurrently(
+ worker_func=heapq.heapify, args=(heap,), nthreads=NTHREADS
+ )
+ self.test_heapq.check_invariant(heap)
+
+ def test_racing_heappush(self):
+ heap = []
+
+ def heappush_func(heap):
+ for item in reversed(range(OBJECT_COUNT)):
+ heapq.heappush(heap, item)
+
+ self.run_concurrently(
+ worker_func=heappush_func, args=(heap,), nthreads=NTHREADS
+ )
+ self.test_heapq.check_invariant(heap)
+
+ def test_racing_heappop(self):
+ heap = self.create_heap(OBJECT_COUNT, Heap.MIN)
+
+ # Each thread pops (OBJECT_COUNT / NTHREADS) items
+ self.assertEqual(OBJECT_COUNT % NTHREADS, 0)
+ per_thread_pop_count = OBJECT_COUNT // NTHREADS
+
+ def heappop_func(heap, pop_count):
+ local_list = []
+ for _ in range(pop_count):
+ item = heapq.heappop(heap)
+ local_list.append(item)
+
+ # Each local list should be sorted
+ self.assertTrue(self.is_sorted_ascending(local_list))
+
+ self.run_concurrently(
+ worker_func=heappop_func,
+ args=(heap, per_thread_pop_count),
+ nthreads=NTHREADS,
+ )
+ self.assertEqual(len(heap), 0)
+
+ def test_racing_heappushpop(self):
+ heap = self.create_heap(OBJECT_COUNT, Heap.MIN)
+ pushpop_items = self.create_random_list(-5_000, 10_000, OBJECT_COUNT)
+
+ def heappushpop_func(heap, pushpop_items):
+ for item in pushpop_items:
+ popped_item = heapq.heappushpop(heap, item)
+ self.assertTrue(popped_item <= item)
+
+ self.run_concurrently(
+ worker_func=heappushpop_func,
+ args=(heap, pushpop_items),
+ nthreads=NTHREADS,
+ )
+ self.assertEqual(len(heap), OBJECT_COUNT)
+ self.test_heapq.check_invariant(heap)
+
+ def test_racing_heapreplace(self):
+ heap = self.create_heap(OBJECT_COUNT, Heap.MIN)
+ replace_items = self.create_random_list(-5_000, 10_000, OBJECT_COUNT)
+
+ def heapreplace_func(heap, replace_items):
+ for item in replace_items:
+ heapq.heapreplace(heap, item)
+
+ self.run_concurrently(
+ worker_func=heapreplace_func,
+ args=(heap, replace_items),
+ nthreads=NTHREADS,
+ )
+ self.assertEqual(len(heap), OBJECT_COUNT)
+ self.test_heapq.check_invariant(heap)
+
+ def test_racing_heapify_max(self):
+ max_heap = list(range(OBJECT_COUNT))
+ shuffle(max_heap)
+
+ self.run_concurrently(
+ worker_func=heapq.heapify_max, args=(max_heap,), nthreads=NTHREADS
+ )
+ self.test_heapq.check_max_invariant(max_heap)
+
+ def test_racing_heappush_max(self):
+ max_heap = []
+
+ def heappush_max_func(max_heap):
+ for item in range(OBJECT_COUNT):
+ heapq.heappush_max(max_heap, item)
+
+ self.run_concurrently(
+ worker_func=heappush_max_func, args=(max_heap,), nthreads=NTHREADS
+ )
+ self.test_heapq.check_max_invariant(max_heap)
+
+ def test_racing_heappop_max(self):
+ max_heap = self.create_heap(OBJECT_COUNT, Heap.MAX)
+
+ # Each thread pops (OBJECT_COUNT / NTHREADS) items
+ self.assertEqual(OBJECT_COUNT % NTHREADS, 0)
+ per_thread_pop_count = OBJECT_COUNT // NTHREADS
+
+ def heappop_max_func(max_heap, pop_count):
+ local_list = []
+ for _ in range(pop_count):
+ item = heapq.heappop_max(max_heap)
+ local_list.append(item)
+
+ # Each local list should be sorted
+ self.assertTrue(self.is_sorted_descending(local_list))
+
+ self.run_concurrently(
+ worker_func=heappop_max_func,
+ args=(max_heap, per_thread_pop_count),
+ nthreads=NTHREADS,
+ )
+ self.assertEqual(len(max_heap), 0)
+
+ def test_racing_heappushpop_max(self):
+ max_heap = self.create_heap(OBJECT_COUNT, Heap.MAX)
+ pushpop_items = self.create_random_list(-5_000, 10_000, OBJECT_COUNT)
+
+ def heappushpop_max_func(max_heap, pushpop_items):
+ for item in pushpop_items:
+ popped_item = heapq.heappushpop_max(max_heap, item)
+ self.assertTrue(popped_item >= item)
+
+ self.run_concurrently(
+ worker_func=heappushpop_max_func,
+ args=(max_heap, pushpop_items),
+ nthreads=NTHREADS,
+ )
+ self.assertEqual(len(max_heap), OBJECT_COUNT)
+ self.test_heapq.check_max_invariant(max_heap)
+
+ def test_racing_heapreplace_max(self):
+ max_heap = self.create_heap(OBJECT_COUNT, Heap.MAX)
+ replace_items = self.create_random_list(-5_000, 10_000, OBJECT_COUNT)
+
+ def heapreplace_max_func(max_heap, replace_items):
+ for item in replace_items:
+ heapq.heapreplace_max(max_heap, item)
+
+ self.run_concurrently(
+ worker_func=heapreplace_max_func,
+ args=(max_heap, replace_items),
+ nthreads=NTHREADS,
+ )
+ self.assertEqual(len(max_heap), OBJECT_COUNT)
+ self.test_heapq.check_max_invariant(max_heap)
+
+ @staticmethod
+ def is_sorted_ascending(lst):
+ """
+ Check if the list is sorted in ascending order (non-decreasing).
+ """
+ return all(lst[i - 1] <= lst[i] for i in range(1, len(lst)))
+
+ @staticmethod
+ def is_sorted_descending(lst):
+ """
+ Check if the list is sorted in descending order (non-increasing).
+ """
+ return all(lst[i - 1] >= lst[i] for i in range(1, len(lst)))
+
+ @staticmethod
+ def create_heap(size, heap_kind):
+ """
+ Create a min/max heap where elements are in the range (0, size - 1) and
+ shuffled before heapify.
+ """
+ heap = list(range(OBJECT_COUNT))
+ shuffle(heap)
+ if heap_kind == Heap.MIN:
+ heapq.heapify(heap)
+ else:
+ heapq.heapify_max(heap)
+
+ return heap
+
+ @staticmethod
+ def create_random_list(a, b, size):
+ """
+ Create a list of random numbers between a and b (inclusive).
+ """
+ return [randint(-a, b) for _ in range(size)]
+
+ def run_concurrently(self, worker_func, args, nthreads):
+ """
+ Run the worker function concurrently in multiple threads.
+ """
+ barrier = Barrier(nthreads)
+
+ def wrapper_func(*args):
+ # Wait for all threads to reach this point before proceeding.
+ barrier.wait()
+ worker_func(*args)
+
+ with threading_helper.catch_threading_exception() as cm:
+ workers = (
+ Thread(target=wrapper_func, args=args) for _ in range(nthreads)
+ )
+ with threading_helper.start_threads(workers):
+ pass
+
+ # Worker threads should not raise any exceptions
+ self.assertIsNone(cm.exc_value)
+
+
+if __name__ == "__main__":
+ unittest.main()
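
Note: the heart of the new file's run_concurrently() helper is a Barrier that
releases every worker at the same moment so the heapq calls genuinely race. A
standalone sketch of that pattern (it runs on any build; on a free-threaded
build it exercises heapq's internal locking):

    import heapq
    from random import shuffle
    from threading import Barrier, Thread

    NTHREADS = 4
    heap = list(range(1_000))
    shuffle(heap)

    barrier = Barrier(NTHREADS)

    def worker():
        barrier.wait()          # line every thread up, then release them together
        heapq.heapify(heap)     # all threads operate on the same list

    threads = [Thread(target=worker) for _ in range(NTHREADS)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()

    # The heap invariant should still hold afterwards.
    assert all(heap[(i - 1) // 2] <= heap[i] for i in range(1, len(heap)))
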
diff --git a/Lib/test/test_generated_cases.py b/Lib/test/test_generated_cases.py
index 37046d8e1c0..6411e4318b6 100644
--- a/Lib/test/test_generated_cases.py
+++ b/Lib/test/test_generated_cases.py
@@ -56,14 +56,14 @@ class TestEffects(unittest.TestCase):
def test_effect_sizes(self):
stack = Stack()
inputs = [
- x := StackItem("x", None, "1"),
- y := StackItem("y", None, "oparg"),
- z := StackItem("z", None, "oparg*2"),
+ x := StackItem("x", "1"),
+ y := StackItem("y", "oparg"),
+ z := StackItem("z", "oparg*2"),
]
outputs = [
- StackItem("x", None, "1"),
- StackItem("b", None, "oparg*4"),
- StackItem("c", None, "1"),
+ StackItem("x", "1"),
+ StackItem("b", "oparg*4"),
+ StackItem("c", "1"),
]
null = CWriter.null()
stack.pop(z, null)
@@ -1103,32 +1103,6 @@ class TestGeneratedCases(unittest.TestCase):
"""
self.run_cases_test(input, output)
- def test_pointer_to_stackref(self):
- input = """
- inst(OP, (arg: _PyStackRef * -- out)) {
- out = *arg;
- DEAD(arg);
- }
- """
- output = """
- TARGET(OP) {
- #if Py_TAIL_CALL_INTERP
- int opcode = OP;
- (void)(opcode);
- #endif
- frame->instr_ptr = next_instr;
- next_instr += 1;
- INSTRUCTION_STATS(OP);
- _PyStackRef *arg;
- _PyStackRef out;
- arg = (_PyStackRef *)stack_pointer[-1].bits;
- out = *arg;
- stack_pointer[-1] = out;
- DISPATCH();
- }
- """
- self.run_cases_test(input, output)
-
def test_unused_cached_value(self):
input = """
op(FIRST, (arg1 -- out)) {
diff --git a/Lib/test/test_interpreters/test_api.py b/Lib/test/test_interpreters/test_api.py
index b3c9ef8efba..1403cd145b6 100644
--- a/Lib/test/test_interpreters/test_api.py
+++ b/Lib/test/test_interpreters/test_api.py
@@ -13,11 +13,11 @@ from test.support import script_helper
from test.support import import_helper
# Raise SkipTest if subinterpreters not supported.
_interpreters = import_helper.import_module('_interpreters')
+from concurrent import interpreters
from test.support import Py_GIL_DISABLED
-from test.support import interpreters
from test.support import force_not_colorized
import test._crossinterp_definitions as defs
-from test.support.interpreters import (
+from concurrent.interpreters import (
InterpreterError, InterpreterNotFoundError, ExecutionFailed,
)
from .utils import (
@@ -133,7 +133,7 @@ class CreateTests(TestBase):
main, = interpreters.list_all()
interp = interpreters.create()
out = _run_output(interp, dedent("""
- from test.support import interpreters
+ from concurrent import interpreters
interp = interpreters.create()
print(interp.id)
"""))
@@ -196,7 +196,7 @@ class GetCurrentTests(TestBase):
main = interpreters.get_main()
interp = interpreters.create()
out = _run_output(interp, dedent("""
- from test.support import interpreters
+ from concurrent import interpreters
cur = interpreters.get_current()
print(cur.id)
"""))
@@ -213,7 +213,7 @@ class GetCurrentTests(TestBase):
with self.subTest('subinterpreter'):
interp = interpreters.create()
out = _run_output(interp, dedent("""
- from test.support import interpreters
+ from concurrent import interpreters
cur = interpreters.get_current()
print(id(cur))
cur = interpreters.get_current()
@@ -225,7 +225,7 @@ class GetCurrentTests(TestBase):
with self.subTest('per-interpreter'):
interp = interpreters.create()
out = _run_output(interp, dedent("""
- from test.support import interpreters
+ from concurrent import interpreters
cur = interpreters.get_current()
print(id(cur))
"""))
@@ -582,7 +582,7 @@ class TestInterpreterClose(TestBase):
main, = interpreters.list_all()
interp = interpreters.create()
out = _run_output(interp, dedent(f"""
- from test.support import interpreters
+ from concurrent import interpreters
interp = interpreters.Interpreter({interp.id})
try:
interp.close()
@@ -599,7 +599,7 @@ class TestInterpreterClose(TestBase):
self.assertEqual(set(interpreters.list_all()),
{main, interp1, interp2})
interp1.exec(dedent(f"""
- from test.support import interpreters
+ from concurrent import interpreters
interp2 = interpreters.Interpreter({interp2.id})
interp2.close()
interp3 = interpreters.create()
@@ -806,7 +806,7 @@ class TestInterpreterExec(TestBase):
ham()
""")
scriptfile = self.make_script('script.py', tempdir, text="""
- from test.support import interpreters
+ from concurrent import interpreters
def script():
import spam
@@ -827,7 +827,7 @@ class TestInterpreterExec(TestBase):
~~~~~~~~~~~^^^^^^^^
{interpmod_line.strip()}
raise ExecutionFailed(excinfo)
- test.support.interpreters.ExecutionFailed: RuntimeError: uh-oh!
+ concurrent.interpreters.ExecutionFailed: RuntimeError: uh-oh!
Uncaught in the interpreter:
@@ -1281,7 +1281,7 @@ class TestInterpreterCall(TestBase):
# no module indirection
with self.subTest('no indirection'):
text = run(f"""
- from test.support import interpreters
+ from concurrent import interpreters
def spam():
# This a global var...
@@ -1301,7 +1301,7 @@ class TestInterpreterCall(TestBase):
""")
with self.subTest('indirect as func, direct interp'):
text = run(f"""
- from test.support import interpreters
+ from concurrent import interpreters
import mymod
def spam():
@@ -1317,7 +1317,7 @@ class TestInterpreterCall(TestBase):
# indirect as func, indirect interp
new_mod('mymod', f"""
- from test.support import interpreters
+ from concurrent import interpreters
def run(func):
interp = interpreters.create()
return interp.call(func)
diff --git a/Lib/test/test_interpreters/test_channels.py b/Lib/test/test_interpreters/test_channels.py
index 0c027b17cea..109ddf34453 100644
--- a/Lib/test/test_interpreters/test_channels.py
+++ b/Lib/test/test_interpreters/test_channels.py
@@ -8,8 +8,8 @@ import time
from test.support import import_helper
# Raise SkipTest if subinterpreters not supported.
_channels = import_helper.import_module('_interpchannels')
-from test.support import interpreters
-from test.support.interpreters import channels
+from concurrent import interpreters
+from test.support import channels
from .utils import _run_output, TestBase
@@ -171,7 +171,7 @@ class TestSendRecv(TestBase):
def test_send_recv_same_interpreter(self):
interp = interpreters.create()
interp.exec(dedent("""
- from test.support.interpreters import channels
+ from test.support import channels
r, s = channels.create()
orig = b'spam'
s.send_nowait(orig)
@@ -244,7 +244,7 @@ class TestSendRecv(TestBase):
def test_send_recv_nowait_same_interpreter(self):
interp = interpreters.create()
interp.exec(dedent("""
- from test.support.interpreters import channels
+ from test.support import channels
r, s = channels.create()
orig = b'spam'
s.send_nowait(orig)
@@ -387,7 +387,7 @@ class TestSendRecv(TestBase):
interp = interpreters.create()
_run_output(interp, dedent(f"""
- from test.support.interpreters import channels
+ from test.support import channels
sch = channels.SendChannel({sch.id})
obj1 = b'spam'
obj2 = b'eggs'
@@ -482,7 +482,7 @@ class TestSendRecv(TestBase):
self.assertEqual(_channels.get_count(rch.id), 0)
_run_output(interp, dedent(f"""
- from test.support.interpreters import channels
+ from test.support import channels
sch = channels.SendChannel({sch.id})
sch.send_nowait(1, unbounditems=channels.UNBOUND)
sch.send_nowait(2, unbounditems=channels.UNBOUND_ERROR)
@@ -518,7 +518,7 @@ class TestSendRecv(TestBase):
sch.send_nowait(1)
_run_output(interp1, dedent(f"""
- from test.support.interpreters import channels
+ from test.support import channels
rch = channels.RecvChannel({rch.id})
sch = channels.SendChannel({sch.id})
obj1 = rch.recv()
@@ -526,7 +526,7 @@ class TestSendRecv(TestBase):
sch.send_nowait(obj1, unbounditems=channels.UNBOUND_REMOVE)
"""))
_run_output(interp2, dedent(f"""
- from test.support.interpreters import channels
+ from test.support import channels
rch = channels.RecvChannel({rch.id})
sch = channels.SendChannel({sch.id})
obj2 = rch.recv()
diff --git a/Lib/test/test_interpreters/test_lifecycle.py b/Lib/test/test_interpreters/test_lifecycle.py
index ac24f6568ac..15537ac6cc8 100644
--- a/Lib/test/test_interpreters/test_lifecycle.py
+++ b/Lib/test/test_interpreters/test_lifecycle.py
@@ -119,7 +119,7 @@ class StartupTests(TestBase):
# The main interpreter's sys.path[0] should be used by subinterpreters.
script = '''
import sys
- from test.support import interpreters
+ from concurrent import interpreters
orig = sys.path[0]
@@ -170,7 +170,7 @@ class FinalizationTests(TestBase):
# is reported, even when subinterpreters get cleaned up at the end.
import subprocess
argv = [sys.executable, '-c', '''if True:
- from test.support import interpreters
+ from concurrent import interpreters
interp = interpreters.create()
raise Exception
''']
diff --git a/Lib/test/test_interpreters/test_queues.py b/Lib/test/test_interpreters/test_queues.py
index 757373904d7..3e982d76e86 100644
--- a/Lib/test/test_interpreters/test_queues.py
+++ b/Lib/test/test_interpreters/test_queues.py
@@ -7,8 +7,8 @@ import unittest
from test.support import import_helper, Py_DEBUG
# Raise SkipTest if subinterpreters not supported.
_queues = import_helper.import_module('_interpqueues')
-from test.support import interpreters
-from test.support.interpreters import queues, _crossinterp
+from concurrent import interpreters
+from concurrent.interpreters import _queues as queues, _crossinterp
from .utils import _run_output, TestBase as _TestBase
@@ -126,7 +126,7 @@ class QueueTests(TestBase):
interp = interpreters.create()
interp.exec(dedent(f"""
- from test.support.interpreters import queues
+ from concurrent.interpreters import _queues as queues
queue1 = queues.Queue({queue1.id})
"""));
@@ -324,7 +324,7 @@ class TestQueueOps(TestBase):
def test_put_get_same_interpreter(self):
interp = interpreters.create()
interp.exec(dedent("""
- from test.support.interpreters import queues
+ from concurrent.interpreters import _queues as queues
queue = queues.create()
"""))
for methname in ('get', 'get_nowait'):
@@ -351,7 +351,7 @@ class TestQueueOps(TestBase):
out = _run_output(
interp,
dedent(f"""
- from test.support.interpreters import queues
+ from concurrent.interpreters import _queues as queues
queue1 = queues.Queue({queue1.id})
queue2 = queues.Queue({queue2.id})
assert queue1.qsize() == 1, 'expected: queue1.qsize() == 1'
@@ -390,7 +390,7 @@ class TestQueueOps(TestBase):
interp = interpreters.create()
_run_output(interp, dedent(f"""
- from test.support.interpreters import queues
+ from concurrent.interpreters import _queues as queues
queue = queues.Queue({queue.id})
obj1 = b'spam'
obj2 = b'eggs'
@@ -468,7 +468,7 @@ class TestQueueOps(TestBase):
queue = queues.create()
interp = interpreters.create()
_run_output(interp, dedent(f"""
- from test.support.interpreters import queues
+ from concurrent.interpreters import _queues as queues
queue = queues.Queue({queue.id})
queue.put(1, unbounditems=queues.UNBOUND)
queue.put(2, unbounditems=queues.UNBOUND_ERROR)
@@ -504,14 +504,14 @@ class TestQueueOps(TestBase):
queue.put(1)
_run_output(interp1, dedent(f"""
- from test.support.interpreters import queues
+ from concurrent.interpreters import _queues as queues
queue = queues.Queue({queue.id})
obj1 = queue.get()
queue.put(2, unbounditems=queues.UNBOUND)
queue.put(obj1, unbounditems=queues.UNBOUND_REMOVE)
"""))
_run_output(interp2, dedent(f"""
- from test.support.interpreters import queues
+ from concurrent.interpreters import _queues as queues
queue = queues.Queue({queue.id})
obj2 = queue.get()
obj1 = queue.get()
diff --git a/Lib/test/test_interpreters/test_stress.py b/Lib/test/test_interpreters/test_stress.py
index fae2f38cb55..e25e67a0d4f 100644
--- a/Lib/test/test_interpreters/test_stress.py
+++ b/Lib/test/test_interpreters/test_stress.py
@@ -6,7 +6,7 @@ from test.support import import_helper
from test.support import threading_helper
# Raise SkipTest if subinterpreters not supported.
import_helper.import_module('_interpreters')
-from test.support import interpreters
+from concurrent import interpreters
from .utils import TestBase
diff --git a/Lib/test/test_interpreters/utils.py b/Lib/test/test_interpreters/utils.py
index c25e0fb7475..ae09aa457b4 100644
--- a/Lib/test/test_interpreters/utils.py
+++ b/Lib/test/test_interpreters/utils.py
@@ -21,7 +21,7 @@ try:
import _interpreters
except ImportError as exc:
raise unittest.SkipTest(str(exc))
-from test.support import interpreters
+from concurrent import interpreters
try:
diff --git a/Lib/test/test_locale.py b/Lib/test/test_locale.py
index 455d2af37ef..55b502e52ca 100644
--- a/Lib/test/test_locale.py
+++ b/Lib/test/test_locale.py
@@ -387,6 +387,10 @@ class NormalizeTest(unittest.TestCase):
self.check('c', 'C')
self.check('posix', 'C')
+ def test_c_utf8(self):
+ self.check('c.utf8', 'C.UTF-8')
+ self.check('C.UTF-8', 'C.UTF-8')
+
def test_english(self):
self.check('en', 'en_US.ISO8859-1')
self.check('EN', 'en_US.ISO8859-1')
diff --git a/Lib/test/test_random.py b/Lib/test/test_random.py
index 54910cd8054..31ebcb3b8b0 100644
--- a/Lib/test/test_random.py
+++ b/Lib/test/test_random.py
@@ -14,6 +14,15 @@ from test import support
from fractions import Fraction
from collections import abc, Counter
+
+class MyIndex:
+ def __init__(self, value):
+ self.value = value
+
+ def __index__(self):
+ return self.value
+
+
class TestBasicOps:
# Superclass with tests common to all generators.
# Subclasses must arrange for self.gen to retrieve the Random instance
@@ -809,6 +818,9 @@ class MersenneTwister_TestBasicOps(TestBasicOps, unittest.TestCase):
self.gen.seed(1234567)
self.assertEqual(self.gen.getrandbits(100),
97904845777343510404718956115)
+ self.gen.seed(1234567)
+ self.assertEqual(self.gen.getrandbits(MyIndex(100)),
+ 97904845777343510404718956115)
def test_getrandbits_2G_bits(self):
size = 2**31
diff --git a/Lib/test/test_sqlite3/test_cli.py b/Lib/test/test_sqlite3/test_cli.py
index 37e0f74f688..d993e28c4bb 100644
--- a/Lib/test/test_sqlite3/test_cli.py
+++ b/Lib/test/test_sqlite3/test_cli.py
@@ -1,14 +1,22 @@
"""sqlite3 CLI tests."""
import sqlite3
+import sys
+import textwrap
import unittest
+import unittest.mock
+import os
from sqlite3.__main__ import main as cli
+from test.support.import_helper import import_module
from test.support.os_helper import TESTFN, unlink
+from test.support.pty_helper import run_pty
from test.support import (
captured_stdout,
captured_stderr,
captured_stdin,
force_not_colorized_test_class,
+ requires_subprocess,
+ verbose,
)
@@ -200,5 +208,108 @@ class InteractiveSession(unittest.TestCase):
self.assertIn('\x1b[1;35mOperationalError (SQLITE_ERROR)\x1b[0m: '
'\x1b[35mnear "sel": syntax error\x1b[0m', err)
+
+@requires_subprocess()
+@force_not_colorized_test_class
+class Completion(unittest.TestCase):
+ PS1 = "sqlite> "
+
+ @classmethod
+ def setUpClass(cls):
+ _sqlite3 = import_module("_sqlite3")
+ if not hasattr(_sqlite3, "SQLITE_KEYWORDS"):
+ raise unittest.SkipTest("unable to determine SQLite keywords")
+
+ readline = import_module("readline")
+ if readline.backend == "editline":
+ raise unittest.SkipTest("libedit readline is not supported")
+
+ def write_input(self, input_, env=None):
+ script = textwrap.dedent("""
+ import readline
+ from sqlite3.__main__ import main
+
+ readline.parse_and_bind("set colored-completion-prefix off")
+ main()
+ """)
+ return run_pty(script, input_, env)
+
+ def test_complete_sql_keywords(self):
+ # List candidates starting with 'S', there should be multiple matches.
+ input_ = b"S\t\tEL\t 1;\n.quit\n"
+ output = self.write_input(input_)
+ self.assertIn(b"SELECT", output)
+ self.assertIn(b"SET", output)
+ self.assertIn(b"SAVEPOINT", output)
+ self.assertIn(b"(1,)", output)
+
+ # Keywords are completed in upper case for even lower case user input.
+ input_ = b"sel\t\t 1;\n.quit\n"
+ output = self.write_input(input_)
+ self.assertIn(b"SELECT", output)
+ self.assertIn(b"(1,)", output)
+
+ @unittest.skipIf(sys.platform.startswith("freebsd"),
+ "Two actual tabs are inserted when there are no matching"
+ " completions in the pseudo-terminal opened by run_pty()"
+ " on FreeBSD")
+ def test_complete_no_match(self):
+ input_ = b"xyzzy\t\t\b\b\b\b\b\b\b.quit\n"
+ # Set NO_COLOR to disable coloring for self.PS1.
+ output = self.write_input(input_, env={**os.environ, "NO_COLOR": "1"})
+ lines = output.decode().splitlines()
+ indices = (
+ i for i, line in enumerate(lines, 1)
+ if line.startswith(f"{self.PS1}xyzzy")
+ )
+ line_num = next(indices, -1)
+ self.assertNotEqual(line_num, -1)
+ # Completions occupy lines, assert no extra lines when there is nothing
+ # to complete.
+ self.assertEqual(line_num, len(lines))
+
+ def test_complete_no_input(self):
+ from _sqlite3 import SQLITE_KEYWORDS
+
+ script = textwrap.dedent("""
+ import readline
+ from sqlite3.__main__ import main
+
+ # Configure readline to ...:
+ # - hide control sequences surrounding each candidate
+ # - hide "Display all xxx possibilities? (y or n)"
+ # - hide "--More--"
+ # - show candidates one per line
+ readline.parse_and_bind("set colored-completion-prefix off")
+ readline.parse_and_bind("set colored-stats off")
+ readline.parse_and_bind("set completion-query-items 0")
+ readline.parse_and_bind("set page-completions off")
+ readline.parse_and_bind("set completion-display-width 0")
+ readline.parse_and_bind("set show-all-if-ambiguous off")
+ readline.parse_and_bind("set show-all-if-unmodified off")
+
+ main()
+ """)
+ input_ = b"\t\t.quit\n"
+ output = run_pty(script, input_, env={**os.environ, "NO_COLOR": "1"})
+ try:
+ lines = output.decode().splitlines()
+ indices = [
+ i for i, line in enumerate(lines)
+ if line.startswith(self.PS1)
+ ]
+ self.assertEqual(len(indices), 2)
+ start, end = indices
+ candidates = [l.strip() for l in lines[start+1:end]]
+ self.assertEqual(candidates, sorted(SQLITE_KEYWORDS))
+ except:
+ if verbose:
+ print(' PTY output: '.center(30, '-'))
+ print(output.decode(errors='replace'))
+ print(' end PTY output '.center(30, '-'))
+ raise
+
+
+
if __name__ == "__main__":
unittest.main()
diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py
index 13aaba405e3..b09e524a756 100644
--- a/Lib/test/test_syntax.py
+++ b/Lib/test/test_syntax.py
@@ -382,6 +382,13 @@ SyntaxError: invalid syntax
Traceback (most recent call last):
SyntaxError: invalid syntax
+# But prefixes of soft keywords should
+# still raise specialized errors
+
+>>> (mat x)
+Traceback (most recent call last):
+SyntaxError: invalid syntax. Perhaps you forgot a comma?
+
From compiler_complex_args():
>>> def f(None=1):
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index bf415894903..39e62027f03 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -24,7 +24,7 @@ from test.support import import_helper
from test.support import force_not_colorized
from test.support import SHORT_TIMEOUT
try:
- from test.support import interpreters
+ from concurrent import interpreters
except ImportError:
interpreters = None
import textwrap
diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
index 59b3a749d2f..125c2744698 100644
--- a/Lib/test/test_threading.py
+++ b/Lib/test/test_threading.py
@@ -28,7 +28,7 @@ from test import lock_tests
from test import support
try:
- from test.support import interpreters
+ from concurrent import interpreters
except ImportError:
interpreters = None
diff --git a/Lib/test/test_types.py b/Lib/test/test_types.py
index 9011e0e1962..a117413301b 100644
--- a/Lib/test/test_types.py
+++ b/Lib/test/test_types.py
@@ -2513,15 +2513,16 @@ class SubinterpreterTests(unittest.TestCase):
def setUpClass(cls):
global interpreters
try:
- from test.support import interpreters
+ from concurrent import interpreters
except ModuleNotFoundError:
raise unittest.SkipTest('subinterpreters required')
- import test.support.interpreters.channels # noqa: F401
+ from test.support import channels # noqa: F401
+ cls.create_channel = staticmethod(channels.create)
@cpython_only
@no_rerun('channels (and queues) might have a refleak; see gh-122199')
def test_static_types_inherited_slots(self):
- rch, sch = interpreters.channels.create()
+ rch, sch = self.create_channel()
script = textwrap.dedent("""
import test.support
@@ -2547,7 +2548,7 @@ class SubinterpreterTests(unittest.TestCase):
main_results = collate_results(raw)
interp = interpreters.create()
- interp.exec('from test.support import interpreters')
+ interp.exec('from concurrent import interpreters')
interp.prepare_main(sch=sch)
interp.exec(script)
raw = rch.recv_nowait()