Diffstat (limited to 'Python/specialize.c')
-rw-r--r--  Python/specialize.c  78
1 file changed, 41 insertions(+), 37 deletions(-)
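The change below gives _Py_Specialize_ForIter a second stack slot, null_or_index: when it is NULL the loop is still driven by an iterator object (range iterators, generators), and when it carries an index the list/tuple specializations now key on the sequence type itself (PyList_Type/PyTuple_Type) rather than on PyListIter_Type/PyTupleIter_Type. As a rough standalone sketch of that dispatch shape only, the model below uses invented names (ItemKind, pick_for_iter_specialization, SPEC_* values) and omits the free-threading and PEP 523 checks; it is not CPython code.

/* Simplified model of the new FOR_ITER dispatch; all names are invented. */
#include <stdbool.h>
#include <stdio.h>

typedef enum { KIND_RANGE_ITER, KIND_GENERATOR, KIND_LIST, KIND_TUPLE, KIND_OTHER } ItemKind;

typedef enum { SPEC_FOR_ITER_RANGE, SPEC_FOR_ITER_GEN, SPEC_FOR_ITER_LIST,
               SPEC_FOR_ITER_TUPLE, SPEC_FAILURE } Spec;

/* has_index models "null_or_index is not NULL": the loop carries an index
   and iterates the sequence directly instead of going through an iterator. */
static Spec
pick_for_iter_specialization(bool has_index, ItemKind kind)
{
    if (!has_index) {
        /* Iterator-object path: range iterators and generators. */
        if (kind == KIND_RANGE_ITER) {
            return SPEC_FOR_ITER_RANGE;
        }
        if (kind == KIND_GENERATOR) {
            return SPEC_FOR_ITER_GEN;
        }
    }
    else {
        /* Index path: the value on the stack is the sequence itself. */
        if (kind == KIND_LIST) {
            return SPEC_FOR_ITER_LIST;
        }
        if (kind == KIND_TUPLE) {
            return SPEC_FOR_ITER_TUPLE;
        }
    }
    return SPEC_FAILURE;
}

int
main(void)
{
    printf("%d\n", pick_for_iter_specialization(true, KIND_LIST));   /* SPEC_FOR_ITER_LIST */
    printf("%d\n", pick_for_iter_specialization(false, KIND_LIST));  /* SPEC_FAILURE */
    return 0;
}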
diff --git a/Python/specialize.c b/Python/specialize.c
index 06995d46d8b..92f79d39d55 100644
--- a/Python/specialize.c
+++ b/Python/specialize.c
@@ -2904,53 +2904,57 @@ int
 #endif // Py_STATS
 Py_NO_INLINE void
-_Py_Specialize_ForIter(_PyStackRef iter, _Py_CODEUNIT *instr, int oparg)
+_Py_Specialize_ForIter(_PyStackRef iter, _PyStackRef null_or_index, _Py_CODEUNIT *instr, int oparg)
 {
     assert(ENABLE_SPECIALIZATION_FT);
     assert(_PyOpcode_Caches[FOR_ITER] == INLINE_CACHE_ENTRIES_FOR_ITER);
     PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
     PyTypeObject *tp = Py_TYPE(iter_o);
+
+    if (PyStackRef_IsNull(null_or_index)) {
 #ifdef Py_GIL_DISABLED
-    // Only specialize for uniquely referenced iterators, so that we know
-    // they're only referenced by this one thread. This is more limiting
-    // than we need (even `it = iter(mylist); for item in it:` won't get
-    // specialized) but we don't have a way to check whether we're the only
-    // _thread_ who has access to the object.
-    if (!_PyObject_IsUniquelyReferenced(iter_o))
-        goto failure;
-#endif
-    if (tp == &PyListIter_Type) {
-#ifdef Py_GIL_DISABLED
-        _PyListIterObject *it = (_PyListIterObject *)iter_o;
-        if (!_Py_IsOwnedByCurrentThread((PyObject *)it->it_seq) &&
-            !_PyObject_GC_IS_SHARED(it->it_seq)) {
-            // Maybe this should just set GC_IS_SHARED in a critical
-            // section, instead of leaving it to the first iteration?
+        // Only specialize for uniquely referenced iterators, so that we know
+        // they're only referenced by this one thread. This is more limiting
+        // than we need (even `it = iter(mylist); for item in it:` won't get
+        // specialized) but we don't have a way to check whether we're the only
+        // _thread_ who has access to the object.
+        if (!_PyObject_IsUniquelyReferenced(iter_o)) {
             goto failure;
         }
 #endif
-        specialize(instr, FOR_ITER_LIST);
-        return;
-    }
-    else if (tp == &PyTupleIter_Type) {
-        specialize(instr, FOR_ITER_TUPLE);
-        return;
-    }
-    else if (tp == &PyRangeIter_Type) {
-        specialize(instr, FOR_ITER_RANGE);
-        return;
+        if (tp == &PyRangeIter_Type) {
+            specialize(instr, FOR_ITER_RANGE);
+            return;
+        }
+        else if (tp == &PyGen_Type && oparg <= SHRT_MAX) {
+            // Generators are very much not thread-safe, so don't worry about
+            // the specialization not being thread-safe.
+            assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR ||
+                   instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == INSTRUMENTED_END_FOR
+            );
+            /* Don't specialize if PEP 523 is active */
+            if (_PyInterpreterState_GET()->eval_frame) {
+                goto failure;
+            }
+            specialize(instr, FOR_ITER_GEN);
+            return;
+        }
     }
-    else if (tp == &PyGen_Type && oparg <= SHRT_MAX) {
-        // Generators are very much not thread-safe, so don't worry about
-        // the specialization not being thread-safe.
-        assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR ||
-               instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == INSTRUMENTED_END_FOR
-        );
-        /* Don't specialize if PEP 523 is active */
-        if (_PyInterpreterState_GET()->eval_frame)
-            goto failure;
-        specialize(instr, FOR_ITER_GEN);
-        return;
+    else {
+        if (tp == &PyList_Type) {
+#ifdef Py_GIL_DISABLED
+            // Only specialize for lists owned by this thread or shared
+            if (!_Py_IsOwnedByCurrentThread(iter_o) && !_PyObject_GC_IS_SHARED(iter_o)) {
+                goto failure;
+            }
+#endif
+            specialize(instr, FOR_ITER_LIST);
+            return;
+        }
+        else if (tp == &PyTuple_Type) {
+            specialize(instr, FOR_ITER_TUPLE);
+            return;
+        }
     }
 failure:
     SPECIALIZATION_FAIL(FOR_ITER,