about summary refs log tree commit diff stats homepage
path: root/Python/specialize.c
diff options
context:
space:
mode:
Diffstat (limited to 'Python/specialize.c')
-rw-r--r--  Python/specialize.c  91
1 files changed, 51 insertions, 40 deletions
diff --git a/Python/specialize.c b/Python/specialize.c
index 562e4a19d64..fe8d04cf344 100644
--- a/Python/specialize.c
+++ b/Python/specialize.c
@@ -118,6 +118,7 @@ _Py_GetSpecializationStats(void) {
err += add_stat_dict(stats, LOAD_GLOBAL, "load_global");
err += add_stat_dict(stats, STORE_SUBSCR, "store_subscr");
err += add_stat_dict(stats, STORE_ATTR, "store_attr");
+ err += add_stat_dict(stats, JUMP_BACKWARD, "jump_backward");
err += add_stat_dict(stats, CALL, "call");
err += add_stat_dict(stats, CALL_KW, "call_kw");
err += add_stat_dict(stats, BINARY_OP, "binary_op");
@@ -440,7 +441,9 @@ _Py_PrintSpecializationStats(int to_file)
#define SPECIALIZATION_FAIL(opcode, kind) \
do { \
if (_Py_stats) { \
- _Py_stats->opcode_stats[opcode].specialization.failure_kinds[kind]++; \
+ int _kind = (kind); \
+ assert(_kind < SPECIALIZATION_FAILURE_KINDS); \
+ _Py_stats->opcode_stats[opcode].specialization.failure_kinds[_kind]++; \
} \
} while (0)
@@ -2145,7 +2148,7 @@ specialize_c_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs)
}
/* len(o) */
PyInterpreterState *interp = _PyInterpreterState_GET();
- if (callable == interp->callable_cache.len) {
+ if (callable == interp->callable_cache.len && instr->op.arg == 1) {
specialize(instr, CALL_LEN);
return 0;
}
@@ -2156,7 +2159,7 @@ specialize_c_call(PyObject *callable, _Py_CODEUNIT *instr, int nargs)
if (nargs == 2) {
/* isinstance(o1, o2) */
PyInterpreterState *interp = _PyInterpreterState_GET();
- if (callable == interp->callable_cache.isinstance) {
+ if (callable == interp->callable_cache.isinstance && instr->op.arg == 2) {
specialize(instr, CALL_ISINSTANCE);
return 0;
}
@@ -2655,6 +2658,10 @@ _Py_Specialize_BinaryOp(_PyStackRef lhs_st, _PyStackRef rhs_st, _Py_CODEUNIT *in
specialize(instr, BINARY_OP_SUBSCR_DICT);
return;
}
+ if (PyList_CheckExact(lhs) && PySlice_Check(rhs)) {
+ specialize(instr, BINARY_OP_SUBSCR_LIST_SLICE);
+ return;
+ }
unsigned int tp_version;
PyTypeObject *container_type = Py_TYPE(lhs);
PyObject *descriptor = _PyType_LookupRefAndVersion(container_type, &_Py_ID(__getitem__), &tp_version);
@@ -2898,53 +2905,57 @@ int
#endif // Py_STATS
Py_NO_INLINE void
-_Py_Specialize_ForIter(_PyStackRef iter, _Py_CODEUNIT *instr, int oparg)
+_Py_Specialize_ForIter(_PyStackRef iter, _PyStackRef null_or_index, _Py_CODEUNIT *instr, int oparg)
{
assert(ENABLE_SPECIALIZATION_FT);
assert(_PyOpcode_Caches[FOR_ITER] == INLINE_CACHE_ENTRIES_FOR_ITER);
PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter);
PyTypeObject *tp = Py_TYPE(iter_o);
+
+ if (PyStackRef_IsNull(null_or_index)) {
#ifdef Py_GIL_DISABLED
- // Only specialize for uniquely referenced iterators, so that we know
- // they're only referenced by this one thread. This is more limiting
- // than we need (even `it = iter(mylist); for item in it:` won't get
- // specialized) but we don't have a way to check whether we're the only
- // _thread_ who has access to the object.
- if (!_PyObject_IsUniquelyReferenced(iter_o))
- goto failure;
-#endif
- if (tp == &PyListIter_Type) {
-#ifdef Py_GIL_DISABLED
- _PyListIterObject *it = (_PyListIterObject *)iter_o;
- if (!_Py_IsOwnedByCurrentThread((PyObject *)it->it_seq) &&
- !_PyObject_GC_IS_SHARED(it->it_seq)) {
- // Maybe this should just set GC_IS_SHARED in a critical
- // section, instead of leaving it to the first iteration?
+ // Only specialize for uniquely referenced iterators, so that we know
+ // they're only referenced by this one thread. This is more limiting
+ // than we need (even `it = iter(mylist); for item in it:` won't get
+ // specialized) but we don't have a way to check whether we're the only
+ // _thread_ who has access to the object.
+ if (!_PyObject_IsUniquelyReferenced(iter_o)) {
goto failure;
}
#endif
- specialize(instr, FOR_ITER_LIST);
- return;
- }
- else if (tp == &PyTupleIter_Type) {
- specialize(instr, FOR_ITER_TUPLE);
- return;
- }
- else if (tp == &PyRangeIter_Type) {
- specialize(instr, FOR_ITER_RANGE);
- return;
+ if (tp == &PyRangeIter_Type) {
+ specialize(instr, FOR_ITER_RANGE);
+ return;
+ }
+ else if (tp == &PyGen_Type && oparg <= SHRT_MAX) {
+ // Generators are very much not thread-safe, so don't worry about
+ // the specialization not being thread-safe.
+ assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR ||
+ instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == INSTRUMENTED_END_FOR
+ );
+ /* Don't specialize if PEP 523 is active */
+ if (_PyInterpreterState_GET()->eval_frame) {
+ goto failure;
+ }
+ specialize(instr, FOR_ITER_GEN);
+ return;
+ }
}
- else if (tp == &PyGen_Type && oparg <= SHRT_MAX) {
- // Generators are very much not thread-safe, so don't worry about
- // the specialization not being thread-safe.
- assert(instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == END_FOR ||
- instr[oparg + INLINE_CACHE_ENTRIES_FOR_ITER + 1].op.code == INSTRUMENTED_END_FOR
- );
- /* Don't specialize if PEP 523 is active */
- if (_PyInterpreterState_GET()->eval_frame)
- goto failure;
- specialize(instr, FOR_ITER_GEN);
- return;
+ else {
+ if (tp == &PyList_Type) {
+#ifdef Py_GIL_DISABLED
+ // Only specialize for lists owned by this thread or shared
+ if (!_Py_IsOwnedByCurrentThread(iter_o) && !_PyObject_GC_IS_SHARED(iter_o)) {
+ goto failure;
+ }
+#endif
+ specialize(instr, FOR_ITER_LIST);
+ return;
+ }
+ else if (tp == &PyTuple_Type) {
+ specialize(instr, FOR_ITER_TUPLE);
+ return;
+ }
}
failure:
SPECIALIZATION_FAIL(FOR_ITER,