Diffstat (limited to 'Python/bytecodes.c')
-rw-r--r-- | Python/bytecodes.c | 492
1 file changed, 258 insertions(+), 234 deletions(-)
diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 6a076662640..a1f8d360528 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -295,55 +295,18 @@ dummy_func( value2 = PyStackRef_Borrow(GETLOCAL(oparg2)); } - family(LOAD_CONST, 0) = { - LOAD_CONST_MORTAL, - LOAD_CONST_IMMORTAL, - }; - inst(LOAD_CONST, (-- value)) { - /* We can't do this in the bytecode compiler as - * marshalling can intern strings and make them immortal. */ - PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg); - value = PyStackRef_FromPyObjectNew(obj); -#if ENABLE_SPECIALIZATION_FT -#ifdef Py_GIL_DISABLED - uint8_t expected = LOAD_CONST; - if (!_Py_atomic_compare_exchange_uint8( - &this_instr->op.code, &expected, - _Py_IsImmortal(obj) ? LOAD_CONST_IMMORTAL : LOAD_CONST_MORTAL)) { - // We might lose a race with instrumentation, which we don't care about. - assert(expected >= MIN_INSTRUMENTED_OPCODE); - } -#else - if (this_instr->op.code == LOAD_CONST) { - this_instr->op.code = _Py_IsImmortal(obj) ? LOAD_CONST_IMMORTAL : LOAD_CONST_MORTAL; - } -#endif -#endif - } - - inst(LOAD_CONST_MORTAL, (-- value)) { PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg); - value = PyStackRef_FromPyObjectNewMortal(obj); - } - - inst(LOAD_CONST_IMMORTAL, (-- value)) { - PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg); - assert(_Py_IsImmortal(obj)); - value = PyStackRef_FromPyObjectImmortal(obj); + value = PyStackRef_FromPyObjectBorrow(obj); } replicate(4) inst(LOAD_SMALL_INT, (-- value)) { assert(oparg < _PY_NSMALLPOSINTS); PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg]; - value = PyStackRef_FromPyObjectImmortal(obj); + value = PyStackRef_FromPyObjectBorrow(obj); } replicate(8) inst(STORE_FAST, (value --)) { - assert( - ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) || - PyStackRef_IsHeapSafe(value) - ); _PyStackRef tmp = GETLOCAL(oparg); GETLOCAL(oparg) = value; DEAD(value); @@ -355,10 +318,6 @@ dummy_func( }; inst(STORE_FAST_LOAD_FAST, (value1 -- value2)) { - assert( - ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) || - PyStackRef_IsHeapSafe(value1) - ); uint32_t oparg1 = oparg >> 4; uint32_t oparg2 = oparg & 15; _PyStackRef tmp = GETLOCAL(oparg1); @@ -369,14 +328,6 @@ dummy_func( } inst(STORE_FAST_STORE_FAST, (value2, value1 --)) { - assert( - ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) || - PyStackRef_IsHeapSafe(value1) - ); - assert( - ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) || - PyStackRef_IsHeapSafe(value2) - ); uint32_t oparg1 = oparg >> 4; uint32_t oparg2 = oparg & 15; _PyStackRef tmp = GETLOCAL(oparg1); @@ -390,7 +341,12 @@ dummy_func( } pure inst(POP_TOP, (value --)) { - PyStackRef_CLOSE(value); + PyStackRef_XCLOSE(value); + } + + tier2 op(_POP_TWO, (nos, tos --)) { + PyStackRef_CLOSE(tos); + PyStackRef_CLOSE(nos); } pure inst(PUSH_NULL, (-- res)) { @@ -406,9 +362,14 @@ dummy_func( PyStackRef_CLOSE(value); } - macro(POP_ITER) = POP_TOP; - no_save_ip tier1 inst(INSTRUMENTED_END_FOR, (receiver, value -- receiver)) { + inst(POP_ITER, (iter, index_or_null -- )) { + (void)index_or_null; + DEAD(index_or_null); + PyStackRef_CLOSE(iter); + } + + no_save_ip tier1 inst(INSTRUMENTED_END_FOR, (receiver, index_or_null, value -- receiver, index_or_null)) { /* Need to create a fake StopIteration error here, * to conform to PEP 380 */ if (PyStackRef_GenCheck(receiver)) { @@ -420,7 +381,9 @@ dummy_func( PyStackRef_CLOSE(value); } - tier1 inst(INSTRUMENTED_POP_ITER, (iter -- )) { + tier1 
inst(INSTRUMENTED_POP_ITER, (iter, index_or_null -- )) { + (void)index_or_null; + DEAD(index_or_null); INSTRUMENTED_JUMP(prev_instr, this_instr+1, PY_MONITORING_EVENT_BRANCH_RIGHT); PyStackRef_CLOSE(iter); } @@ -942,7 +905,7 @@ dummy_func( PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc); DEAD(sub_st); PyStackRef_CLOSE(str_st); - res = PyStackRef_FromPyObjectImmortal(res_o); + res = PyStackRef_FromPyObjectBorrow(res_o); } op(_GUARD_NOS_TUPLE, (nos, unused -- nos, unused)) { @@ -3085,15 +3048,24 @@ dummy_func( values_or_none = PyStackRef_FromPyObjectSteal(values_or_none_o); } - inst(GET_ITER, (iterable -- iter)) { + inst(GET_ITER, (iterable -- iter, index_or_null)) { #ifdef Py_STATS _Py_GatherStats_GetIter(iterable); #endif /* before: [obj]; after [getiter(obj)] */ - PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable)); - PyStackRef_CLOSE(iterable); - ERROR_IF(iter_o == NULL); - iter = PyStackRef_FromPyObjectSteal(iter_o); + PyTypeObject *tp = PyStackRef_TYPE(iterable); + if (tp == &PyTuple_Type || tp == &PyList_Type) { + iter = iterable; + DEAD(iterable); + index_or_null = PyStackRef_TagInt(0); + } + else { + PyObject *iter_o = PyObject_GetIter(PyStackRef_AsPyObjectBorrow(iterable)); + PyStackRef_CLOSE(iterable); + ERROR_IF(iter_o == NULL); + iter = PyStackRef_FromPyObjectSteal(iter_o); + index_or_null = PyStackRef_NULL; + } } inst(GET_YIELD_FROM_ITER, (iterable -- iter)) { @@ -3140,11 +3112,11 @@ dummy_func( FOR_ITER_GEN, }; - specializing op(_SPECIALIZE_FOR_ITER, (counter/1, iter -- iter)) { + specializing op(_SPECIALIZE_FOR_ITER, (counter/1, iter, null_or_index -- iter, null_or_index)) { #if ENABLE_SPECIALIZATION_FT if (ADAPTIVE_COUNTER_TRIGGERS(counter)) { next_instr = this_instr; - _Py_Specialize_ForIter(iter, next_instr, oparg); + _Py_Specialize_ForIter(iter, null_or_index, next_instr, oparg); DISPATCH_SAME_OPARG(); } OPCODE_DEFERRED_INC(FOR_ITER); @@ -3152,33 +3124,44 @@ dummy_func( #endif /* ENABLE_SPECIALIZATION_FT */ } - replaced op(_FOR_ITER, (iter -- iter, next)) { + replaced op(_FOR_ITER, (iter, null_or_index -- iter, null_or_index, next)) { /* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) 
*/ PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o); - if (next_o == NULL) { - if (_PyErr_Occurred(tstate)) { - int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration); - if (!matches) { - ERROR_NO_POP(); + if (PyStackRef_IsTaggedInt(null_or_index)) { + next = _PyForIter_NextWithIndex(iter_o, null_or_index); + if (PyStackRef_IsNull(next)) { + null_or_index = PyStackRef_TagInt(-1); + JUMPBY(oparg + 1); + DISPATCH(); + } + null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index); + } + else { + PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o); + if (next_o == NULL) { + if (_PyErr_Occurred(tstate)) { + int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration); + if (!matches) { + ERROR_NO_POP(); + } + _PyEval_MonitorRaise(tstate, frame, this_instr); + _PyErr_Clear(tstate); } - _PyEval_MonitorRaise(tstate, frame, this_instr); - _PyErr_Clear(tstate); + /* iterator ended normally */ + assert(next_instr[oparg].op.code == END_FOR || + next_instr[oparg].op.code == INSTRUMENTED_END_FOR); + /* Jump forward oparg, then skip following END_FOR */ + JUMPBY(oparg + 1); + DISPATCH(); } - /* iterator ended normally */ - assert(next_instr[oparg].op.code == END_FOR || - next_instr[oparg].op.code == INSTRUMENTED_END_FOR); - /* Jump forward oparg, then skip following END_FOR */ - JUMPBY(oparg + 1); - DISPATCH(); + next = PyStackRef_FromPyObjectSteal(next_o); } - next = PyStackRef_FromPyObjectSteal(next_o); - // Common case: no jump, leave it to the code generator } - op(_FOR_ITER_TIER_TWO, (iter -- iter, next)) { + op(_FOR_ITER_TIER_TWO, (iter, null_or_index -- iter, null_or_index, next)) { /* before: [iter]; after: [iter, iter()] *or* [] (and jump over END_FOR.) 
*/ PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); + EXIT_IF(!PyStackRef_IsNull(null_or_index)); PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o); if (next_o == NULL) { if (_PyErr_Occurred(tstate)) { @@ -3200,63 +3183,63 @@ dummy_func( macro(FOR_ITER) = _SPECIALIZE_FOR_ITER + _FOR_ITER; - inst(INSTRUMENTED_FOR_ITER, (unused/1, iter -- iter, next)) { + inst(INSTRUMENTED_FOR_ITER, (unused/1, iter, null_or_index -- iter, null_or_index, next)) { PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o); - if (next_o != NULL) { - next = PyStackRef_FromPyObjectSteal(next_o); + if (PyStackRef_IsTaggedInt(null_or_index)) { + next = _PyForIter_NextWithIndex(iter_o, null_or_index); + if (PyStackRef_IsNull(next)) { + JUMPBY(oparg + 1); + DISPATCH(); + } INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT); } else { - if (_PyErr_Occurred(tstate)) { - int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration); - if (!matches) { - ERROR_NO_POP(); + PyObject *next_o = (*Py_TYPE(iter_o)->tp_iternext)(iter_o); + if (next_o != NULL) { + next = PyStackRef_FromPyObjectSteal(next_o); + INSTRUMENTED_JUMP(this_instr, next_instr, PY_MONITORING_EVENT_BRANCH_LEFT); + } + else { + if (_PyErr_Occurred(tstate)) { + int matches = _PyErr_ExceptionMatches(tstate, PyExc_StopIteration); + if (!matches) { + ERROR_NO_POP(); + } + _PyEval_MonitorRaise(tstate, frame, this_instr); + _PyErr_Clear(tstate); } - _PyEval_MonitorRaise(tstate, frame, this_instr); - _PyErr_Clear(tstate); + /* iterator ended normally */ + assert(next_instr[oparg].op.code == END_FOR || + next_instr[oparg].op.code == INSTRUMENTED_END_FOR); + /* Skip END_FOR */ + JUMPBY(oparg + 1); + DISPATCH(); } - /* iterator ended normally */ - assert(next_instr[oparg].op.code == END_FOR || - next_instr[oparg].op.code == INSTRUMENTED_END_FOR); - /* Skip END_FOR */ - JUMPBY(oparg + 1); - DISPATCH(); } } - op(_ITER_CHECK_LIST, (iter -- iter)) { + op(_ITER_CHECK_LIST, (iter, null_or_index -- iter, null_or_index)) { PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - EXIT_IF(Py_TYPE(iter_o) != &PyListIter_Type); + EXIT_IF(Py_TYPE(iter_o) != &PyList_Type); + assert(PyStackRef_IsTaggedInt(null_or_index)); #ifdef Py_GIL_DISABLED - EXIT_IF(!_PyObject_IsUniquelyReferenced(iter_o)); - _PyListIterObject *it = (_PyListIterObject *)iter_o; - EXIT_IF(!_Py_IsOwnedByCurrentThread((PyObject *)it->it_seq) || - !_PyObject_GC_IS_SHARED(it->it_seq)); + EXIT_IF(!_Py_IsOwnedByCurrentThread(iter_o) && !_PyObject_GC_IS_SHARED(iter_o)); #endif } - replaced op(_ITER_JUMP_LIST, (iter -- iter)) { - PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - assert(Py_TYPE(iter_o) == &PyListIter_Type); -// For free-threaded Python, the loop exit can happen at any point during -// item retrieval, so it doesn't make much sense to check and jump -// separately before item retrieval. Any length check we do here can be -// invalid by the time we actually try to fetch the item. + replaced op(_ITER_JUMP_LIST, (iter, null_or_index -- iter, null_or_index)) { #ifdef Py_GIL_DISABLED - assert(_PyObject_IsUniquelyReferenced(iter_o)); - (void)iter_o; + // For free-threaded Python, the loop exit can happen at any point during + // item retrieval, so it doesn't make much sense to check and jump + // separately before item retrieval. Any length check we do here can be + // invalid by the time we actually try to fetch the item. 
#else - _PyListIterObject *it = (_PyListIterObject *)iter_o; + PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter); + assert(Py_TYPE(list_o) == &PyList_Type); STAT_INC(FOR_ITER, hit); - PyListObject *seq = it->it_seq; - if (seq == NULL || (size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) { - it->it_index = -1; - if (seq != NULL) { - it->it_seq = NULL; - Py_DECREF(seq); - } + if ((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyList_GET_SIZE(list_o)) { + null_or_index = PyStackRef_TagInt(-1); /* Jump forward oparg, then skip following END_FOR instruction */ JUMPBY(oparg + 1); DISPATCH(); @@ -3265,73 +3248,54 @@ dummy_func( } // Only used by Tier 2 - op(_GUARD_NOT_EXHAUSTED_LIST, (iter -- iter)) { + op(_GUARD_NOT_EXHAUSTED_LIST, (iter, null_or_index -- iter, null_or_index)) { #ifndef Py_GIL_DISABLED - PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - _PyListIterObject *it = (_PyListIterObject *)iter_o; - assert(Py_TYPE(iter_o) == &PyListIter_Type); - PyListObject *seq = it->it_seq; - EXIT_IF(seq == NULL); - if ((size_t)it->it_index >= (size_t)PyList_GET_SIZE(seq)) { - it->it_index = -1; - EXIT_IF(1); - } + PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter); + assert(Py_TYPE(list_o) == &PyList_Type); + EXIT_IF((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyList_GET_SIZE(list_o)); #endif } - replaced op(_ITER_NEXT_LIST, (iter -- iter, next)) { - PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - _PyListIterObject *it = (_PyListIterObject *)iter_o; - assert(Py_TYPE(iter_o) == &PyListIter_Type); - PyListObject *seq = it->it_seq; - assert(seq); + replaced op(_ITER_NEXT_LIST, (iter, null_or_index -- iter, null_or_index, next)) { + PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter); + assert(PyList_CheckExact(list_o)); #ifdef Py_GIL_DISABLED - assert(_PyObject_IsUniquelyReferenced(iter_o)); - assert(_Py_IsOwnedByCurrentThread((PyObject *)seq) || - _PyObject_GC_IS_SHARED(seq)); + assert(_Py_IsOwnedByCurrentThread(list_o) || + _PyObject_GC_IS_SHARED(list_o)); STAT_INC(FOR_ITER, hit); - int result = _PyList_GetItemRefNoLock(seq, it->it_index, &next); + int result = _PyList_GetItemRefNoLock((PyListObject *)list_o, PyStackRef_UntagInt(null_or_index), &next); // A negative result means we lost a race with another thread // and we need to take the slow path. 
DEOPT_IF(result < 0); if (result == 0) { - it->it_index = -1; + null_or_index = PyStackRef_TagInt(-1); /* Jump forward oparg, then skip following END_FOR instruction */ JUMPBY(oparg + 1); DISPATCH(); } - it->it_index++; #else - assert(it->it_index < PyList_GET_SIZE(seq)); - next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(seq, it->it_index++)); + next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(list_o, PyStackRef_UntagInt(null_or_index))); #endif + null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index); } // Only used by Tier 2 - op(_ITER_NEXT_LIST_TIER_TWO, (iter -- iter, next)) { - PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - _PyListIterObject *it = (_PyListIterObject *)iter_o; - assert(Py_TYPE(iter_o) == &PyListIter_Type); - PyListObject *seq = it->it_seq; - assert(seq); + op(_ITER_NEXT_LIST_TIER_TWO, (iter, null_or_index -- iter, null_or_index, next)) { + PyObject *list_o = PyStackRef_AsPyObjectBorrow(iter); + assert(PyList_CheckExact(list_o)); #ifdef Py_GIL_DISABLED - assert(_PyObject_IsUniquelyReferenced(iter_o)); - assert(_Py_IsOwnedByCurrentThread((PyObject *)seq) || - _PyObject_GC_IS_SHARED(seq)); + assert(_Py_IsOwnedByCurrentThread((PyObject *)list_o) || + _PyObject_GC_IS_SHARED(list_o)); STAT_INC(FOR_ITER, hit); - int result = _PyList_GetItemRefNoLock(seq, it->it_index, &next); + int result = _PyList_GetItemRefNoLock((PyListObject *)list_o, PyStackRef_UntagInt(null_or_index), &next); // A negative result means we lost a race with another thread // and we need to take the slow path. - EXIT_IF(result < 0); - if (result == 0) { - it->it_index = -1; - EXIT_IF(1); - } - it->it_index++; + DEOPT_IF(result <= 0); #else - assert(it->it_index < PyList_GET_SIZE(seq)); - next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(seq, it->it_index++)); + assert(PyStackRef_UntagInt(null_or_index) < PyList_GET_SIZE(list_o)); + next = PyStackRef_FromPyObjectNew(PyList_GET_ITEM(list_o, PyStackRef_UntagInt(null_or_index))); #endif + null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index); } macro(FOR_ITER_LIST) = @@ -3340,31 +3304,19 @@ dummy_func( _ITER_JUMP_LIST + _ITER_NEXT_LIST; - op(_ITER_CHECK_TUPLE, (iter -- iter)) { + op(_ITER_CHECK_TUPLE, (iter, null_or_index -- iter, null_or_index)) { PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - EXIT_IF(Py_TYPE(iter_o) != &PyTupleIter_Type); -#ifdef Py_GIL_DISABLED - EXIT_IF(!_PyObject_IsUniquelyReferenced(iter_o)); -#endif + EXIT_IF(Py_TYPE(iter_o) != &PyTuple_Type); + assert(PyStackRef_IsTaggedInt(null_or_index)); } - replaced op(_ITER_JUMP_TUPLE, (iter -- iter)) { - PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - (void)iter_o; - assert(Py_TYPE(iter_o) == &PyTupleIter_Type); -#ifdef Py_GIL_DISABLED - assert(_PyObject_IsUniquelyReferenced(iter_o)); -#endif - _PyTupleIterObject *it = (_PyTupleIterObject *)iter_o; + replaced op(_ITER_JUMP_TUPLE, (iter, null_or_index -- iter, null_or_index)) { + PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter); + (void)tuple_o; + assert(Py_TYPE(tuple_o) == &PyTuple_Type); STAT_INC(FOR_ITER, hit); - PyTupleObject *seq = it->it_seq; - if (seq == NULL || (size_t)it->it_index >= (size_t)PyTuple_GET_SIZE(seq)) { -#ifndef Py_GIL_DISABLED - if (seq != NULL) { - it->it_seq = NULL; - Py_DECREF(seq); - } -#endif + if ((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyTuple_GET_SIZE(tuple_o)) { + null_or_index = PyStackRef_TagInt(-1); /* Jump forward oparg, then skip following END_FOR instruction */ JUMPBY(oparg + 1); DISPATCH(); @@ -3372,29 +3324,19 @@ dummy_func( } 
// Only used by Tier 2 - op(_GUARD_NOT_EXHAUSTED_TUPLE, (iter -- iter)) { - PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - _PyTupleIterObject *it = (_PyTupleIterObject *)iter_o; - assert(Py_TYPE(iter_o) == &PyTupleIter_Type); -#ifdef Py_GIL_DISABLED - assert(_PyObject_IsUniquelyReferenced(iter_o)); -#endif - PyTupleObject *seq = it->it_seq; - EXIT_IF(seq == NULL); - EXIT_IF(it->it_index >= PyTuple_GET_SIZE(seq)); + op(_GUARD_NOT_EXHAUSTED_TUPLE, (iter, null_or_index -- iter, null_or_index)) { + PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter); + assert(Py_TYPE(tuple_o) == &PyTuple_Type); + EXIT_IF((size_t)PyStackRef_UntagInt(null_or_index) >= (size_t)PyTuple_GET_SIZE(tuple_o)); } - op(_ITER_NEXT_TUPLE, (iter -- iter, next)) { - PyObject *iter_o = PyStackRef_AsPyObjectBorrow(iter); - _PyTupleIterObject *it = (_PyTupleIterObject *)iter_o; - assert(Py_TYPE(iter_o) == &PyTupleIter_Type); - PyTupleObject *seq = it->it_seq; -#ifdef Py_GIL_DISABLED - assert(_PyObject_IsUniquelyReferenced(iter_o)); -#endif - assert(seq); - assert(it->it_index < PyTuple_GET_SIZE(seq)); - next = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(seq, it->it_index++)); + op(_ITER_NEXT_TUPLE, (iter, null_or_index -- iter, null_or_index, next)) { + PyObject *tuple_o = PyStackRef_AsPyObjectBorrow(iter); + assert(Py_TYPE(tuple_o) == &PyTuple_Type); + uintptr_t i = PyStackRef_UntagInt(null_or_index); + assert((size_t)i < (size_t)PyTuple_GET_SIZE(tuple_o)); + next = PyStackRef_FromPyObjectNew(PyTuple_GET_ITEM(tuple_o, i)); + null_or_index = PyStackRef_IncrementTaggedIntNoOverflow(null_or_index); } macro(FOR_ITER_TUPLE) = @@ -3403,7 +3345,7 @@ dummy_func( _ITER_JUMP_TUPLE + _ITER_NEXT_TUPLE; - op(_ITER_CHECK_RANGE, (iter -- iter)) { + op(_ITER_CHECK_RANGE, (iter, null_or_index -- iter, null_or_index)) { _PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter); EXIT_IF(Py_TYPE(r) != &PyRangeIter_Type); #ifdef Py_GIL_DISABLED @@ -3411,7 +3353,7 @@ dummy_func( #endif } - replaced op(_ITER_JUMP_RANGE, (iter -- iter)) { + replaced op(_ITER_JUMP_RANGE, (iter, null_or_index -- iter, null_or_index)) { _PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter); assert(Py_TYPE(r) == &PyRangeIter_Type); #ifdef Py_GIL_DISABLED @@ -3426,13 +3368,13 @@ dummy_func( } // Only used by Tier 2 - op(_GUARD_NOT_EXHAUSTED_RANGE, (iter -- iter)) { + op(_GUARD_NOT_EXHAUSTED_RANGE, (iter, null_or_index -- iter, null_or_index)) { _PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter); assert(Py_TYPE(r) == &PyRangeIter_Type); EXIT_IF(r->len <= 0); } - op(_ITER_NEXT_RANGE, (iter -- iter, next)) { + op(_ITER_NEXT_RANGE, (iter, null_or_index -- iter, null_or_index, next)) { _PyRangeIterObject *r = (_PyRangeIterObject *)PyStackRef_AsPyObjectBorrow(iter); assert(Py_TYPE(r) == &PyRangeIter_Type); #ifdef Py_GIL_DISABLED @@ -3453,7 +3395,7 @@ dummy_func( _ITER_JUMP_RANGE + _ITER_NEXT_RANGE; - op(_FOR_ITER_GEN_FRAME, (iter -- iter, gen_frame: _PyInterpreterFrame*)) { + op(_FOR_ITER_GEN_FRAME, (iter, null -- iter, null, gen_frame: _PyInterpreterFrame*)) { PyGenObject *gen = (PyGenObject *)PyStackRef_AsPyObjectBorrow(iter); DEOPT_IF(Py_TYPE(gen) != &PyGen_Type); #ifdef Py_GIL_DISABLED @@ -4041,6 +3983,15 @@ dummy_func( DEOPT_IF(!PyStackRef_IsNull(null)); } + op(_GUARD_NOS_NOT_NULL, (nos, unused -- nos, unused)) { + PyObject *o = PyStackRef_AsPyObjectBorrow(nos); + EXIT_IF(o == NULL); + } + + op(_GUARD_THIRD_NULL, (null, unused, unused -- null, unused, unused)) { + 
DEOPT_IF(!PyStackRef_IsNull(null)); + } + op(_GUARD_CALLABLE_TYPE_1, (callable, unused, unused -- callable, unused, unused)) { PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable); DEOPT_IF(callable_o != (PyObject *)&PyType_Type); @@ -4359,41 +4310,57 @@ dummy_func( res = PyStackRef_FromPyObjectSteal(res_o); } - inst(CALL_ISINSTANCE, (unused/1, unused/2, callable, self_or_null, args[oparg] -- res)) { - /* isinstance(o, o2) */ + op(_GUARD_CALLABLE_ISINSTANCE, (callable, unused, unused, unused -- callable, unused, unused, unused)) { PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable); - - int total_args = oparg; - _PyStackRef *arguments = args; - if (!PyStackRef_IsNull(self_or_null)) { - arguments--; - total_args++; - } - DEOPT_IF(total_args != 2); PyInterpreterState *interp = tstate->interp; DEOPT_IF(callable_o != interp->callable_cache.isinstance); + } + + op(_CALL_ISINSTANCE, (callable, null, instance, cls -- res)) { + /* isinstance(o, o2) */ STAT_INC(CALL, hit); - _PyStackRef cls_stackref = arguments[1]; - _PyStackRef inst_stackref = arguments[0]; - int retval = PyObject_IsInstance(PyStackRef_AsPyObjectBorrow(inst_stackref), PyStackRef_AsPyObjectBorrow(cls_stackref)); + PyObject *inst_o = PyStackRef_AsPyObjectBorrow(instance); + PyObject *cls_o = PyStackRef_AsPyObjectBorrow(cls); + int retval = PyObject_IsInstance(inst_o, cls_o); if (retval < 0) { ERROR_NO_POP(); } + (void)null; // Silence compiler warnings about unused variables + PyStackRef_CLOSE(cls); + PyStackRef_CLOSE(instance); + DEAD(null); + PyStackRef_CLOSE(callable); res = retval ? PyStackRef_True : PyStackRef_False; assert((!PyStackRef_IsNull(res)) ^ (_PyErr_Occurred(tstate) != NULL)); - DECREF_INPUTS(); + } + + macro(CALL_ISINSTANCE) = + unused/1 + + unused/2 + + _GUARD_THIRD_NULL + + _GUARD_CALLABLE_ISINSTANCE + + _CALL_ISINSTANCE; + + macro(CALL_LIST_APPEND) = + unused/1 + + unused/2 + + _GUARD_CALLABLE_LIST_APPEND + + _GUARD_NOS_NOT_NULL + + _GUARD_NOS_LIST + + _CALL_LIST_APPEND; + + op(_GUARD_CALLABLE_LIST_APPEND, (callable, unused, unused -- callable, unused, unused)){ + PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable); + PyInterpreterState *interp = tstate->interp; + DEOPT_IF(callable_o != interp->callable_cache.list_append); } // This is secretly a super-instruction - inst(CALL_LIST_APPEND, (unused/1, unused/2, callable, self, arg -- )) { + op(_CALL_LIST_APPEND, (callable, self, arg -- )) { assert(oparg == 1); - PyObject *callable_o = PyStackRef_AsPyObjectBorrow(callable); PyObject *self_o = PyStackRef_AsPyObjectBorrow(self); - PyInterpreterState *interp = tstate->interp; - DEOPT_IF(callable_o != interp->callable_cache.list_append); - DEOPT_IF(self_o == NULL); - DEOPT_IF(!PyList_Check(self_o)); + DEOPT_IF(!PyList_CheckExact(self_o)); DEOPT_IF(!LOCK_OBJECT(self_o)); STAT_INC(CALL, hit); int err = _PyList_AppendTakeRef((PyListObject *)self_o, PyStackRef_AsPyObjectSteal(arg)); @@ -5303,18 +5270,75 @@ dummy_func( } tier2 pure op(_LOAD_CONST_INLINE_BORROW, (ptr/4 -- value)) { - value = PyStackRef_FromPyObjectImmortal(ptr); + value = PyStackRef_FromPyObjectBorrow(ptr); + } + + tier2 op(_POP_CALL, (callable, null --)) { + (void)null; // Silence compiler warnings about unused variables + DEAD(null); + PyStackRef_CLOSE(callable); + } + + tier2 op(_POP_CALL_ONE, (callable, null, pop --)) { + PyStackRef_CLOSE(pop); + (void)null; // Silence compiler warnings about unused variables + DEAD(null); + PyStackRef_CLOSE(callable); + } + + tier2 op(_POP_CALL_TWO, (callable, null, pop1, pop2 --)) { + 
PyStackRef_CLOSE(pop2); + PyStackRef_CLOSE(pop1); + (void)null; // Silence compiler warnings about unused variables + DEAD(null); + PyStackRef_CLOSE(callable); } - tier2 pure op (_POP_TOP_LOAD_CONST_INLINE_BORROW, (ptr/4, pop -- value)) { + tier2 op(_POP_TOP_LOAD_CONST_INLINE_BORROW, (ptr/4, pop -- value)) { PyStackRef_CLOSE(pop); - value = PyStackRef_FromPyObjectImmortal(ptr); + value = PyStackRef_FromPyObjectBorrow(ptr); } - tier2 pure op(_POP_TWO_LOAD_CONST_INLINE_BORROW, (ptr/4, pop1, pop2 -- value)) { + tier2 op(_POP_TWO_LOAD_CONST_INLINE_BORROW, (ptr/4, pop1, pop2 -- value)) { PyStackRef_CLOSE(pop2); PyStackRef_CLOSE(pop1); - value = PyStackRef_FromPyObjectImmortal(ptr); + value = PyStackRef_FromPyObjectBorrow(ptr); + } + + tier2 op(_POP_CALL_LOAD_CONST_INLINE_BORROW, (ptr/4, callable, null -- value)) { + (void)null; // Silence compiler warnings about unused variables + DEAD(null); + PyStackRef_CLOSE(callable); + value = PyStackRef_FromPyObjectBorrow(ptr); + } + + tier2 op(_POP_CALL_ONE_LOAD_CONST_INLINE_BORROW, (ptr/4, callable, null, pop -- value)) { + PyStackRef_CLOSE(pop); + (void)null; // Silence compiler warnings about unused variables + DEAD(null); + PyStackRef_CLOSE(callable); + value = PyStackRef_FromPyObjectBorrow(ptr); + } + + tier2 op(_POP_CALL_TWO_LOAD_CONST_INLINE_BORROW, (ptr/4, callable, null, pop1, pop2 -- value)) { + PyStackRef_CLOSE(pop2); + PyStackRef_CLOSE(pop1); + (void)null; // Silence compiler warnings about unused variables + DEAD(null); + PyStackRef_CLOSE(callable); + value = PyStackRef_FromPyObjectBorrow(ptr); + } + + tier2 op(_LOAD_CONST_UNDER_INLINE, (ptr/4, old -- value, new)) { + new = old; + DEAD(old); + value = PyStackRef_FromPyObjectNew(ptr); + } + + tier2 op(_LOAD_CONST_UNDER_INLINE_BORROW, (ptr/4, old -- value, new)) { + new = old; + DEAD(old); + value = PyStackRef_FromPyObjectBorrow(ptr); } tier2 op(_CHECK_FUNCTION, (func_version/2 -- )) { |
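Two ideas recur throughout the diff above. The first is the tagged loop index: the values that GET_ITER and FOR_ITER now push and manipulate via PyStackRef_TagInt, PyStackRef_UntagInt, PyStackRef_IsTaggedInt and PyStackRef_IncrementTaggedIntNoOverflow rely on packing a small integer into the same machine word that otherwise holds an object reference. The following is a minimal standalone C sketch of that encoding, using the low bit as the tag; the names and exact bit layout are illustrative and are not CPython's.

```c
/*
 * Standalone sketch (not CPython code): pack either an object pointer or a
 * small integer index into one machine word, using the low bit as a tag.
 * Object pointers are at least 2-byte aligned, so their low bit is 0.
 */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

typedef struct { uintptr_t bits; } StackValue;

#define TAG_INT 1u

static StackValue value_from_ptr(void *p) {
    assert(((uintptr_t)p & TAG_INT) == 0);      /* pointer must be aligned */
    return (StackValue){ (uintptr_t)p };
}

static StackValue value_tag_int(uintptr_t i) {
    return (StackValue){ (i << 1) | TAG_INT };  /* shift left, set tag bit */
}

static int value_is_tagged_int(StackValue v) {
    return (v.bits & TAG_INT) != 0;
}

static uintptr_t value_untag_int(StackValue v) {
    assert(value_is_tagged_int(v));
    return v.bits >> 1;
}

static StackValue value_increment_int(StackValue v) {
    /* adding 2 to the raw bits adds 1 to the payload and keeps the tag */
    return (StackValue){ v.bits + 2 };
}

int main(void) {
    int dummy;
    StackValue obj = value_from_ptr(&dummy);
    StackValue idx = value_tag_int(0);

    assert(!value_is_tagged_int(obj));
    for (int i = 0; i < 5; i++) {
        assert(value_untag_int(idx) == (uintptr_t)i);
        idx = value_increment_int(idx);
    }
    printf("final index: %zu\n", (size_t)value_untag_int(idx));
    return 0;
}
```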
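The second idea is what GET_ITER and the FOR_ITER family do with that index: for exact lists and tuples the interpreter no longer allocates a listiterator/tupleiterator object, it keeps the sequence itself on the stack next to a tagged index and only falls back to the generic tp_iternext protocol for other iterables. Below is a minimal sketch of that two-path loop, with a plain C array standing in for a list/tuple and a callback standing in for tp_iternext; all names are made up for illustration.

```c
/*
 * Sketch (not CPython code) of the FOR_ITER fast path introduced by this
 * change: when the iterable is an indexable sequence, iterate by index and
 * skip the iterator object entirely; otherwise fall back to a generic
 * "next" callback, the way tp_iternext is used for arbitrary iterables.
 */
#include <stddef.h>
#include <stdio.h>

typedef struct {
    const int *items;        /* non-NULL: indexable fast path (list/tuple) */
    size_t len;
    size_t index;            /* stands in for the tagged index on the stack */
    int (*next)(void *state, int *out);  /* generic fallback (tp_iternext) */
    void *state;
} LoopIter;

/* GET_ITER: decide which path to use once, before the loop starts. */
static LoopIter get_iter_seq(const int *items, size_t len) {
    return (LoopIter){ .items = items, .len = len, .index = 0 };
}
static LoopIter get_iter_generic(int (*next)(void *, int *), void *state) {
    return (LoopIter){ .next = next, .state = state };
}

/* FOR_ITER: returns 1 and stores the next item, or 0 when exhausted. */
static int for_iter(LoopIter *it, int *out) {
    if (it->items != NULL) {                  /* list/tuple fast path */
        if (it->index >= it->len) {
            return 0;
        }
        *out = it->items[it->index++];
        return 1;
    }
    return it->next(it->state, out);          /* generic iterator path */
}

/* A generic "iterator": counts down from the given state. */
static int countdown_next(void *state, int *out) {
    int *n = state;
    if (*n == 0) {
        return 0;
    }
    *out = (*n)--;
    return 1;
}

int main(void) {
    const int data[] = {10, 20, 30};
    int n = 3;
    LoopIter a = get_iter_seq(data, 3);
    LoopIter b = get_iter_generic(countdown_next, &n);

    int x;
    while (for_iter(&a, &x)) printf("seq: %d\n", x);
    while (for_iter(&b, &x)) printf("gen: %d\n", x);
    return 0;
}
```

This also explains the extra stack slot threaded through POP_ITER, INSTRUMENTED_END_FOR and the _ITER_* micro-ops: the index (or NULL, for generic iterators) travels on the evaluation stack alongside the iterable for the duration of the loop.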
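A third recurring edit is the switch from PyStackRef_FromPyObjectNew / PyStackRef_FromPyObjectImmortal to PyStackRef_FromPyObjectBorrow for values whose lifetime is already guaranteed elsewhere, such as constants kept alive by co_consts and the interned small ints; a borrowed stack slot does not have to pay any reference-count traffic when it is pushed or popped. Here is a toy C sketch of the owning-versus-borrowed distinction with a fake reference count; the types and helper names are invented for illustration, not CPython's.

```c
/*
 * Toy sketch (not CPython code) of owning vs. borrowed stack references.
 * A borrowed reference is valid because some other owner (here, a "consts"
 * array standing in for co_consts) keeps the object alive, so pushing and
 * popping it does not touch the reference count at all.
 */
#include <assert.h>
#include <stdio.h>
#include <stdlib.h>

typedef struct { int refcnt; int payload; } Obj;

typedef struct { Obj *obj; int owns; } StackRef;

static Obj *obj_new(int payload) {
    Obj *o = malloc(sizeof(Obj));
    o->refcnt = 1;
    o->payload = payload;
    return o;
}
static void obj_decref(Obj *o) {
    if (--o->refcnt == 0) {
        free(o);
    }
}

/* Push an owning reference: the stack slot holds its own strong reference. */
static StackRef ref_new(Obj *o) {
    o->refcnt++;
    return (StackRef){ o, 1 };
}
/* Push a borrowed reference: no refcount traffic on push or pop. */
static StackRef ref_borrow(Obj *o) {
    return (StackRef){ o, 0 };
}
/* POP_TOP analogue: only owning references pay for a decref. */
static void ref_close(StackRef r) {
    if (r.owns) {
        obj_decref(r.obj);
    }
}

int main(void) {
    Obj *consts[1] = { obj_new(42) };   /* long-lived owner, like co_consts */

    StackRef borrowed = ref_borrow(consts[0]);  /* LOAD_CONST-style push */
    StackRef owned = ref_new(consts[0]);        /* owning push */

    assert(consts[0]->refcnt == 2);     /* only the owning push incremented */
    ref_close(owned);
    ref_close(borrowed);
    assert(consts[0]->refcnt == 1);     /* the owner still holds the object */

    printf("payload: %d\n", consts[0]->payload);
    obj_decref(consts[0]);
    return 0;
}
```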