about | summary | refs | log | tree | commit | diff | stats | homepage
path: root/Python
diff options
context:
space:
mode:
author	Guido van Rossum <guido@python.org>	2023-07-17 12:12:33 -0700
committer	GitHub <noreply@github.com>	2023-07-17 12:12:33 -0700
commit	8e9a1a032233f06ce0f1acdf5f983d614c8745a5 (patch)
tree	d9a31c1c33b6b363d12f5e258a169835dbf29cd6 /Python
parent	7e96370a946a2ca0f2f25af4ce5b3b59f020721b (diff)
download	cpython-8e9a1a032233f06ce0f1acdf5f983d614c8745a5.tar.gz
	cpython-8e9a1a032233f06ce0f1acdf5f983d614c8745a5.zip
gh-106603: Make uop struct a triple (opcode, oparg, operand) (#106794)
Diffstat (limited to 'Python')
-rw-r--r--Python/bytecodes.c16
-rw-r--r--Python/ceval.c7
-rw-r--r--Python/executor_cases.c.h85
-rw-r--r--Python/generated_cases.c.h14
-rw-r--r--Python/optimizer.c91
5 files changed, 145 insertions, 68 deletions
diff --git a/Python/bytecodes.c b/Python/bytecodes.c
index 652372cb23d..19fb138ee64 100644
--- a/Python/bytecodes.c
+++ b/Python/bytecodes.c
@@ -645,18 +645,16 @@ dummy_func(
STORE_SUBSCR_LIST_INT,
};
- inst(STORE_SUBSCR, (counter/1, v, container, sub -- )) {
+ inst(STORE_SUBSCR, (unused/1, v, container, sub -- )) {
#if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr;
+ if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
_Py_Specialize_StoreSubscr(container, sub, next_instr);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_SUBSCR, deferred);
- _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr;
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
- #else
- (void)counter; // Unused.
#endif /* ENABLE_SPECIALIZATION */
/* container[sub] = v */
int err = PyObject_SetItem(container, sub, v);
@@ -1198,19 +1196,17 @@ dummy_func(
STORE_ATTR_WITH_HINT,
};
- inst(STORE_ATTR, (counter/1, unused/3, v, owner --)) {
+ inst(STORE_ATTR, (unused/1, unused/3, v, owner --)) {
#if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ _PyAttrCache *cache = (_PyAttrCache *)next_instr;
+ if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
next_instr--;
_Py_Specialize_StoreAttr(owner, next_instr, name);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_ATTR, deferred);
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
- #else
- (void)counter; // Unused.
#endif /* ENABLE_SPECIALIZATION */
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
int err = PyObject_SetAttr(owner, name, v);
diff --git a/Python/ceval.c b/Python/ceval.c
index f13ba9883d9..b56ddfb4bd2 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -2747,17 +2747,18 @@ _PyUopExecute(_PyExecutorObject *executor, _PyInterpreterFrame *frame, PyObject
_Py_CODEUNIT *ip_offset = (_Py_CODEUNIT *)_PyFrame_GetCode(frame)->co_code_adaptive;
int pc = 0;
int opcode;
- uint64_t operand;
int oparg;
+ uint64_t operand;
for (;;) {
opcode = self->trace[pc].opcode;
+ oparg = self->trace[pc].oparg;
operand = self->trace[pc].operand;
- oparg = (int)operand;
DPRINTF(3,
- "%4d: uop %s, operand %" PRIu64 ", stack_level %d\n",
+ "%4d: uop %s, oparg %d, operand %" PRIu64 ", stack_level %d\n",
pc,
opcode < 256 ? _PyOpcode_OpName[opcode] : _PyOpcode_uop_name[opcode],
+ oparg,
operand,
(int)(stack_pointer - _PyFrame_Stackbase(frame)));
pc++;
diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h
index d85e23b5abb..f492c1fa9d8 100644
--- a/Python/executor_cases.c.h
+++ b/Python/executor_cases.c.h
@@ -485,18 +485,15 @@
PyObject *sub = stack_pointer[-1];
PyObject *container = stack_pointer[-2];
PyObject *v = stack_pointer[-3];
- uint16_t counter = (uint16_t)operand;
#if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr;
+ if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
_Py_Specialize_StoreSubscr(container, sub, next_instr);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_SUBSCR, deferred);
- _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr;
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
- #else
- (void)counter; // Unused.
#endif /* ENABLE_SPECIALIZATION */
/* container[sub] = v */
int err = PyObject_SetItem(container, sub, v);
@@ -849,6 +846,30 @@
break;
}
+ case STORE_ATTR: {
+ static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size");
+ PyObject *owner = stack_pointer[-1];
+ PyObject *v = stack_pointer[-2];
+ #if ENABLE_SPECIALIZATION
+ _PyAttrCache *cache = (_PyAttrCache *)next_instr;
+ if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
+ PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
+ next_instr--;
+ _Py_Specialize_StoreAttr(owner, next_instr, name);
+ DISPATCH_SAME_OPARG();
+ }
+ STAT_INC(STORE_ATTR, deferred);
+ DECREMENT_ADAPTIVE_COUNTER(cache->counter);
+ #endif /* ENABLE_SPECIALIZATION */
+ PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
+ int err = PyObject_SetAttr(owner, name, v);
+ Py_DECREF(v);
+ Py_DECREF(owner);
+ if (err) goto pop_2_error;
+ STACK_SHRINK(2);
+ break;
+ }
+
case DELETE_ATTR: {
PyObject *owner = stack_pointer[-1];
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
@@ -1010,6 +1031,42 @@
break;
}
+ case _LOAD_GLOBAL_MODULE: {
+ PyObject *null = NULL;
+ PyObject *res;
+ uint16_t index = (uint16_t)operand;
+ PyDictObject *dict = (PyDictObject *)GLOBALS();
+ PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(dict->ma_keys);
+ res = entries[index].me_value;
+ DEOPT_IF(res == NULL, LOAD_GLOBAL);
+ Py_INCREF(res);
+ STAT_INC(LOAD_GLOBAL, hit);
+ null = NULL;
+ STACK_GROW(1);
+ STACK_GROW(((oparg & 1) ? 1 : 0));
+ stack_pointer[-1] = res;
+ if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = null; }
+ break;
+ }
+
+ case _LOAD_GLOBAL_BUILTINS: {
+ PyObject *null = NULL;
+ PyObject *res;
+ uint16_t index = (uint16_t)operand;
+ PyDictObject *bdict = (PyDictObject *)BUILTINS();
+ PyDictUnicodeEntry *entries = DK_UNICODE_ENTRIES(bdict->ma_keys);
+ res = entries[index].me_value;
+ DEOPT_IF(res == NULL, LOAD_GLOBAL);
+ Py_INCREF(res);
+ STAT_INC(LOAD_GLOBAL, hit);
+ null = NULL;
+ STACK_GROW(1);
+ STACK_GROW(((oparg & 1) ? 1 : 0));
+ stack_pointer[-1] = res;
+ if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = null; }
+ break;
+ }
+
case DELETE_FAST: {
PyObject *v = GETLOCAL(oparg);
if (v == NULL) goto unbound_local_error;
@@ -1443,6 +1500,24 @@
break;
}
+ case _LOAD_ATTR_INSTANCE_VALUE: {
+ PyObject *owner = stack_pointer[-1];
+ PyObject *res2 = NULL;
+ PyObject *res;
+ uint16_t index = (uint16_t)operand;
+ PyDictOrValues dorv = *_PyObject_DictOrValuesPointer(owner);
+ res = _PyDictOrValues_GetValues(dorv)->values[index];
+ DEOPT_IF(res == NULL, LOAD_ATTR);
+ STAT_INC(LOAD_ATTR, hit);
+ Py_INCREF(res);
+ res2 = NULL;
+ Py_DECREF(owner);
+ STACK_GROW(((oparg & 1) ? 1 : 0));
+ stack_pointer[-1] = res;
+ if (oparg & 1) { stack_pointer[-(1 + ((oparg & 1) ? 1 : 0))] = res2; }
+ break;
+ }
+
case COMPARE_OP: {
static_assert(INLINE_CACHE_ENTRIES_COMPARE_OP == 1, "incorrect cache size");
PyObject *right = stack_pointer[-1];
diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h
index 1fd76715dc3..0148078d18b 100644
--- a/Python/generated_cases.c.h
+++ b/Python/generated_cases.c.h
@@ -773,18 +773,15 @@
PyObject *sub = stack_pointer[-1];
PyObject *container = stack_pointer[-2];
PyObject *v = stack_pointer[-3];
- uint16_t counter = read_u16(&next_instr[0].cache);
#if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr;
+ if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
next_instr--;
_Py_Specialize_StoreSubscr(container, sub, next_instr);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_SUBSCR, deferred);
- _PyStoreSubscrCache *cache = (_PyStoreSubscrCache *)next_instr;
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
- #else
- (void)counter; // Unused.
#endif /* ENABLE_SPECIALIZATION */
/* container[sub] = v */
int err = PyObject_SetItem(container, sub, v);
@@ -1437,19 +1434,16 @@
static_assert(INLINE_CACHE_ENTRIES_STORE_ATTR == 4, "incorrect cache size");
PyObject *owner = stack_pointer[-1];
PyObject *v = stack_pointer[-2];
- uint16_t counter = read_u16(&next_instr[0].cache);
#if ENABLE_SPECIALIZATION
- if (ADAPTIVE_COUNTER_IS_ZERO(counter)) {
+ _PyAttrCache *cache = (_PyAttrCache *)next_instr;
+ if (ADAPTIVE_COUNTER_IS_ZERO(cache->counter)) {
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
next_instr--;
_Py_Specialize_StoreAttr(owner, next_instr, name);
DISPATCH_SAME_OPARG();
}
STAT_INC(STORE_ATTR, deferred);
- _PyAttrCache *cache = (_PyAttrCache *)next_instr;
DECREMENT_ADAPTIVE_COUNTER(cache->counter);
- #else
- (void)counter; // Unused.
#endif /* ENABLE_SPECIALIZATION */
PyObject *name = GETITEM(FRAME_CO_NAMES, oparg);
int err = PyObject_SetAttr(owner, name, v);
diff --git a/Python/optimizer.c b/Python/optimizer.c
index 693ba375971..3d385a1506c 100644
--- a/Python/optimizer.c
+++ b/Python/optimizer.c
@@ -344,13 +344,19 @@ uop_item(_PyUOpExecutorObject *self, Py_ssize_t index)
if (oname == NULL) {
return NULL;
}
+ PyObject *oparg = PyLong_FromUnsignedLong(self->trace[index].oparg);
+ if (oparg == NULL) {
+ Py_DECREF(oname);
+ return NULL;
+ }
PyObject *operand = PyLong_FromUnsignedLongLong(self->trace[index].operand);
if (operand == NULL) {
+ Py_DECREF(oparg);
Py_DECREF(oname);
return NULL;
}
- PyObject *args[2] = { oname, operand };
- return _PyTuple_FromArraySteal(args, 2);
+ PyObject *args[3] = { oname, oparg, operand };
+ return _PyTuple_FromArraySteal(args, 3);
}
PySequenceMethods uop_as_sequence = {
@@ -395,29 +401,33 @@ translate_bytecode_to_trace(
#define DPRINTF(level, ...)
#endif
-#define ADD_TO_TRACE(OPCODE, OPERAND) \
+#define ADD_TO_TRACE(OPCODE, OPARG, OPERAND) \
DPRINTF(2, \
- " ADD_TO_TRACE(%s, %" PRIu64 ")\n", \
+ " ADD_TO_TRACE(%s, %d, %" PRIu64 ")\n", \
uop_name(OPCODE), \
+ (OPARG), \
(uint64_t)(OPERAND)); \
assert(trace_length < max_length); \
assert(reserved > 0); \
reserved--; \
trace[trace_length].opcode = (OPCODE); \
+ trace[trace_length].oparg = (OPARG); \
trace[trace_length].operand = (OPERAND); \
trace_length++;
#define INSTR_IP(INSTR, CODE) \
- ((long)((INSTR) - ((_Py_CODEUNIT *)(CODE)->co_code_adaptive)))
+ ((uint32_t)((INSTR) - ((_Py_CODEUNIT *)(CODE)->co_code_adaptive)))
-#define ADD_TO_STUB(INDEX, OPCODE, OPERAND) \
- DPRINTF(2, " ADD_TO_STUB(%d, %s, %" PRIu64 ")\n", \
+#define ADD_TO_STUB(INDEX, OPCODE, OPARG, OPERAND) \
+ DPRINTF(2, " ADD_TO_STUB(%d, %s, %d, %" PRIu64 ")\n", \
(INDEX), \
uop_name(OPCODE), \
+ (OPARG), \
(uint64_t)(OPERAND)); \
assert(reserved > 0); \
reserved--; \
trace[(INDEX)].opcode = (OPCODE); \
+ trace[(INDEX)].oparg = (OPARG); \
trace[(INDEX)].operand = (OPERAND);
// Reserve space for n uops
@@ -433,7 +443,7 @@ translate_bytecode_to_trace(
#define RESERVE(main, stub) RESERVE_RAW((main) + (stub) + 2, uop_name(opcode))
DPRINTF(4,
- "Optimizing %s (%s:%d) at byte offset %ld\n",
+ "Optimizing %s (%s:%d) at byte offset %d\n",
PyUnicode_AsUTF8(code->co_qualname),
PyUnicode_AsUTF8(code->co_filename),
code->co_firstlineno,
@@ -441,11 +451,11 @@ translate_bytecode_to_trace(
for (;;) {
RESERVE_RAW(2, "epilogue"); // Always need space for SAVE_IP and EXIT_TRACE
- ADD_TO_TRACE(SAVE_IP, INSTR_IP(instr, code));
+ ADD_TO_TRACE(SAVE_IP, INSTR_IP(instr, code), 0);
- int opcode = instr->op.code;
- int oparg = instr->op.arg;
- int extras = 0;
+ uint32_t opcode = instr->op.code;
+ uint32_t oparg = instr->op.arg;
+ uint32_t extras = 0;
while (opcode == EXTENDED_ARG) {
instr++;
@@ -467,7 +477,7 @@ translate_bytecode_to_trace(
case POP_JUMP_IF_NONE:
{
RESERVE(2, 2);
- ADD_TO_TRACE(IS_NONE, 0);
+ ADD_TO_TRACE(IS_NONE, 0, 0);
opcode = POP_JUMP_IF_TRUE;
goto pop_jump_if_bool;
}
@@ -475,7 +485,7 @@ translate_bytecode_to_trace(
case POP_JUMP_IF_NOT_NONE:
{
RESERVE(2, 2);
- ADD_TO_TRACE(IS_NONE, 0);
+ ADD_TO_TRACE(IS_NONE, 0, 0);
opcode = POP_JUMP_IF_FALSE;
goto pop_jump_if_bool;
}
@@ -489,11 +499,11 @@ pop_jump_if_bool:
_Py_CODEUNIT *target_instr =
instr + 1 + _PyOpcode_Caches[_PyOpcode_Deopt[opcode]] + oparg;
max_length -= 2; // Really the start of the stubs
- int uopcode = opcode == POP_JUMP_IF_TRUE ?
+ uint32_t uopcode = opcode == POP_JUMP_IF_TRUE ?
_POP_JUMP_IF_TRUE : _POP_JUMP_IF_FALSE;
- ADD_TO_TRACE(uopcode, max_length);
- ADD_TO_STUB(max_length, SAVE_IP, INSTR_IP(target_instr, code));
- ADD_TO_STUB(max_length + 1, EXIT_TRACE, 0);
+ ADD_TO_TRACE(uopcode, max_length, 0);
+ ADD_TO_STUB(max_length, SAVE_IP, INSTR_IP(target_instr, code), 0);
+ ADD_TO_STUB(max_length + 1, EXIT_TRACE, 0, 0);
break;
}
@@ -501,7 +511,7 @@ pop_jump_if_bool:
{
if (instr + 2 - oparg == initial_instr) {
RESERVE(1, 0);
- ADD_TO_TRACE(JUMP_TO_TOP, 0);
+ ADD_TO_TRACE(JUMP_TO_TOP, 0, 0);
}
else {
DPRINTF(2, "JUMP_BACKWARD not to top ends trace\n");
@@ -546,14 +556,14 @@ pop_jump_if_bool:
_Py_CODEUNIT *target_instr = // +1 at the end skips over END_FOR
instr + 1 + _PyOpcode_Caches[_PyOpcode_Deopt[opcode]] + oparg + 1;
max_length -= 3; // Really the start of the stubs
- ADD_TO_TRACE(check_op, 0);
- ADD_TO_TRACE(exhausted_op, 0);
- ADD_TO_TRACE(_POP_JUMP_IF_TRUE, max_length);
- ADD_TO_TRACE(next_op, 0);
-
- ADD_TO_STUB(max_length + 0, POP_TOP, 0);
- ADD_TO_STUB(max_length + 1, SAVE_IP, INSTR_IP(target_instr, code));
- ADD_TO_STUB(max_length + 2, EXIT_TRACE, 0);
+ ADD_TO_TRACE(check_op, 0, 0);
+ ADD_TO_TRACE(exhausted_op, 0, 0);
+ ADD_TO_TRACE(_POP_JUMP_IF_TRUE, max_length, 0);
+ ADD_TO_TRACE(next_op, 0, 0);
+
+ ADD_TO_STUB(max_length + 0, POP_TOP, 0, 0);
+ ADD_TO_STUB(max_length + 1, SAVE_IP, INSTR_IP(target_instr, code), 0);
+ ADD_TO_STUB(max_length + 2, EXIT_TRACE, 0, 0);
break;
}
@@ -564,19 +574,20 @@ pop_jump_if_bool:
// Reserve space for nuops (+ SAVE_IP + EXIT_TRACE)
int nuops = expansion->nuops;
RESERVE(nuops, 0);
+ uint32_t orig_oparg = oparg; // For OPARG_TOP/BOTTOM
for (int i = 0; i < nuops; i++) {
- uint64_t operand;
+ oparg = orig_oparg;
+ uint64_t operand = 0;
int offset = expansion->uops[i].offset;
switch (expansion->uops[i].size) {
case OPARG_FULL:
- operand = oparg;
if (extras && OPCODE_HAS_JUMP(opcode)) {
if (opcode == JUMP_BACKWARD_NO_INTERRUPT) {
- operand -= extras;
+ oparg -= extras;
}
else {
assert(opcode != JUMP_BACKWARD);
- operand += extras;
+ oparg += extras;
}
}
break;
@@ -590,10 +601,10 @@ pop_jump_if_bool:
operand = read_u64(&instr[offset].cache);
break;
case OPARG_TOP: // First half of super-instr
- operand = oparg >> 4;
+ oparg = orig_oparg >> 4;
break;
case OPARG_BOTTOM: // Second half of super-instr
- operand = oparg & 0xF;
+ oparg = orig_oparg & 0xF;
break;
default:
fprintf(stderr,
@@ -603,7 +614,7 @@ pop_jump_if_bool:
expansion->uops[i].offset);
Py_FatalError("garbled expansion");
}
- ADD_TO_TRACE(expansion->uops[i].uop, operand);
+ ADD_TO_TRACE(expansion->uops[i].uop, oparg, operand);
}
break;
}
@@ -621,9 +632,9 @@ pop_jump_if_bool:
done:
// Skip short traces like SAVE_IP, LOAD_FAST, SAVE_IP, EXIT_TRACE
if (trace_length > 3) {
- ADD_TO_TRACE(EXIT_TRACE, 0);
+ ADD_TO_TRACE(EXIT_TRACE, 0, 0);
DPRINTF(1,
- "Created a trace for %s (%s:%d) at byte offset %ld -- length %d\n",
+ "Created a trace for %s (%s:%d) at byte offset %d -- length %d\n",
PyUnicode_AsUTF8(code->co_qualname),
PyUnicode_AsUTF8(code->co_filename),
code->co_firstlineno,
@@ -644,10 +655,10 @@ done:
if (trace[i].opcode == _POP_JUMP_IF_FALSE ||
trace[i].opcode == _POP_JUMP_IF_TRUE)
{
- uint64_t target = trace[i].operand;
- if (target >= (uint64_t)max_length) {
+ int target = trace[i].oparg;
+ if (target >= max_length) {
target += trace_length - max_length;
- trace[i].operand = target;
+ trace[i].oparg = target;
}
}
}
@@ -657,7 +668,7 @@ done:
}
else {
DPRINTF(4,
- "No trace for %s (%s:%d) at byte offset %ld\n",
+ "No trace for %s (%s:%d) at byte offset %d\n",
PyUnicode_AsUTF8(code->co_qualname),
PyUnicode_AsUTF8(code->co_filename),
code->co_firstlineno,