Diffstat (limited to 'Python/compile.c')
-rw-r--r--  Python/compile.c  49
1 file changed, 29 insertions, 20 deletions
diff --git a/Python/compile.c b/Python/compile.c
index c92b267dd03..f04ba9ec50f 100644
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -77,8 +77,9 @@
 #define SETUP_WITH -3
 #define POP_BLOCK -4
 #define JUMP -5
+#define JUMP_NO_INTERRUPT -6

-#define MIN_VIRTUAL_OPCODE -5
+#define MIN_VIRTUAL_OPCODE -6
 #define MAX_ALLOWED_OPCODE 254

 #define IS_WITHIN_OPCODE_RANGE(opcode) \
@@ -86,6 +87,13 @@
 #define IS_VIRTUAL_OPCODE(opcode) ((opcode) < 0)

+/* opcodes which are not emitted in codegen stage, only by the assembler */
+#define IS_ASSEMBLER_OPCODE(opcode) \
+        ((opcode) == JUMP_FORWARD || \
+         (opcode) == JUMP_BACKWARD || \
+         (opcode) == JUMP_BACKWARD_NO_INTERRUPT)
+
+
 #define IS_TOP_LEVEL_AWAIT(c) ( \
         (c->c_flags->cf_flags & PyCF_ALLOW_TOP_LEVEL_AWAIT) \
         && (c->u->u_ste->ste_type == ModuleBlock))

@@ -1011,6 +1019,7 @@ stack_effect(int opcode, int oparg, int jump)
         case JUMP_FORWARD:
         case JUMP_BACKWARD:
         case JUMP:
+        case JUMP_BACKWARD_NO_INTERRUPT:
         case JUMP_NO_INTERRUPT:
             return 0;

@@ -1199,6 +1208,7 @@ compiler_addop_line(struct compiler *c, int opcode, int line,
                     int end_line, int col_offset, int end_col_offset)
 {
     assert(IS_WITHIN_OPCODE_RANGE(opcode));
+    assert(!IS_ASSEMBLER_OPCODE(opcode));
     assert(!HAS_ARG(opcode) || IS_ARTIFICIAL(opcode));

     if (compiler_use_new_implicit_block_if_needed(c) < 0) {
@@ -1442,6 +1452,7 @@ compiler_addop_i_line(struct compiler *c, int opcode, Py_ssize_t oparg,
        EXTENDED_ARG is used for 16, 24, and 32-bit arguments. */

     assert(IS_WITHIN_OPCODE_RANGE(opcode));
+    assert(!IS_ASSEMBLER_OPCODE(opcode));
     assert(HAS_ARG(opcode));
     assert(0 <= oparg && oparg <= 2147483647);

@@ -1486,6 +1497,7 @@ static int
 add_jump_to_block(struct compiler *c, int opcode, basicblock *target)
 {
     assert(IS_WITHIN_OPCODE_RANGE(opcode));
+    assert(!IS_ASSEMBLER_OPCODE(opcode));
     assert(HAS_ARG(opcode) || IS_VIRTUAL_OPCODE(opcode));
     assert(target != NULL);

@@ -7089,8 +7101,7 @@ stackdepth(struct compiler *c)
                 stackdepth_push(&sp, instr->i_target, target_depth);
             }
             depth = new_depth;
-            assert(instr->i_opcode != JUMP_FORWARD);
-            assert(instr->i_opcode != JUMP_BACKWARD);
+            assert(!IS_ASSEMBLER_OPCODE(instr->i_opcode));
             if (instr->i_opcode == JUMP_NO_INTERRUPT ||
                 instr->i_opcode == JUMP ||
                 instr->i_opcode == RETURN_VALUE ||
@@ -7597,15 +7608,15 @@ normalize_jumps(struct assembler *a)
             continue;
         }
         struct instr *last = &b->b_instr[b->b_iused-1];
-        assert(last->i_opcode != JUMP_FORWARD);
-        assert(last->i_opcode != JUMP_BACKWARD);
+        assert(!IS_ASSEMBLER_OPCODE(last->i_opcode));
         if (last->i_opcode == JUMP) {
-            if (last->i_target->b_visited == 0) {
-                last->i_opcode = JUMP_FORWARD;
-            }
-            else {
-                last->i_opcode = JUMP_BACKWARD;
-            }
+            bool is_forward = last->i_target->b_visited == 0;
+            last->i_opcode = is_forward ? JUMP_FORWARD : JUMP_BACKWARD;
+        }
+        if (last->i_opcode == JUMP_NO_INTERRUPT) {
+            bool is_forward = last->i_target->b_visited == 0;
+            last->i_opcode = is_forward ?
+                JUMP_FORWARD : JUMP_BACKWARD_NO_INTERRUPT;
         }
     }
 }
@@ -7641,11 +7652,13 @@ assemble_jump_offsets(struct assembler *a, struct compiler *c)
                 instr->i_oparg = instr->i_target->b_offset;
                 if (is_relative_jump(instr)) {
                     if (instr->i_oparg < bsize) {
-                        assert(instr->i_opcode == JUMP_BACKWARD);
+                        assert(instr->i_opcode == JUMP_BACKWARD ||
+                               instr->i_opcode == JUMP_BACKWARD_NO_INTERRUPT);
                         instr->i_oparg = bsize - instr->i_oparg;
                     }
                     else {
                         assert(instr->i_opcode != JUMP_BACKWARD);
+                        assert(instr->i_opcode != JUMP_BACKWARD_NO_INTERRUPT);
                         instr->i_oparg -= bsize;
                     }
                 }
@@ -8667,14 +8680,12 @@ optimize_basic_block(struct compiler *c, basicblock *bb, PyObject *consts)
                 inst->i_target = inst->i_target->b_next;
             }
             target = &inst->i_target->b_instr[0];
-            assert(target->i_opcode != JUMP_FORWARD);
-            assert(target->i_opcode != JUMP_BACKWARD);
+            assert(!IS_ASSEMBLER_OPCODE(target->i_opcode));
         }
         else {
             target = &nop;
         }
-        assert(inst->i_opcode != JUMP_FORWARD);
-        assert(inst->i_opcode != JUMP_BACKWARD);
+        assert(!IS_ASSEMBLER_OPCODE(inst->i_opcode));
         switch (inst->i_opcode) {
             /* Remove LOAD_CONST const; conditional jump */
             case LOAD_CONST:
@@ -8975,8 +8986,7 @@ normalize_basic_block(basicblock *bb) {
     /* Mark blocks as exit and/or nofallthrough.
     Raise SystemError if CFG is malformed. */
     for (int i = 0; i < bb->b_iused; i++) {
-        assert(bb->b_instr[i].i_opcode != JUMP_FORWARD);
-        assert(bb->b_instr[i].i_opcode != JUMP_BACKWARD);
+        assert(!IS_ASSEMBLER_OPCODE(bb->b_instr[i].i_opcode));
         switch(bb->b_instr[i].i_opcode) {
             case RETURN_VALUE:
             case RAISE_VARARGS:
@@ -9163,8 +9173,7 @@ optimize_cfg(struct compiler *c, struct assembler *a, PyObject *consts)
     for (basicblock *b = a->a_entry; b != NULL; b = b->b_next) {
         if (b->b_iused > 0) {
             struct instr *b_last_instr = &b->b_instr[b->b_iused - 1];
-            assert(b_last_instr->i_opcode != JUMP_FORWARD);
-            assert(b_last_instr->i_opcode != JUMP_BACKWARD);
+            assert(!IS_ASSEMBLER_OPCODE(b_last_instr->i_opcode));
             if (b_last_instr->i_opcode == JUMP ||
                 b_last_instr->i_opcode == JUMP_NO_INTERRUPT) {
                 if (b_last_instr->i_target == b->b_next) {
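The behavioural core of the change sits in normalize_jumps(): the codegen stage now emits only the direction-neutral pseudo instructions JUMP and JUMP_NO_INTERRUPT, and the assembler resolves them into the concrete JUMP_FORWARD, JUMP_BACKWARD or JUMP_BACKWARD_NO_INTERRUPT opcodes once block order is known, with the new IS_ASSEMBLER_OPCODE() macro replacing the repeated per-opcode asserts elsewhere. The standalone C sketch below mirrors that resolution step; it is illustrative only and not taken from CPython - the concrete opcode numbers and the resolve_jump() helper are stand-ins, and target_visited plays the role of i_target->b_visited in the real function.

/* Illustrative sketch, not CPython code: stand-in opcode values and a
 * made-up resolve_jump() helper that mimics normalize_jumps(). */
#include <assert.h>
#include <stdbool.h>
#include <stdio.h>

/* Virtual (negative) opcodes exist only before assembly. */
#define JUMP               -5
#define JUMP_NO_INTERRUPT  -6

/* Concrete opcodes emitted by the assembler (values made up here). */
#define JUMP_FORWARD                110
#define JUMP_BACKWARD               140
#define JUMP_BACKWARD_NO_INTERRUPT  134

#define IS_VIRTUAL_OPCODE(opcode) ((opcode) < 0)
#define IS_ASSEMBLER_OPCODE(opcode) \
    ((opcode) == JUMP_FORWARD || \
     (opcode) == JUMP_BACKWARD || \
     (opcode) == JUMP_BACKWARD_NO_INTERRUPT)

/* target_visited stands in for i_target->b_visited: if the target block was
 * already visited during the linear walk, the jump goes backward. */
static int
resolve_jump(int opcode, bool target_visited)
{
    assert(IS_VIRTUAL_OPCODE(opcode));
    bool is_forward = !target_visited;
    if (opcode == JUMP) {
        return is_forward ? JUMP_FORWARD : JUMP_BACKWARD;
    }
    if (opcode == JUMP_NO_INTERRUPT) {
        return is_forward ? JUMP_FORWARD : JUMP_BACKWARD_NO_INTERRUPT;
    }
    return opcode;
}

int
main(void)
{
    /* A backward JUMP_NO_INTERRUPT keeps its no-interrupt form... */
    int op = resolve_jump(JUMP_NO_INTERRUPT, true);
    assert(op == JUMP_BACKWARD_NO_INTERRUPT);
    assert(IS_ASSEMBLER_OPCODE(op));
    /* ...while a forward one becomes a plain JUMP_FORWARD, matching the
     * ternary in the patched normalize_jumps(). */
    assert(resolve_jump(JUMP_NO_INTERRUPT, false) == JUMP_FORWARD);
    printf("resolved opcode: %d\n", op);
    return 0;
}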