summary refs log tree commit diff stats homepage
path: root/py/emitnative.c
diff options
context:
space:
mode:
author: Damien George <damien.p.george@gmail.com> 2018-09-13 22:03:48 +1000
committer: Damien George <damien.p.george@gmail.com> 2018-09-13 22:47:20 +1000
commit: 4f3d9429b54ccc2d36123c861cd916b1ee15c640 (patch)
tree: 864a80337a727a02085128882fea454a0d0700d5 /py/emitnative.c
parent: 9fb1f18cf450216e30d28ccd246a596555b09f87 (diff)
download: micropython-4f3d9429b54ccc2d36123c861cd916b1ee15c640.tar.gz
download: micropython-4f3d9429b54ccc2d36123c861cd916b1ee15c640.zip
py: Fix native functions so they run with their correct globals context.
Prior to this commit a function compiled with the native decorator @micropython.native would not work correctly when accessing global variables, because the globals dict was not being set upon function entry. This commit fixes this problem by, upon function entry, setting as the current globals dict the globals dict context the function was defined within, as per normal Python semantics, and as bytecode does. Upon function exit the original globals dict is restored. In order to restore the globals dict when an exception is raised the native function must guard its internals with an nlr_push/nlr_pop pair. Because this push/pop is relatively expensive, in both C stack usage for the nlr_buf_t and CPU execution time, the implementation here optimises things as much as possible. First, the compiler keeps track of whether a function even needs to access global variables. Using this information the native emitter then generates three different kinds of code: 1. no globals used, no exception handlers: no nlr handling code and no setting of the globals dict. 2. globals used, no exception handlers: an nlr_buf_t is allocated on the C stack but it is not used if the globals dict is unchanged, saving execution time because nlr_push/nlr_pop don't need to run. 3. function has exception handlers, may use globals: an nlr_buf_t is allocated and nlr_push/nlr_pop are always called. In the end, native functions that don't access globals and don't have exception handlers will run more efficiently than those that do. Fixes issue #1573.
Diffstat (limited to 'py/emitnative.c')
-rw-r--r--  py/emitnative.c | 87
1 file changed, 66 insertions(+), 21 deletions(-)
diff --git a/py/emitnative.c b/py/emitnative.c
index 73899b9e90..eb402c06b0 100644
--- a/py/emitnative.c
+++ b/py/emitnative.c
@@ -75,7 +75,8 @@
#define NLR_BUF_IDX_RET_VAL (1)
// Whether the native/viper function needs to be wrapped in an exception handler
-#define NEED_GLOBAL_EXC_HANDLER(emit) ((emit)->scope->exc_stack_size > 0)
+#define NEED_GLOBAL_EXC_HANDLER(emit) ((emit)->scope->exc_stack_size > 0 \
+ || (!(emit)->do_viper_types && ((emit)->scope->scope_flags & MP_SCOPE_FLAG_REFGLOBALS)))
// Whether registers can be used to store locals (only true if there are no
// exception handlers, because otherwise an nlr_jump will restore registers to
@@ -928,30 +929,56 @@ STATIC void emit_native_global_exc_entry(emit_t *emit) {
mp_uint_t start_label = *emit->label_slot + 2;
mp_uint_t global_except_label = *emit->label_slot + 3;
- // Clear the unwind state
- ASM_XOR_REG_REG(emit->as, REG_TEMP0, REG_TEMP0);
- ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_TEMP0);
+ if (!emit->do_viper_types) {
+ // Set new globals
+ ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, offsetof(mp_code_state_t, fun_bc) / sizeof(uintptr_t));
+ ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_ARG_1, offsetof(mp_obj_fun_bc_t, globals) / sizeof(uintptr_t));
+ emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
- // Put PC of start code block into REG_LOCAL_1
- ASM_MOV_REG_PCREL(emit->as, REG_LOCAL_1, start_label);
+ // Save old globals (or NULL if globals didn't change)
+ ASM_MOV_LOCAL_REG(emit->as, offsetof(mp_code_state_t, old_globals) / sizeof(uintptr_t), REG_RET);
+ }
- // Wrap everything in an nlr context
- emit_native_label_assign(emit, nlr_label);
- ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_2, LOCAL_IDX_EXC_HANDLER_UNWIND(emit));
- emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_1, sizeof(nlr_buf_t) / sizeof(uintptr_t));
- emit_call(emit, MP_F_NLR_PUSH);
- ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_LOCAL_2);
- ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, global_except_label, true);
+ if (emit->scope->exc_stack_size == 0) {
+ // Optimisation: if globals didn't change don't push the nlr context
+ ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, false);
- // Clear PC of current code block, and jump there to resume execution
- ASM_XOR_REG_REG(emit->as, REG_TEMP0, REG_TEMP0);
- ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_TEMP0);
- ASM_JUMP_REG(emit->as, REG_LOCAL_1);
+ // Wrap everything in an nlr context
+ emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_1, sizeof(nlr_buf_t) / sizeof(uintptr_t));
+ emit_call(emit, MP_F_NLR_PUSH);
+ ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, true);
+ } else {
+ // Clear the unwind state
+ ASM_XOR_REG_REG(emit->as, REG_TEMP0, REG_TEMP0);
+ ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_TEMP0);
+
+ // Put PC of start code block into REG_LOCAL_1
+ ASM_MOV_REG_PCREL(emit->as, REG_LOCAL_1, start_label);
+
+ // Wrap everything in an nlr context
+ emit_native_label_assign(emit, nlr_label);
+ ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_2, LOCAL_IDX_EXC_HANDLER_UNWIND(emit));
+ emit_get_stack_pointer_to_reg_for_push(emit, REG_ARG_1, sizeof(nlr_buf_t) / sizeof(uintptr_t));
+ emit_call(emit, MP_F_NLR_PUSH);
+ ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_UNWIND(emit), REG_LOCAL_2);
+ ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, global_except_label, true);
+
+ // Clear PC of current code block, and jump there to resume execution
+ ASM_XOR_REG_REG(emit->as, REG_TEMP0, REG_TEMP0);
+ ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_TEMP0);
+ ASM_JUMP_REG(emit->as, REG_LOCAL_1);
+
+ // Global exception handler: check for valid exception handler
+ emit_native_label_assign(emit, global_except_label);
+ ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_1, LOCAL_IDX_EXC_HANDLER_PC(emit));
+ ASM_JUMP_IF_REG_NONZERO(emit->as, REG_LOCAL_1, nlr_label, false);
+ }
- // Global exception handler: check for valid exception handler
- emit_native_label_assign(emit, global_except_label);
- ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_1, LOCAL_IDX_EXC_HANDLER_PC(emit));
- ASM_JUMP_IF_REG_NONZERO(emit->as, REG_LOCAL_1, nlr_label, false);
+ if (!emit->do_viper_types) {
+ // Restore old globals
+ ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, offsetof(mp_code_state_t, old_globals) / sizeof(uintptr_t));
+ emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
+ }
// Re-raise exception out to caller
ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
@@ -967,10 +994,28 @@ STATIC void emit_native_global_exc_exit(emit_t *emit) {
emit_native_label_assign(emit, emit->exit_label);
if (NEED_GLOBAL_EXC_HANDLER(emit)) {
+ if (!emit->do_viper_types) {
+ // Get old globals
+ ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, offsetof(mp_code_state_t, old_globals) / sizeof(uintptr_t));
+
+ if (emit->scope->exc_stack_size == 0) {
+ // Optimisation: if globals didn't change then don't restore them and don't do nlr_pop
+ ASM_JUMP_IF_REG_ZERO(emit->as, REG_ARG_1, emit->exit_label + 1, false);
+ }
+
+ // Restore old globals
+ emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
+ }
+
// Pop the nlr context
emit_call(emit, MP_F_NLR_POP);
adjust_stack(emit, -(mp_int_t)(sizeof(nlr_buf_t) / sizeof(uintptr_t)));
+ if (emit->scope->exc_stack_size == 0) {
+ // Destination label for above optimisation
+ emit_native_label_assign(emit, emit->exit_label + 1);
+ }
+
// Load return value
ASM_MOV_REG_LOCAL(emit->as, REG_RET, LOCAL_IDX_RET_VAL(emit));
}