author     Damien George <damien.p.george@gmail.com>   2014-08-24 16:28:17 +0100
committer  Damien George <damien.p.george@gmail.com>   2014-08-24 16:28:17 +0100
commit     3c658a4e755a75e495303957208486e583ddb270 (patch)
tree       6418fea9bf3dcf4aed2145db94fda4c0de1d0321 /py
parent     25fc41dd316c38df3e2a6cfe4b53322d76dc92fc (diff)
py: Fix bug where GC collected native/viper/asm function data.
Because (for Thumb) a function pointer has the LSB set, pointers to dynamic functions in RAM (eg native, viper or asm functions) were not being traced by the GC. This patch is a comprehensive fix for this. Addresses issue #820.
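The mechanism of the fix: asm_thumb_get_code() no longer returns the pointer with the Thumb bit pre-set; the raw, even-aligned pointer is kept in the new fun_data fields, and the Thumb bit is applied only at call time through the new MICROPY_MAKE_POINTER_CALLABLE macro, which defaults to a no-op in py/mpconfig.h. A Thumb-based port would be expected to override the macro in its mpconfigport.h roughly as in the sketch below; this is illustrative only, since the port-level change is outside this py/-limited diff.

    // Hypothetical port override (e.g. in a Thumb port's mpconfigport.h), not
    // part of this diff: set the LSB just before the call, so the pointer kept
    // in fun_data stays even-aligned and the GC can trace it as a heap pointer.
    #define MICROPY_MAKE_POINTER_CALLABLE(p) ((void *)((mp_uint_t)(p) | 1))

The call sites in py/objfun.c then obtain a callable address with void *fun = MICROPY_MAKE_POINTER_CALLABLE(self->fun_data); before casting it to the appropriate function-pointer type, as shown in the diff below.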
Diffstat (limited to 'py')
-rw-r--r--  py/asmthumb.c     3
-rw-r--r--  py/bc.h           2
-rw-r--r--  py/emitglue.c    27
-rw-r--r--  py/emitglue.h    16
-rw-r--r--  py/mpconfig.h     6
-rw-r--r--  py/obj.c          2
-rw-r--r--  py/obj.h         31
-rw-r--r--  py/objfun.c     174
-rw-r--r--  py/objtype.c      2
-rw-r--r--  py/runtime.h      4
10 files changed, 162 insertions, 105 deletions
diff --git a/py/asmthumb.c b/py/asmthumb.c
index 75ce168f73..1102bb74ab 100644
--- a/py/asmthumb.c
+++ b/py/asmthumb.c
@@ -132,8 +132,7 @@ uint asm_thumb_get_code_size(asm_thumb_t *as) {
}
void *asm_thumb_get_code(asm_thumb_t *as) {
- // need to set low bit to indicate that it's thumb code
- return (void *)(((mp_uint_t)as->code_base) | 1);
+ return as->code_base;
}
/*
diff --git a/py/bc.h b/py/bc.h
index 3704dd7c7c..4793174997 100644
--- a/py/bc.h
+++ b/py/bc.h
@@ -50,7 +50,7 @@ typedef struct _mp_code_state {
} mp_code_state;
mp_vm_return_kind_t mp_execute_bytecode(mp_code_state *code_state, volatile mp_obj_t inject_exc);
-void mp_setup_code_state(mp_code_state *code_state, mp_obj_t self_in, uint n_args, uint n_kw, const mp_obj_t *args);
+void mp_setup_code_state(mp_code_state *code_state, mp_obj_t self_in, mp_uint_t n_args, mp_uint_t n_kw, const mp_obj_t *args);
void mp_bytecode_print(const void *descr, const byte *code, int len);
void mp_bytecode_print2(const byte *code, int len);
diff --git a/py/emitglue.c b/py/emitglue.c
index 5be54a6fc5..5916586aea 100644
--- a/py/emitglue.c
+++ b/py/emitglue.c
@@ -55,7 +55,7 @@ mp_raw_code_t *mp_emit_glue_new_raw_code(void) {
return rc;
}
-void mp_emit_glue_assign_bytecode(mp_raw_code_t *rc, byte *code, uint len, uint n_pos_args, uint n_kwonly_args, qstr *arg_names, uint scope_flags) {
+void mp_emit_glue_assign_bytecode(mp_raw_code_t *rc, byte *code, mp_uint_t len, mp_uint_t n_pos_args, mp_uint_t n_kwonly_args, qstr *arg_names, mp_uint_t scope_flags) {
rc->kind = MP_CODE_BYTECODE;
rc->scope_flags = scope_flags;
rc->n_pos_args = n_pos_args;
@@ -65,7 +65,7 @@ void mp_emit_glue_assign_bytecode(mp_raw_code_t *rc, byte *code, uint len, uint
rc->u_byte.len = len;
#ifdef DEBUG_PRINT
- DEBUG_printf("assign byte code: code=%p len=%u n_pos_args=%d n_kwonly_args=%d flags=%x\n", code, len, n_pos_args, n_kwonly_args, scope_flags);
+ DEBUG_printf("assign byte code: code=%p len=" UINT_FMT " n_pos_args=" UINT_FMT " n_kwonly_args=" UINT_FMT " flags=%x\n", code, len, n_pos_args, n_kwonly_args, (uint)scope_flags);
DEBUG_printf(" arg names:");
for (int i = 0; i < n_pos_args + n_kwonly_args; i++) {
DEBUG_printf(" %s", qstr_str(arg_names[i]));
@@ -74,7 +74,7 @@ void mp_emit_glue_assign_bytecode(mp_raw_code_t *rc, byte *code, uint len, uint
#endif
#if MICROPY_DEBUG_PRINTERS
if (mp_verbose_flag > 0) {
- for (int i = 0; i < 128 && i < len; i++) {
+ for (mp_uint_t i = 0; i < len; i++) {
if (i > 0 && i % 16 == 0) {
printf("\n");
}
@@ -87,22 +87,21 @@ void mp_emit_glue_assign_bytecode(mp_raw_code_t *rc, byte *code, uint len, uint
}
#if MICROPY_EMIT_NATIVE || MICROPY_EMIT_INLINE_THUMB
-void mp_emit_glue_assign_native(mp_raw_code_t *rc, mp_raw_code_kind_t kind, void *fun, uint len, int n_args, mp_uint_t type_sig) {
+void mp_emit_glue_assign_native(mp_raw_code_t *rc, mp_raw_code_kind_t kind, void *fun_data, mp_uint_t fun_len, mp_uint_t n_args, mp_uint_t type_sig) {
assert(kind == MP_CODE_NATIVE_PY || kind == MP_CODE_NATIVE_VIPER || kind == MP_CODE_NATIVE_ASM);
rc->kind = kind;
rc->scope_flags = 0;
rc->n_pos_args = n_args;
- rc->u_native.fun = fun;
+ rc->u_native.fun_data = fun_data;
rc->u_native.type_sig = type_sig;
#ifdef DEBUG_PRINT
- DEBUG_printf("assign native: kind=%d fun=%p len=%u n_args=%d\n", kind, fun, len, n_args);
- byte *fun_data = (byte*)(((mp_uint_t)fun) & (~1)); // need to clear lower bit in case it's thumb code
- for (int i = 0; i < 128 && i < len; i++) {
+ DEBUG_printf("assign native: kind=%d fun=%p len=" UINT_FMT " n_args=" UINT_FMT "\n", kind, fun_data, fun_len, n_args);
+ for (mp_uint_t i = 0; i < fun_len; i++) {
if (i > 0 && i % 16 == 0) {
DEBUG_printf("\n");
}
- DEBUG_printf(" %02x", fun_data[i]);
+ DEBUG_printf(" %02x", ((byte*)fun_data)[i]);
}
DEBUG_printf("\n");
@@ -133,15 +132,15 @@ mp_obj_t mp_make_function_from_raw_code(mp_raw_code_t *rc, mp_obj_t def_args, mp
break;
#if MICROPY_EMIT_NATIVE
case MP_CODE_NATIVE_PY:
- fun = mp_make_function_n(rc->n_pos_args, rc->u_native.fun);
+ fun = mp_obj_new_fun_native(rc->n_pos_args, rc->u_native.fun_data);
break;
case MP_CODE_NATIVE_VIPER:
- fun = mp_obj_new_fun_viper(rc->n_pos_args, rc->u_native.fun, rc->u_native.type_sig);
+ fun = mp_obj_new_fun_viper(rc->n_pos_args, rc->u_native.fun_data, rc->u_native.type_sig);
break;
#endif
#if MICROPY_EMIT_INLINE_THUMB
case MP_CODE_NATIVE_ASM:
- fun = mp_obj_new_fun_asm(rc->n_pos_args, rc->u_native.fun);
+ fun = mp_obj_new_fun_asm(rc->n_pos_args, rc->u_native.fun_data);
break;
#endif
default:
@@ -158,8 +157,8 @@ mp_obj_t mp_make_function_from_raw_code(mp_raw_code_t *rc, mp_obj_t def_args, mp
return fun;
}
-mp_obj_t mp_make_closure_from_raw_code(mp_raw_code_t *rc, uint n_closed_over, const mp_obj_t *args) {
- DEBUG_OP_printf("make_closure_from_raw_code %p %u %p\n", rc, n_closed_over, args);
+mp_obj_t mp_make_closure_from_raw_code(mp_raw_code_t *rc, mp_uint_t n_closed_over, const mp_obj_t *args) {
+ DEBUG_OP_printf("make_closure_from_raw_code %p " UINT_FMT " %p\n", rc, n_closed_over, args);
// make function object
mp_obj_t ffun;
if (n_closed_over & 0x100) {
diff --git a/py/emitglue.h b/py/emitglue.h
index f8363465e2..087b2296e9 100644
--- a/py/emitglue.h
+++ b/py/emitglue.h
@@ -37,17 +37,17 @@ typedef enum {
typedef struct _mp_code_t {
mp_raw_code_kind_t kind : 3;
- uint scope_flags : 7;
- uint n_pos_args : 11;
- uint n_kwonly_args : 11;
+ mp_uint_t scope_flags : 7;
+ mp_uint_t n_pos_args : 11;
+ mp_uint_t n_kwonly_args : 11;
qstr *arg_names;
union {
struct {
byte *code;
- uint len;
+ mp_uint_t len;
} u_byte;
struct {
- void *fun;
+ void *fun_data;
mp_uint_t type_sig; // for viper, compressed as 2-bit types; ret is MSB, then arg0, arg1, etc
} u_native;
};
@@ -55,8 +55,8 @@ typedef struct _mp_code_t {
mp_raw_code_t *mp_emit_glue_new_raw_code(void);
-void mp_emit_glue_assign_bytecode(mp_raw_code_t *rc, byte *code, uint len, uint n_pos_args, uint n_kwonly_args, qstr *arg_names, uint scope_flags);
-void mp_emit_glue_assign_native(mp_raw_code_t *rc, mp_raw_code_kind_t kind, void *f, uint len, int n_args, mp_uint_t type_sig);
+void mp_emit_glue_assign_bytecode(mp_raw_code_t *rc, byte *code, mp_uint_t len, mp_uint_t n_pos_args, mp_uint_t n_kwonly_args, qstr *arg_names, mp_uint_t scope_flags);
+void mp_emit_glue_assign_native(mp_raw_code_t *rc, mp_raw_code_kind_t kind, void *fun_data, mp_uint_t fun_len, mp_uint_t n_args, mp_uint_t type_sig);
mp_obj_t mp_make_function_from_raw_code(mp_raw_code_t *rc, mp_obj_t def_args, mp_obj_t def_kw_args);
-mp_obj_t mp_make_closure_from_raw_code(mp_raw_code_t *rc, uint n_closed_over, const mp_obj_t *args);
+mp_obj_t mp_make_closure_from_raw_code(mp_raw_code_t *rc, mp_uint_t n_closed_over, const mp_obj_t *args);
diff --git a/py/mpconfig.h b/py/mpconfig.h
index 850d05524b..73c015a09b 100644
--- a/py/mpconfig.h
+++ b/py/mpconfig.h
@@ -429,6 +429,12 @@ typedef double mp_float_t;
#define MP_ENDIANNESS_LITTLE (0)
#endif
+// Make a pointer to RAM callable (eg set lower bit for Thumb code)
+// (This scheme won't work if we want to mix Thumb and normal ARM code.)
+#ifndef MICROPY_MAKE_POINTER_CALLABLE
+#define MICROPY_MAKE_POINTER_CALLABLE(p) (p)
+#endif
+
// printf format spec to use for mp_int_t and friends
#ifndef INT_FMT
#ifdef __LP64__
diff --git a/py/obj.c b/py/obj.c
index d8fccfb7b2..eef5cce4e3 100644
--- a/py/obj.c
+++ b/py/obj.c
@@ -161,7 +161,7 @@ mp_int_t mp_obj_hash(mp_obj_t o_in) {
return mp_obj_str_get_hash(o_in);
} else if (MP_OBJ_IS_TYPE(o_in, &mp_type_NoneType)) {
return (mp_int_t)o_in;
- } else if (MP_OBJ_IS_TYPE(o_in, &mp_type_fun_native) || MP_OBJ_IS_TYPE(o_in, &mp_type_fun_bc)) {
+ } else if (MP_OBJ_IS_FUN(o_in)) {
return (mp_int_t)o_in;
} else if (MP_OBJ_IS_TYPE(o_in, &mp_type_tuple)) {
return mp_obj_tuple_hash(o_in);
diff --git a/py/obj.h b/py/obj.h
index 6fe671972f..26a387a2f9 100644
--- a/py/obj.h
+++ b/py/obj.h
@@ -71,9 +71,10 @@ typedef struct _mp_obj_base_t mp_obj_base_t;
//#define MP_OBJ_IS_SMALL_INT(o) ((((mp_int_t)(o)) & 1) != 0)
//#define MP_OBJ_IS_QSTR(o) ((((mp_int_t)(o)) & 3) == 2)
//#define MP_OBJ_IS_OBJ(o) ((((mp_int_t)(o)) & 3) == 0)
-#define MP_OBJ_IS_TYPE(o, t) (MP_OBJ_IS_OBJ(o) && (((mp_obj_base_t*)(o))->type == (t))) // this does not work for checking a string, use below macro for that
+#define MP_OBJ_IS_TYPE(o, t) (MP_OBJ_IS_OBJ(o) && (((mp_obj_base_t*)(o))->type == (t))) // this does not work for checking int, str or fun; use below macros for that
#define MP_OBJ_IS_INT(o) (MP_OBJ_IS_SMALL_INT(o) || MP_OBJ_IS_TYPE(o, &mp_type_int))
#define MP_OBJ_IS_STR(o) (MP_OBJ_IS_QSTR(o) || MP_OBJ_IS_TYPE(o, &mp_type_str))
+#define MP_OBJ_IS_FUN(o) (MP_OBJ_IS_OBJ(o) && (((mp_obj_base_t*)(o))->type->binary_op == mp_obj_fun_binary_op))
#define MP_OBJ_SMALL_INT_VALUE(o) (((mp_int_t)(o)) >> 1)
#define MP_OBJ_NEW_SMALL_INT(small_int) ((mp_obj_t)((((mp_int_t)(small_int)) << 1) | 1))
@@ -84,9 +85,9 @@ typedef struct _mp_obj_base_t mp_obj_base_t;
// These macros are used to declare and define constant function objects
// You can put "static" in front of the definitions to make them local
-#define MP_DECLARE_CONST_FUN_OBJ(obj_name) extern const mp_obj_fun_native_t obj_name
+#define MP_DECLARE_CONST_FUN_OBJ(obj_name) extern const mp_obj_fun_builtin_t obj_name
-#define MP_DEFINE_CONST_FUN_OBJ_VOID_PTR(obj_name, is_kw, n_args_min, n_args_max, fun_name) const mp_obj_fun_native_t obj_name = {{&mp_type_fun_native}, is_kw, n_args_min, n_args_max, (void *)fun_name}
+#define MP_DEFINE_CONST_FUN_OBJ_VOID_PTR(obj_name, is_kw, n_args_min, n_args_max, fun_name) const mp_obj_fun_builtin_t obj_name = {{&mp_type_fun_builtin}, is_kw, n_args_min, n_args_max, (void *)fun_name}
#define MP_DEFINE_CONST_FUN_OBJ_0(obj_name, fun_name) MP_DEFINE_CONST_FUN_OBJ_VOID_PTR(obj_name, false, 0, 0, (mp_fun_0_t)fun_name)
#define MP_DEFINE_CONST_FUN_OBJ_1(obj_name, fun_name) MP_DEFINE_CONST_FUN_OBJ_VOID_PTR(obj_name, false, 1, 1, (mp_fun_1_t)fun_name)
#define MP_DEFINE_CONST_FUN_OBJ_2(obj_name, fun_name) MP_DEFINE_CONST_FUN_OBJ_VOID_PTR(obj_name, false, 2, 2, (mp_fun_2_t)fun_name)
@@ -178,7 +179,6 @@ typedef mp_obj_t (*mp_fun_0_t)(void);
typedef mp_obj_t (*mp_fun_1_t)(mp_obj_t);
typedef mp_obj_t (*mp_fun_2_t)(mp_obj_t, mp_obj_t);
typedef mp_obj_t (*mp_fun_3_t)(mp_obj_t, mp_obj_t, mp_obj_t);
-typedef mp_obj_t (*mp_fun_t)(void);
typedef mp_obj_t (*mp_fun_var_t)(uint n, const mp_obj_t *);
typedef mp_obj_t (*mp_fun_kw_t)(uint n, const mp_obj_t *, mp_map_t *);
@@ -304,7 +304,7 @@ extern const mp_obj_type_t mp_type_zip;
extern const mp_obj_type_t mp_type_array;
extern const mp_obj_type_t mp_type_super;
extern const mp_obj_type_t mp_type_gen_instance;
-extern const mp_obj_type_t mp_type_fun_native;
+extern const mp_obj_type_t mp_type_fun_builtin;
extern const mp_obj_type_t mp_type_fun_bc;
extern const mp_obj_type_t mp_type_module;
extern const mp_obj_type_t mp_type_staticmethod;
@@ -377,9 +377,10 @@ mp_obj_t mp_obj_new_exception_arg1(const mp_obj_type_t *exc_type, mp_obj_t arg);
mp_obj_t mp_obj_new_exception_args(const mp_obj_type_t *exc_type, uint n_args, const mp_obj_t *args);
mp_obj_t mp_obj_new_exception_msg(const mp_obj_type_t *exc_type, const char *msg);
mp_obj_t mp_obj_new_exception_msg_varg(const mp_obj_type_t *exc_type, const char *fmt, ...); // counts args by number of % symbols in fmt, excluding %%; can only handle void* sizes (ie no float/double!)
-mp_obj_t mp_obj_new_fun_bc(uint scope_flags, qstr *args, uint n_pos_args, uint n_kwonly_args, mp_obj_t def_args, mp_obj_t def_kw_args, const byte *code);
-mp_obj_t mp_obj_new_fun_viper(uint n_args, void *fun, mp_uint_t type_sig);
-mp_obj_t mp_obj_new_fun_asm(uint n_args, void *fun);
+mp_obj_t mp_obj_new_fun_bc(mp_uint_t scope_flags, qstr *args, mp_uint_t n_pos_args, mp_uint_t n_kwonly_args, mp_obj_t def_args, mp_obj_t def_kw_args, const byte *code);
+mp_obj_t mp_obj_new_fun_native(mp_uint_t n_args, void *fun_data);
+mp_obj_t mp_obj_new_fun_viper(mp_uint_t n_args, void *fun_data, mp_uint_t type_sig);
+mp_obj_t mp_obj_new_fun_asm(mp_uint_t n_args, void *fun_data);
mp_obj_t mp_obj_new_gen_wrap(mp_obj_t fun);
mp_obj_t mp_obj_new_closure(mp_obj_t fun, uint n_closed, const mp_obj_t *closed);
mp_obj_t mp_obj_new_tuple(uint n, const mp_obj_t *items);
@@ -525,17 +526,15 @@ mp_obj_t mp_obj_new_bytearray_by_ref(uint n, void *items);
// functions
#define MP_OBJ_FUN_ARGS_MAX (0xffff) // to set maximum value in n_args_max below
-typedef struct _mp_obj_fun_native_t { // need this so we can define const objects (to go in ROM)
+typedef struct _mp_obj_fun_builtin_t { // use this to make const objects that go in ROM
mp_obj_base_t base;
bool is_kw : 1;
- uint n_args_min : 15; // inclusive
- uint n_args_max : 16; // inclusive
- void *fun;
- // TODO add mp_map_t *globals
- // for const function objects, make an empty, const map
- // such functions won't be able to access the global scope, but that's probably okay
-} mp_obj_fun_native_t;
+ mp_uint_t n_args_min : 15; // inclusive
+ mp_uint_t n_args_max : 16; // inclusive
+ void *fun; // must be a pointer to a callable function in ROM
+} mp_obj_fun_builtin_t;
+mp_obj_t mp_obj_fun_binary_op(int op, mp_obj_t lhs_in, mp_obj_t rhs_in);
const char *mp_obj_fun_get_name(mp_const_obj_t fun);
const char *mp_obj_code_get_name(const byte *code_info);
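A note on the MP_OBJ_IS_FUN macro added above: rather than comparing an object's type against each concrete function type (builtin, bytecode, and now native, viper and inline-asm), it recognises a function by its binary_op slot, which every function type in this patch sets to the shared mp_obj_fun_binary_op. A minimal usage sketch with a hypothetical object o, mirroring the obj.c and objtype.c changes:

    // Illustrative only: one check now covers every kind of function object.
    if (MP_OBJ_IS_FUN(o)) {
        // o is a builtin, bytecode, native, viper or inline-asm function
    }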
diff --git a/py/objfun.c b/py/objfun.c
index 63dbd4f15e..64c43e3e52 100644
--- a/py/objfun.c
+++ b/py/objfun.c
@@ -47,12 +47,9 @@
#define DEBUG_printf(...) (void)0
#endif
-/******************************************************************************/
-/* native functions */
-
-// mp_obj_fun_native_t defined in obj.h
-
-STATIC mp_obj_t fun_binary_op(int op, mp_obj_t lhs_in, mp_obj_t rhs_in) {
+// This binary_op method is used for all function types, and is also
+// used to determine if an object is of generic function type.
+mp_obj_t mp_obj_fun_binary_op(int op, mp_obj_t lhs_in, mp_obj_t rhs_in) {
switch (op) {
case MP_BINARY_OP_EQUAL:
// These objects can be equal only if it's the same underlying structure,
@@ -62,9 +59,14 @@ STATIC mp_obj_t fun_binary_op(int op, mp_obj_t lhs_in, mp_obj_t rhs_in) {
return MP_OBJ_NULL; // op not supported
}
-STATIC mp_obj_t fun_native_call(mp_obj_t self_in, uint n_args, uint n_kw, const mp_obj_t *args) {
- assert(MP_OBJ_IS_TYPE(self_in, &mp_type_fun_native));
- mp_obj_fun_native_t *self = self_in;
+/******************************************************************************/
+/* builtin functions */
+
+// mp_obj_fun_builtin_t defined in obj.h
+
+STATIC mp_obj_t fun_builtin_call(mp_obj_t self_in, uint n_args, uint n_kw, const mp_obj_t *args) {
+ assert(MP_OBJ_IS_TYPE(self_in, &mp_type_fun_builtin));
+ mp_obj_fun_builtin_t *self = self_in;
// check number of arguments
mp_arg_check_num(n_args, n_kw, self->n_args_min, self->n_args_max, self->is_kw);
@@ -107,26 +109,16 @@ STATIC mp_obj_t fun_native_call(mp_obj_t self_in, uint n_args, uint n_kw, const
}
}
-const mp_obj_type_t mp_type_fun_native = {
+const mp_obj_type_t mp_type_fun_builtin = {
{ &mp_type_type },
.name = MP_QSTR_function,
- .call = fun_native_call,
- .binary_op = fun_binary_op,
+ .call = fun_builtin_call,
+ .binary_op = mp_obj_fun_binary_op,
};
-// fun must have the correct signature for n_args fixed arguments
-mp_obj_t mp_make_function_n(int n_args, void *fun) {
- mp_obj_fun_native_t *o = m_new_obj(mp_obj_fun_native_t);
- o->base.type = &mp_type_fun_native;
- o->is_kw = false;
- o->n_args_min = n_args;
- o->n_args_max = n_args;
- o->fun = fun;
- return o;
-}
-
+#if 0 // currently unused, and semi-obsolete
mp_obj_t mp_make_function_var(int n_args_min, mp_fun_var_t fun) {
- mp_obj_fun_native_t *o = m_new_obj(mp_obj_fun_native_t);
+ mp_obj_fun_builtin_t *o = m_new_obj(mp_obj_fun_builtin_t);
o->base.type = &mp_type_fun_native;
o->is_kw = false;
o->n_args_min = n_args_min;
@@ -137,7 +129,7 @@ mp_obj_t mp_make_function_var(int n_args_min, mp_fun_var_t fun) {
// min and max are inclusive
mp_obj_t mp_make_function_var_between(int n_args_min, int n_args_max, mp_fun_var_t fun) {
- mp_obj_fun_native_t *o = m_new_obj(mp_obj_fun_native_t);
+ mp_obj_fun_builtin_t *o = m_new_obj(mp_obj_fun_builtin_t);
o->base.type = &mp_type_fun_native;
o->is_kw = false;
o->n_args_min = n_args_min;
@@ -145,6 +137,7 @@ mp_obj_t mp_make_function_var_between(int n_args_min, int n_args_max, mp_fun_var
o->fun = fun;
return o;
}
+#endif
/******************************************************************************/
/* byte code functions */
@@ -179,7 +172,7 @@ STATIC void dump_args(const mp_obj_t *a, int sz) {
#define dump_args(...) (void)0
#endif
-STATIC NORETURN void fun_pos_args_mismatch(mp_obj_fun_bc_t *f, uint expected, uint given) {
+STATIC NORETURN void fun_pos_args_mismatch(mp_obj_fun_bc_t *f, mp_uint_t expected, mp_uint_t given) {
#if MICROPY_ERROR_REPORTING == MICROPY_ERROR_REPORTING_TERSE
// Generic message, to be reused for other argument issues
nlr_raise(mp_obj_new_exception_msg(&mp_type_TypeError,
@@ -204,7 +197,7 @@ STATIC NORETURN void fun_pos_args_mismatch(mp_obj_fun_bc_t *f, uint expected, ui
// code_state should have ->ip filled in (pointing past code info block),
// as well as ->n_state.
-void mp_setup_code_state(mp_code_state *code_state, mp_obj_t self_in, uint n_args, uint n_kw, const mp_obj_t *args) {
+void mp_setup_code_state(mp_code_state *code_state, mp_obj_t self_in, mp_uint_t n_args, mp_uint_t n_kw, const mp_obj_t *args) {
// This function is pretty complicated. It's main aim is to be efficient in speed and RAM
// usage for the common case of positional only args.
mp_obj_fun_bc_t *self = self_in;
@@ -243,7 +236,7 @@ void mp_setup_code_state(mp_code_state *code_state, mp_obj_t self_in, uint n_arg
if (n_kw == 0 && !self->has_def_kw_args) {
if (n_args >= self->n_pos_args - self->n_def_args) {
// given enough arguments, but may need to use some default arguments
- for (uint i = n_args; i < self->n_pos_args; i++) {
+ for (mp_uint_t i = n_args; i < self->n_pos_args; i++) {
code_state->state[n_state - 1 - i] = self->extra_args[i - (self->n_pos_args - self->n_def_args)];
}
} else {
@@ -253,7 +246,7 @@ void mp_setup_code_state(mp_code_state *code_state, mp_obj_t self_in, uint n_arg
}
// copy positional args into state
- for (uint i = 0; i < n_args; i++) {
+ for (mp_uint_t i = 0; i < n_args; i++) {
code_state->state[n_state - 1 - i] = args[i];
}
@@ -269,9 +262,9 @@ void mp_setup_code_state(mp_code_state *code_state, mp_obj_t self_in, uint n_arg
*var_pos_kw_args = dict;
}
- for (uint i = 0; i < n_kw; i++) {
+ for (mp_uint_t i = 0; i < n_kw; i++) {
qstr arg_name = MP_OBJ_QSTR_VALUE(kwargs[2 * i]);
- for (uint j = 0; j < self->n_pos_args + self->n_kwonly_args; j++) {
+ for (mp_uint_t j = 0; j < self->n_pos_args + self->n_kwonly_args; j++) {
if (arg_name == self->args[j]) {
if (code_state->state[n_state - 1 - j] != MP_OBJ_NULL) {
nlr_raise(mp_obj_new_exception_msg_varg(&mp_type_TypeError,
@@ -314,7 +307,7 @@ continue2:;
// Check that all mandatory keyword args are specified
// Fill in default kw args if we have them
- for (uint i = 0; i < self->n_kwonly_args; i++) {
+ for (mp_uint_t i = 0; i < self->n_kwonly_args; i++) {
if (code_state->state[n_state - 1 - self->n_pos_args - i] == MP_OBJ_NULL) {
mp_map_elem_t *elem = NULL;
if (self->has_def_kw_args) {
@@ -341,8 +334,8 @@ continue2:;
}
// bytecode prelude: initialise closed over variables
- for (uint n_local = *ip++; n_local > 0; n_local--) {
- uint local_num = *ip++;
+ for (mp_uint_t n_local = *ip++; n_local > 0; n_local--) {
+ mp_uint_t local_num = *ip++;
code_state->state[n_state - 1 - local_num] = mp_obj_new_cell(code_state->state[n_state - 1 - local_num]);
}
@@ -382,7 +375,7 @@ STATIC mp_obj_t fun_bc_call(mp_obj_t self_in, uint n_args, uint n_kw, const mp_o
#endif
// allocate state for locals and stack
- uint state_size = n_state * sizeof(mp_obj_t) + n_exc_stack * sizeof(mp_exc_stack_t);
+ mp_uint_t state_size = n_state * sizeof(mp_obj_t) + n_exc_stack * sizeof(mp_exc_stack_t);
mp_code_state *code_state;
if (state_size > VM_MAX_STATE_ON_STACK) {
code_state = m_new_obj_var(mp_code_state, byte, state_size);
@@ -413,7 +406,7 @@ STATIC mp_obj_t fun_bc_call(mp_obj_t self_in, uint n_args, uint n_kw, const mp_o
if (!(vm_return_kind == MP_VM_RETURN_EXCEPTION && self->n_pos_args + self->n_kwonly_args == 0)) {
// Just check to see that we have at least 1 null object left in the state.
bool overflow = true;
- for (uint i = 0; i < n_state - self->n_pos_args - self->n_kwonly_args; i++) {
+ for (mp_uint_t i = 0; i < n_state - self->n_pos_args - self->n_kwonly_args; i++) {
if (code_state->state[i] == MP_OBJ_NULL) {
overflow = false;
break;
@@ -465,12 +458,12 @@ const mp_obj_type_t mp_type_fun_bc = {
.print = fun_bc_print,
#endif
.call = fun_bc_call,
- .binary_op = fun_binary_op,
+ .binary_op = mp_obj_fun_binary_op,
};
-mp_obj_t mp_obj_new_fun_bc(uint scope_flags, qstr *args, uint n_pos_args, uint n_kwonly_args, mp_obj_t def_args_in, mp_obj_t def_kw_args, const byte *code) {
- uint n_def_args = 0;
- uint n_extra_args = 0;
+mp_obj_t mp_obj_new_fun_bc(mp_uint_t scope_flags, qstr *args, mp_uint_t n_pos_args, mp_uint_t n_kwonly_args, mp_obj_t def_args_in, mp_obj_t def_kw_args, const byte *code) {
+ mp_uint_t n_def_args = 0;
+ mp_uint_t n_extra_args = 0;
mp_obj_tuple_t *def_args = def_args_in;
if (def_args != MP_OBJ_NULL) {
assert(MP_OBJ_IS_TYPE(def_args, &mp_type_tuple));
@@ -501,14 +494,75 @@ mp_obj_t mp_obj_new_fun_bc(uint scope_flags, qstr *args, uint n_pos_args, uint n
}
/******************************************************************************/
+/* native functions */
+
+#if MICROPY_EMIT_NATIVE
+
+typedef struct _mp_obj_fun_native_t {
+ mp_obj_base_t base;
+ mp_uint_t n_args;
+ void *fun_data; // GC must be able to trace this pointer
+ // TODO add mp_map_t *globals
+} mp_obj_fun_native_t;
+
+typedef mp_obj_t (*native_fun_0_t)();
+typedef mp_obj_t (*native_fun_1_t)(mp_obj_t);
+typedef mp_obj_t (*native_fun_2_t)(mp_obj_t, mp_obj_t);
+typedef mp_obj_t (*native_fun_3_t)(mp_obj_t, mp_obj_t, mp_obj_t);
+
+STATIC mp_obj_t fun_native_call(mp_obj_t self_in, uint n_args, uint n_kw, const mp_obj_t *args) {
+ mp_obj_fun_native_t *self = self_in;
+
+ mp_arg_check_num(n_args, n_kw, self->n_args, self->n_args, false);
+
+ void *fun = MICROPY_MAKE_POINTER_CALLABLE(self->fun_data);
+
+ switch (n_args) {
+ case 0:
+ return ((native_fun_0_t)fun)();
+
+ case 1:
+ return ((native_fun_1_t)fun)(args[0]);
+
+ case 2:
+ return ((native_fun_2_t)fun)(args[0], args[1]);
+
+ case 3:
+ return ((native_fun_3_t)fun)(args[0], args[1], args[2]);
+
+ default:
+ assert(0);
+ return mp_const_none;
+ }
+}
+
+STATIC const mp_obj_type_t mp_type_fun_native = {
+ { &mp_type_type },
+ .name = MP_QSTR_function,
+ .call = fun_native_call,
+ .binary_op = mp_obj_fun_binary_op,
+};
+
+mp_obj_t mp_obj_new_fun_native(mp_uint_t n_args, void *fun_data) {
+ assert(0 <= n_args && n_args <= 3);
+ mp_obj_fun_native_t *o = m_new_obj(mp_obj_fun_native_t);
+ o->base.type = &mp_type_fun_native;
+ o->n_args = n_args;
+ o->fun_data = fun_data;
+ return o;
+}
+
+#endif // MICROPY_EMIT_NATIVE
+
+/******************************************************************************/
/* viper functions */
#if MICROPY_EMIT_NATIVE
typedef struct _mp_obj_fun_viper_t {
mp_obj_base_t base;
- int n_args;
- void *fun;
+ mp_uint_t n_args;
+ void *fun_data; // GC must be able to trace this pointer
mp_uint_t type_sig;
} mp_obj_fun_viper_t;
@@ -522,15 +576,17 @@ STATIC mp_obj_t fun_viper_call(mp_obj_t self_in, uint n_args, uint n_kw, const m
mp_arg_check_num(n_args, n_kw, self->n_args, self->n_args, false);
+ void *fun = MICROPY_MAKE_POINTER_CALLABLE(self->fun_data);
+
mp_uint_t ret;
if (n_args == 0) {
- ret = ((viper_fun_0_t)self->fun)();
+ ret = ((viper_fun_0_t)fun)();
} else if (n_args == 1) {
- ret = ((viper_fun_1_t)self->fun)(mp_convert_obj_to_native(args[0], self->type_sig >> 2));
+ ret = ((viper_fun_1_t)fun)(mp_convert_obj_to_native(args[0], self->type_sig >> 2));
} else if (n_args == 2) {
- ret = ((viper_fun_2_t)self->fun)(mp_convert_obj_to_native(args[0], self->type_sig >> 2), mp_convert_obj_to_native(args[1], self->type_sig >> 4));
+ ret = ((viper_fun_2_t)fun)(mp_convert_obj_to_native(args[0], self->type_sig >> 2), mp_convert_obj_to_native(args[1], self->type_sig >> 4));
} else if (n_args == 3) {
- ret = ((viper_fun_3_t)self->fun)(mp_convert_obj_to_native(args[0], self->type_sig >> 2), mp_convert_obj_to_native(args[1], self->type_sig >> 4), mp_convert_obj_to_native(args[2], self->type_sig >> 6));
+ ret = ((viper_fun_3_t)fun)(mp_convert_obj_to_native(args[0], self->type_sig >> 2), mp_convert_obj_to_native(args[1], self->type_sig >> 4), mp_convert_obj_to_native(args[2], self->type_sig >> 6));
} else {
assert(0);
ret = 0;
@@ -543,14 +599,14 @@ STATIC const mp_obj_type_t mp_type_fun_viper = {
{ &mp_type_type },
.name = MP_QSTR_function,
.call = fun_viper_call,
- .binary_op = fun_binary_op,
+ .binary_op = mp_obj_fun_binary_op,
};
-mp_obj_t mp_obj_new_fun_viper(uint n_args, void *fun, mp_uint_t type_sig) {
+mp_obj_t mp_obj_new_fun_viper(mp_uint_t n_args, void *fun_data, mp_uint_t type_sig) {
mp_obj_fun_viper_t *o = m_new_obj(mp_obj_fun_viper_t);
o->base.type = &mp_type_fun_viper;
o->n_args = n_args;
- o->fun = fun;
+ o->fun_data = fun_data;
o->type_sig = type_sig;
return o;
}
@@ -564,8 +620,8 @@ mp_obj_t mp_obj_new_fun_viper(uint n_args, void *fun, mp_uint_t type_sig) {
typedef struct _mp_obj_fun_asm_t {
mp_obj_base_t base;
- int n_args;
- void *fun;
+ mp_uint_t n_args;
+ void *fun_data; // GC must be able to trace this pointer
} mp_obj_fun_asm_t;
typedef mp_uint_t (*inline_asm_fun_0_t)();
@@ -631,15 +687,17 @@ STATIC mp_obj_t fun_asm_call(mp_obj_t self_in, uint n_args, uint n_kw, const mp_
mp_arg_check_num(n_args, n_kw, self->n_args, self->n_args, false);
+ void *fun = MICROPY_MAKE_POINTER_CALLABLE(self->fun_data);
+
mp_uint_t ret;
if (n_args == 0) {
- ret = ((inline_asm_fun_0_t)self->fun)();
+ ret = ((inline_asm_fun_0_t)fun)();
} else if (n_args == 1) {
- ret = ((inline_asm_fun_1_t)self->fun)(convert_obj_for_inline_asm(args[0]));
+ ret = ((inline_asm_fun_1_t)fun)(convert_obj_for_inline_asm(args[0]));
} else if (n_args == 2) {
- ret = ((inline_asm_fun_2_t)self->fun)(convert_obj_for_inline_asm(args[0]), convert_obj_for_inline_asm(args[1]));
+ ret = ((inline_asm_fun_2_t)fun)(convert_obj_for_inline_asm(args[0]), convert_obj_for_inline_asm(args[1]));
} else if (n_args == 3) {
- ret = ((inline_asm_fun_3_t)self->fun)(convert_obj_for_inline_asm(args[0]), convert_obj_for_inline_asm(args[1]), convert_obj_for_inline_asm(args[2]));
+ ret = ((inline_asm_fun_3_t)fun)(convert_obj_for_inline_asm(args[0]), convert_obj_for_inline_asm(args[1]), convert_obj_for_inline_asm(args[2]));
} else {
assert(0);
ret = 0;
@@ -652,14 +710,14 @@ STATIC const mp_obj_type_t mp_type_fun_asm = {
{ &mp_type_type },
.name = MP_QSTR_function,
.call = fun_asm_call,
- .binary_op = fun_binary_op,
+ .binary_op = mp_obj_fun_binary_op,
};
-mp_obj_t mp_obj_new_fun_asm(uint n_args, void *fun) {
+mp_obj_t mp_obj_new_fun_asm(mp_uint_t n_args, void *fun_data) {
mp_obj_fun_asm_t *o = m_new_obj(mp_obj_fun_asm_t);
o->base.type = &mp_type_fun_asm;
o->n_args = n_args;
- o->fun = fun;
+ o->fun_data = fun_data;
return o;
}
diff --git a/py/objtype.c b/py/objtype.c
index 7689e42b25..b760b3240c 100644
--- a/py/objtype.c
+++ b/py/objtype.c
@@ -791,7 +791,7 @@ mp_obj_t mp_obj_new_type(qstr name, mp_obj_t bases_tuple, mp_obj_t locals_dict)
mp_map_elem_t *elem = mp_map_lookup(locals_map, MP_OBJ_NEW_QSTR(MP_QSTR___new__), MP_MAP_LOOKUP);
if (elem != NULL) {
// __new__ slot exists; check if it is a function
- if (MP_OBJ_IS_TYPE(elem->value, &mp_type_fun_native) || MP_OBJ_IS_TYPE(elem->value, &mp_type_fun_bc)) {
+ if (MP_OBJ_IS_FUN(elem->value)) {
// __new__ is a function, wrap it in a staticmethod decorator
elem->value = static_class_method_make_new((mp_obj_t)&mp_type_staticmethod, 1, 0, &elem->value);
}
diff --git a/py/runtime.h b/py/runtime.h
index c46087d145..075353e48e 100644
--- a/py/runtime.h
+++ b/py/runtime.h
@@ -80,10 +80,6 @@ mp_obj_t mp_load_const_dec(qstr qstr);
mp_obj_t mp_load_const_str(qstr qstr);
mp_obj_t mp_load_const_bytes(qstr qstr);
-mp_obj_t mp_make_function_n(int n_args, void *fun); // fun must have the correct signature for n_args fixed arguments
-mp_obj_t mp_make_function_var(int n_args_min, mp_fun_var_t fun);
-mp_obj_t mp_make_function_var_between(int n_args_min, int n_args_max, mp_fun_var_t fun); // min and max are inclusive
-
mp_obj_t mp_call_function_0(mp_obj_t fun);
mp_obj_t mp_call_function_1(mp_obj_t fun, mp_obj_t arg);
mp_obj_t mp_call_function_2(mp_obj_t fun, mp_obj_t arg1, mp_obj_t arg2);