py: Allow viper to have type annotations.

Viper functions can now be annotated with the type of their arguments
and return value, e.g.:

    @micropython.viper
    def f(x:int) -> int:
        return x + 1

Commit 2ac4af6946 (parent 6be0b0a8ec)
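As a quick usage sketch (not part of the commit; the function names are made up, and the set of recognised annotation identifiers, namely object, bool, int and uint, comes from the emitter change in py/emitnative.c below):

    import micropython

    # integer annotations: values are passed and returned as machine words
    @micropython.viper
    def add1(x:int) -> int:
        return x + 1

    # object annotations keep ordinary Python object semantics
    @micropython.viper
    def add_obj(x:object, y:object) -> object:
        return x + y

    print(add1(41))       # 42
    print(add_obj(1, 2))  # 3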
py/compile.c (108)
@@ -48,8 +48,6 @@
 // TODO need to mangle __attr names
 
-#define MICROPY_EMIT_NATIVE (MICROPY_EMIT_X64 || MICROPY_EMIT_THUMB)
-
 typedef enum {
     PN_none = 0,
 #define DEF_RULE(rule, comp, kind, ...) PN_##rule,
@@ -1745,6 +1743,7 @@ void compile_while_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) {
     EMIT_ARG(label_assign, break_label);
 }
 
+#if !MICROPY_EMIT_CPYTHON
 // TODO preload end and step onto stack if they are not constants
 // Note that, as per semantics of for .. range, the final failing value should not be stored in the loop variable
 // And, if the loop never runs, the loop variable should never be assigned
@@ -1801,6 +1800,7 @@ STATIC void compile_for_stmt_optimised_range(compiler_t *comp, mp_parse_node_t p
     EMIT_ARG(label_assign, break_label);
 }
+#endif
 
 void compile_for_stmt(compiler_t *comp, mp_parse_node_struct_t *pns) {
 #if !MICROPY_EMIT_CPYTHON
@@ -2902,11 +2902,10 @@ STATIC void compile_node(compiler_t *comp, mp_parse_node_t pn) {
     }
 }
 
-STATIC void compile_scope_func_lambda_param(compiler_t *comp, mp_parse_node_t pn, pn_kind_t pn_name, pn_kind_t pn_star, pn_kind_t pn_dbl_star, bool allow_annotations) {
+STATIC void compile_scope_func_lambda_param(compiler_t *comp, mp_parse_node_t pn, pn_kind_t pn_name, pn_kind_t pn_star, pn_kind_t pn_dbl_star) {
     // TODO verify that *k and **k are last etc
     qstr param_name = MP_QSTR_NULL;
     uint param_flag = ID_FLAG_IS_PARAM;
-    mp_parse_node_t pn_annotation = MP_PARSE_NODE_NULL;
     if (MP_PARSE_NODE_IS_ID(pn)) {
         param_name = MP_PARSE_NODE_LEAF_ARG(pn);
         if (comp->have_star) {
@@ -2921,24 +2920,6 @@ STATIC void compile_scope_func_lambda_param(compiler_t *comp, mp_parse_node_t pn
         mp_parse_node_struct_t *pns = (mp_parse_node_struct_t*)pn;
         if (MP_PARSE_NODE_STRUCT_KIND(pns) == pn_name) {
             param_name = MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]);
-            //int node_index = 1; unused
-            if (allow_annotations) {
-                if (!MP_PARSE_NODE_IS_NULL(pns->nodes[1])) {
-                    // this parameter has an annotation
-                    pn_annotation = pns->nodes[1];
-                }
-                //node_index = 2; unused
-            }
-            /* this is obsolete now that num dict/default params are calculated in compile_funcdef_param
-            if (!MP_PARSE_NODE_IS_NULL(pns->nodes[node_index])) {
-                // this parameter has a default value
-                if (comp->have_star) {
-                    comp->scope_cur->num_dict_params += 1;
-                } else {
-                    comp->scope_cur->num_default_params += 1;
-                }
-            }
-            */
             if (comp->have_star) {
                 // comes after a star, so counts as a keyword-only parameter
                 comp->scope_cur->num_kwonly_args += 1;
@@ -2957,12 +2938,11 @@ STATIC void compile_scope_func_lambda_param(compiler_t *comp, mp_parse_node_t pn
                 // named star
                 comp->scope_cur->scope_flags |= MP_SCOPE_FLAG_VARARGS;
                 param_name = MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]);
-            } else if (allow_annotations && MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_tfpdef)) {
+            } else if (MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_tfpdef)) {
                 // named star with possible annotation
                 comp->scope_cur->scope_flags |= MP_SCOPE_FLAG_VARARGS;
                 pns = (mp_parse_node_struct_t*)pns->nodes[0];
                 param_name = MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]);
-                pn_annotation = pns->nodes[1];
             } else {
                 // shouldn't happen
                 assert(0);
@@ -2970,10 +2950,6 @@ STATIC void compile_scope_func_lambda_param(compiler_t *comp, mp_parse_node_t pn
         } else if (MP_PARSE_NODE_STRUCT_KIND(pns) == pn_dbl_star) {
             param_name = MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]);
             param_flag = ID_FLAG_IS_PARAM | ID_FLAG_IS_DBL_STAR_PARAM;
-            if (allow_annotations && !MP_PARSE_NODE_IS_NULL(pns->nodes[1])) {
-                // this parameter has an annotation
-                pn_annotation = pns->nodes[1];
-            }
             comp->scope_cur->scope_flags |= MP_SCOPE_FLAG_VARKEYWORDS;
         } else {
             // TODO anything to implement?
@@ -2982,9 +2958,6 @@ STATIC void compile_scope_func_lambda_param(compiler_t *comp, mp_parse_node_t pn
     }
 
     if (param_name != MP_QSTR_NULL) {
-        if (!MP_PARSE_NODE_IS_NULL(pn_annotation)) {
-            // TODO this parameter has an annotation
-        }
         bool added;
         id_info_t *id_info = scope_find_or_add_id(comp->scope_cur, param_name, &added);
         if (!added) {
@@ -2997,11 +2970,58 @@ STATIC void compile_scope_func_lambda_param(compiler_t *comp, mp_parse_node_t pn
     }
 }
 
 STATIC void compile_scope_func_param(compiler_t *comp, mp_parse_node_t pn) {
-    compile_scope_func_lambda_param(comp, pn, PN_typedargslist_name, PN_typedargslist_star, PN_typedargslist_dbl_star, true);
+    compile_scope_func_lambda_param(comp, pn, PN_typedargslist_name, PN_typedargslist_star, PN_typedargslist_dbl_star);
 }
 
 STATIC void compile_scope_lambda_param(compiler_t *comp, mp_parse_node_t pn) {
-    compile_scope_func_lambda_param(comp, pn, PN_varargslist_name, PN_varargslist_star, PN_varargslist_dbl_star, false);
+    compile_scope_func_lambda_param(comp, pn, PN_varargslist_name, PN_varargslist_star, PN_varargslist_dbl_star);
 }
 
+STATIC void compile_scope_func_annotations(compiler_t *comp, mp_parse_node_t pn) {
+    if (!MP_PARSE_NODE_IS_STRUCT(pn)) {
+        // no annotation
+        return;
+    }
+
+    mp_parse_node_struct_t *pns = (mp_parse_node_struct_t*)pn;
+    if (MP_PARSE_NODE_STRUCT_KIND(pns) == PN_typedargslist_name) {
+        // named parameter with possible annotation
+        // fallthrough
+    } else if (MP_PARSE_NODE_STRUCT_KIND(pns) == PN_typedargslist_star) {
+        if (MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_tfpdef)) {
+            // named star with possible annotation
+            pns = (mp_parse_node_struct_t*)pns->nodes[0];
+            // fallthrough
+        } else {
+            // no annotation
+            return;
+        }
+    } else if (MP_PARSE_NODE_STRUCT_KIND(pns) == PN_typedargslist_dbl_star) {
+        // double star with possible annotation
+        // fallthrough
+    } else {
+        // no annotation
+        return;
+    }
+
+    mp_parse_node_t pn_annotation = pns->nodes[1];
+
+    if (!MP_PARSE_NODE_IS_NULL(pn_annotation)) {
+        #if MICROPY_EMIT_NATIVE
+        qstr param_name = MP_PARSE_NODE_LEAF_ARG(pns->nodes[0]);
+        id_info_t *id_info = scope_find(comp->scope_cur, param_name);
+        assert(id_info != NULL);
+
+        if (comp->scope_cur->emit_options == MP_EMIT_OPT_VIPER) {
+            if (MP_PARSE_NODE_IS_ID(pn_annotation)) {
+                qstr arg_type = MP_PARSE_NODE_LEAF_ARG(pn_annotation);
+                EMIT_ARG(set_native_type, MP_EMIT_NATIVE_TYPE_ARG, id_info->local_num, arg_type);
+            } else {
+                compile_syntax_error(comp, pn_annotation, "annotation must be an identifier");
+            }
+        }
+        #endif // MICROPY_EMIT_NATIVE
+    }
+}
+
 STATIC void compile_scope_comp_iter(compiler_t *comp, mp_parse_node_t pn_iter, mp_parse_node_t pn_inner_expr, int l_top, int for_depth) {
@@ -3128,9 +3148,25 @@ STATIC void compile_scope(compiler_t *comp, scope_t *scope, pass_kind_t pass) {
         if (comp->pass == MP_PASS_SCOPE) {
             comp->have_star = false;
             apply_to_single_or_list(comp, pns->nodes[1], PN_typedargslist, compile_scope_func_param);
-        }
+        } else {
+            // compile annotations; only needed on latter compiler passes
+
-        // pns->nodes[2] is return/whole function annotation
+            // argument annotations
+            apply_to_single_or_list(comp, pns->nodes[1], PN_typedargslist, compile_scope_func_annotations);
+
+            // pns->nodes[2] is return/whole function annotation
+            #if MICROPY_EMIT_NATIVE
+            if (scope->emit_options == MP_EMIT_OPT_VIPER) {
+                // nodes[2] can be null or a test-expr
+                if (MP_PARSE_NODE_IS_ID(pns->nodes[2])) {
+                    qstr ret_type = MP_PARSE_NODE_LEAF_ARG(pns->nodes[2]);
+                    EMIT_ARG(set_native_type, MP_EMIT_NATIVE_TYPE_RETURN, 0, ret_type);
+                } else {
+                    compile_syntax_error(comp, pns->nodes[2], "annotation must be an identifier");
+                }
+            }
+            #endif // MICROPY_EMIT_NATIVE
+        }
 
         compile_node(comp, pns->nodes[3]); // 3 is function body
         // emit return if it wasn't the last opcode
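In other words, only a bare identifier is accepted as a viper annotation; any other expression takes the "annotation must be an identifier" error path above. A hypothetical illustration (not part of the commit):

    import micropython

    # accepted: the annotation is a plain identifier
    @micropython.viper
    def ok(x:int) -> int:
        return x

    # rejected at compile time with "annotation must be an identifier"
    # (kept commented out; uncommenting it should raise SyntaxError)
    #@micropython.viper
    #def bad(x:"int") -> int:
    #    return x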
@@ -3589,7 +3625,7 @@ mp_obj_t mp_compile(mp_parse_node_t pn, qstr source_file, uint emit_opt, bool is
                 comp->emit_method_table = &emit_native_thumb_method_table;
 #endif
                 comp->emit = emit_native;
-                comp->emit_method_table->set_native_types(comp->emit, s->emit_options == MP_EMIT_OPT_VIPER);
+                comp->emit_method_table->set_native_type(comp->emit, MP_EMIT_NATIVE_TYPE_ENABLE, s->emit_options == MP_EMIT_OPT_VIPER, 0);
 
                 // native emitters need an extra pass to compute stack size
                 compile_scope(comp, s, MP_PASS_STACK_SIZE);
py/emit.h

@@ -46,10 +46,14 @@ typedef enum {
 
 #define MP_EMIT_BREAK_FROM_FOR (0x8000)
 
+#define MP_EMIT_NATIVE_TYPE_ENABLE (0)
+#define MP_EMIT_NATIVE_TYPE_RETURN (1)
+#define MP_EMIT_NATIVE_TYPE_ARG (2)
+
 typedef struct _emit_t emit_t;
 
 typedef struct _emit_method_table_t {
-    void (*set_native_types)(emit_t *emit, bool do_native_types);
+    void (*set_native_type)(emit_t *emit, mp_uint_t op, mp_uint_t arg1, qstr arg2);
     void (*start_pass)(emit_t *emit, pass_kind_t pass, scope_t *scope);
     void (*end_pass)(emit_t *emit);
     bool (*last_emit_was_return_value)(emit_t *emit);
py/emitbc.c

@@ -265,7 +265,7 @@ STATIC void emit_write_bytecode_byte_signed_label(emit_t* emit, byte b1, uint la
         c[2] = bytecode_offset >> 8;
     }
 
-STATIC void emit_bc_set_native_types(emit_t *emit, bool do_native_types) {
+STATIC void emit_bc_set_native_type(emit_t *emit, mp_uint_t op, mp_uint_t arg1, qstr arg2) {
 }
 
 STATIC void emit_bc_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) {

@@ -872,7 +872,7 @@ STATIC void emit_bc_end_except_handler(emit_t *emit) {
 }
 
 const emit_method_table_t emit_bc_method_table = {
-    emit_bc_set_native_types,
+    emit_bc_set_native_type,
     emit_bc_start_pass,
     emit_bc_end_pass,
     emit_bc_last_emit_was_return_value,
py/emitcpy.c

@@ -63,7 +63,7 @@ emit_t *emit_cpython_new(uint max_num_labels) {
     return emit;
 }
 
-STATIC void emit_cpy_set_native_types(emit_t *emit, bool do_native_types) {
+STATIC void emit_cpy_set_native_type(emit_t *emit, mp_uint_t op, mp_uint_t arg1, qstr arg2) {
 }
 
 STATIC void emit_cpy_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) {

@@ -822,7 +822,7 @@ STATIC void emit_cpy_setup_loop(emit_t *emit, uint label) {
 }
 
 const emit_method_table_t emit_cpython_method_table = {
-    emit_cpy_set_native_types,
+    emit_cpy_set_native_type,
     emit_cpy_start_pass,
     emit_cpy_end_pass,
     emit_cpy_last_emit_was_return_value,
py/emitglue.c

@@ -86,12 +86,14 @@ void mp_emit_glue_assign_bytecode(mp_raw_code_t *rc, byte *code, uint len, uint
 #endif
 }
 
-void mp_emit_glue_assign_native(mp_raw_code_t *rc, mp_raw_code_kind_t kind, void *fun, uint len, int n_args) {
+#if MICROPY_EMIT_NATIVE || MICROPY_EMIT_INLINE_THUMB
+void mp_emit_glue_assign_native(mp_raw_code_t *rc, mp_raw_code_kind_t kind, void *fun, uint len, int n_args, mp_uint_t type_sig) {
     assert(kind == MP_CODE_NATIVE_PY || kind == MP_CODE_NATIVE_VIPER || kind == MP_CODE_NATIVE_ASM);
     rc->kind = kind;
     rc->scope_flags = 0;
     rc->n_pos_args = n_args;
     rc->u_native.fun = fun;
+    rc->u_native.type_sig = type_sig;
 
 #ifdef DEBUG_PRINT
     DEBUG_printf("assign native: kind=%d fun=%p len=%u n_args=%d\n", kind, fun, len, n_args);

@@ -111,6 +113,7 @@ void mp_emit_glue_assign_native(mp_raw_code_t *rc, mp_raw_code_kind_t kind, void
 #endif
 #endif
 }
+#endif
 
 mp_obj_t mp_make_function_from_raw_code(mp_raw_code_t *rc, mp_obj_t def_args, mp_obj_t def_kw_args) {
     DEBUG_OP_printf("make_function_from_raw_code %p\n", rc);

@@ -128,13 +131,19 @@ mp_obj_t mp_make_function_from_raw_code(mp_raw_code_t *rc, mp_obj_t def_args, mp
         case MP_CODE_BYTECODE:
             fun = mp_obj_new_fun_bc(rc->scope_flags, rc->arg_names, rc->n_pos_args, rc->n_kwonly_args, def_args, def_kw_args, rc->u_byte.code);
             break;
 #if MICROPY_EMIT_NATIVE
         case MP_CODE_NATIVE_PY:
             fun = mp_make_function_n(rc->n_pos_args, rc->u_native.fun);
             break;
+        case MP_CODE_NATIVE_VIPER:
+            fun = mp_obj_new_fun_viper(rc->n_pos_args, rc->u_native.fun, rc->u_native.type_sig);
+            break;
 #endif
 #if MICROPY_EMIT_INLINE_THUMB
         case MP_CODE_NATIVE_ASM:
             fun = mp_obj_new_fun_asm(rc->n_pos_args, rc->u_native.fun);
             break;
 #endif
         default:
             // raw code was never set (this should not happen)
             assert(0);
py/emitglue.h

@@ -48,6 +48,7 @@ typedef struct _mp_code_t {
         } u_byte;
         struct {
             void *fun;
+            mp_uint_t type_sig; // for viper, compressed as 2-bit types; return type is in the 2 LSBs, then arg0, arg1, etc
         } u_native;
     };
 } mp_raw_code_t;

@@ -55,7 +56,7 @@ typedef struct _mp_code_t {
 mp_raw_code_t *mp_emit_glue_new_raw_code(void);
 
 void mp_emit_glue_assign_bytecode(mp_raw_code_t *rc, byte *code, uint len, uint n_pos_args, uint n_kwonly_args, qstr *arg_names, uint scope_flags);
-void mp_emit_glue_assign_native(mp_raw_code_t *rc, mp_raw_code_kind_t kind, void *f, uint len, int n_args);
+void mp_emit_glue_assign_native(mp_raw_code_t *rc, mp_raw_code_kind_t kind, void *f, uint len, int n_args, mp_uint_t type_sig);
 
 mp_obj_t mp_make_function_from_raw_code(mp_raw_code_t *rc, mp_obj_t def_args, mp_obj_t def_kw_args);
 mp_obj_t mp_make_closure_from_raw_code(mp_raw_code_t *rc, uint n_closed_over, const mp_obj_t *args);
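For reference, a small Python model (illustrative, not part of the commit) of how type_sig is laid out, matching the packing in emit_native_end_pass below and the unpacking in fun_viper_call in py/objfun.c: the return type occupies the two least-significant bits, then arg0, arg1, and so on, two bits each.

    # 2-bit type codes, mirroring MP_NATIVE_TYPE_* in py/runtime0.h
    OBJ, BOOL, INT, UINT = 0, 1, 2, 3

    def pack_type_sig(ret_type, arg_types):
        # return type in bits 0..1, argument i in bits (2*i+2)..(2*i+3)
        sig = ret_type & 3
        for i, t in enumerate(arg_types):
            sig |= (t & 3) << (i * 2 + 2)
        return sig

    def unpack_type_sig(sig, n_args):
        ret_type = sig & 3
        arg_types = [(sig >> (i * 2 + 2)) & 3 for i in range(n_args)]
        return ret_type, arg_types

    # e.g. "def f(x:int, y:int) -> int" packs to 0b101010
    sig = pack_type_sig(INT, [INT, INT])
    assert sig == 0b101010
    assert unpack_type_sig(sig, 2) == (INT, [INT, INT])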
py/emitinlinethumb.c

@@ -99,7 +99,7 @@ STATIC bool emit_inline_thumb_end_pass(emit_inline_asm_t *emit) {
 
     if (emit->pass == MP_PASS_EMIT) {
         void *f = asm_thumb_get_code(emit->as);
-        mp_emit_glue_assign_native(emit->scope->raw_code, MP_CODE_NATIVE_ASM, f, asm_thumb_get_code_size(emit->as), emit->scope->num_pos_args);
+        mp_emit_glue_assign_native(emit->scope->raw_code, MP_CODE_NATIVE_ASM, f, asm_thumb_get_code_size(emit->as), emit->scope->num_pos_args, 0);
     }
 
     return emit->success;
py/emitnative.c

@@ -126,10 +126,11 @@ typedef enum {
 } stack_info_kind_t;
 
 typedef enum {
+    VTYPE_PYOBJ = MP_NATIVE_TYPE_OBJ,
+    VTYPE_BOOL = MP_NATIVE_TYPE_BOOL,
+    VTYPE_INT = MP_NATIVE_TYPE_INT,
+    VTYPE_UINT = MP_NATIVE_TYPE_UINT,
     VTYPE_UNBOUND,
-    VTYPE_PYOBJ,
-    VTYPE_BOOL,
-    VTYPE_INT,
     VTYPE_PTR,
     VTYPE_PTR_NONE,
     VTYPE_BUILTIN_V_INT,
@@ -149,6 +150,8 @@ struct _emit_t {
 
     bool do_viper_types;
 
+    vtype_kind_t return_vtype;
+
     uint local_vtype_alloc;
     vtype_kind_t *local_vtype;
 
@@ -190,8 +193,30 @@ void EXPORT_FUN(free)(emit_t *emit) {
     m_del_obj(emit_t, emit);
 }
 
-STATIC void emit_native_set_viper_types(emit_t *emit, bool do_viper_types) {
-    emit->do_viper_types = do_viper_types;
+STATIC void emit_native_set_native_type(emit_t *emit, mp_uint_t op, mp_uint_t arg1, qstr arg2) {
+    switch (op) {
+        case MP_EMIT_NATIVE_TYPE_ENABLE:
+            emit->do_viper_types = arg1;
+            break;
+
+        default: {
+            vtype_kind_t type;
+            switch (arg2) {
+                case MP_QSTR_object: type = VTYPE_PYOBJ; break;
+                case MP_QSTR_bool: type = VTYPE_BOOL; break;
+                case MP_QSTR_int: type = VTYPE_INT; break;
+                case MP_QSTR_uint: type = VTYPE_UINT; break;
+                default: printf("ViperTypeError: unknown type %s\n", qstr_str(arg2)); return;
+            }
+            if (op == MP_EMIT_NATIVE_TYPE_RETURN) {
+                emit->return_vtype = type;
+            } else {
+                assert(arg1 < emit->local_vtype_alloc);
+                emit->local_vtype[arg1] = type;
+            }
+            break;
+        }
+    }
 }
 
 STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) {
@@ -214,23 +239,14 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
         emit->stack_info = m_new(stack_info_t, emit->stack_info_alloc);
     }
 
-    if (emit->do_viper_types) {
-        // TODO set types of arguments based on type signature
-        for (int i = 0; i < emit->local_vtype_alloc; i++) {
-            emit->local_vtype[i] = VTYPE_UNBOUND;
-        }
-        for (int i = 0; i < emit->stack_info_alloc; i++) {
-            emit->stack_info[i].kind = STACK_VALUE;
-            emit->stack_info[i].vtype = VTYPE_UNBOUND;
-        }
-    } else {
-        for (int i = 0; i < emit->local_vtype_alloc; i++) {
-            emit->local_vtype[i] = VTYPE_PYOBJ;
-        }
-        for (int i = 0; i < emit->stack_info_alloc; i++) {
-            emit->stack_info[i].kind = STACK_VALUE;
-            emit->stack_info[i].vtype = VTYPE_PYOBJ;
-        }
-    }
+    // set default type for return and arguments
+    emit->return_vtype = VTYPE_PYOBJ;
+    for (int i = 0; i < emit->local_vtype_alloc; i++) {
+        emit->local_vtype[i] = VTYPE_PYOBJ;
+    }
+    for (int i = 0; i < emit->stack_info_alloc; i++) {
+        emit->stack_info[i].kind = STACK_VALUE;
+        emit->stack_info[i].vtype = VTYPE_PYOBJ;
+    }
 
 #if N_X64
@@ -310,11 +326,20 @@ STATIC void emit_native_end_pass(emit_t *emit) {
     if (emit->pass == MP_PASS_EMIT) {
 #if N_X64
         void *f = asm_x64_get_code(emit->as);
-        mp_emit_glue_assign_native(emit->scope->raw_code, emit->do_viper_types ? MP_CODE_NATIVE_VIPER : MP_CODE_NATIVE_PY, f, asm_x64_get_code_size(emit->as), emit->scope->num_pos_args);
+        mp_uint_t f_len = asm_x64_get_code_size(emit->as);
 #elif N_THUMB
         void *f = asm_thumb_get_code(emit->as);
-        mp_emit_glue_assign_native(emit->scope->raw_code, emit->do_viper_types ? MP_CODE_NATIVE_VIPER : MP_CODE_NATIVE_PY, f, asm_thumb_get_code_size(emit->as), emit->scope->num_pos_args);
+        mp_uint_t f_len = asm_thumb_get_code_size(emit->as);
 #endif
+
+        // compute type signature
+        // TODO check that viper types here convert correctly to valid types for emit glue
+        mp_uint_t type_sig = emit->return_vtype & 3;
+        for (mp_uint_t i = 0; i < emit->scope->num_pos_args; i++) {
+            type_sig |= (emit->local_vtype[i] & 3) << (i * 2 + 2);
+        }
+
+        mp_emit_glue_assign_native(emit->scope->raw_code, emit->do_viper_types ? MP_CODE_NATIVE_VIPER : MP_CODE_NATIVE_PY, f, f_len, emit->scope->num_pos_args, type_sig);
     }
 }
@@ -1394,7 +1419,9 @@ STATIC void emit_native_return_value(emit_t *emit) {
     vtype_kind_t vtype;
     emit_pre_pop_reg(emit, &vtype, REG_RET);
     if (emit->do_viper_types) {
-        assert(vtype == VTYPE_PTR_NONE);
+        if (vtype != emit->return_vtype) {
+            printf("ViperTypeError: incompatible return type\n");
+        }
     } else {
         assert(vtype == VTYPE_PYOBJ);
     }
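One consequence of the check above: if the body's return value has a different viper type than the declared return annotation, the emitter now prints a ViperTypeError message instead of tripping an assert. A hypothetical example (behaviour inferred from the code above, not tested here):

    import micropython

    @micropython.viper
    def f() -> int:
        # the body returns an object while the annotation says int, so
        # compiling this is expected to print
        # "ViperTypeError: incompatible return type"
        return []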
@@ -1444,7 +1471,7 @@ STATIC void emit_native_end_except_handler(emit_t *emit) {
 }
 
 const emit_method_table_t EXPORT_FUN(method_table) = {
-    emit_native_set_viper_types,
+    emit_native_set_native_type,
     emit_native_start_pass,
     emit_native_end_pass,
     emit_native_last_emit_was_return_value,
py/mpconfig.h

@@ -111,6 +111,9 @@
 #define MICROPY_EMIT_INLINE_THUMB (0)
 #endif
 
+// Convenience definition for whether any native emitter is enabled
+#define MICROPY_EMIT_NATIVE (MICROPY_EMIT_X64 || MICROPY_EMIT_THUMB)
+
 /*****************************************************************************/
 /* Compiler configuration */
 
py/obj.h (1)

@@ -378,6 +378,7 @@ mp_obj_t mp_obj_new_exception_args(const mp_obj_type_t *exc_type, uint n_args, c
 mp_obj_t mp_obj_new_exception_msg(const mp_obj_type_t *exc_type, const char *msg);
 mp_obj_t mp_obj_new_exception_msg_varg(const mp_obj_type_t *exc_type, const char *fmt, ...); // counts args by number of % symbols in fmt, excluding %%; can only handle void* sizes (ie no float/double!)
 mp_obj_t mp_obj_new_fun_bc(uint scope_flags, qstr *args, uint n_pos_args, uint n_kwonly_args, mp_obj_t def_args, mp_obj_t def_kw_args, const byte *code);
+mp_obj_t mp_obj_new_fun_viper(uint n_args, void *fun, mp_uint_t type_sig);
 mp_obj_t mp_obj_new_fun_asm(uint n_args, void *fun);
 mp_obj_t mp_obj_new_gen_wrap(mp_obj_t fun);
 mp_obj_t mp_obj_new_closure(mp_obj_t fun, uint n_closed, const mp_obj_t *closed);
py/objfun.c (83)

@@ -500,9 +500,90 @@ mp_obj_t mp_obj_new_fun_bc(uint scope_flags, qstr *args, uint n_pos_args, uint n
     return o;
 }
 
+/******************************************************************************/
+/* viper functions */
+
+#if MICROPY_EMIT_NATIVE
+
+typedef struct _mp_obj_fun_viper_t {
+    mp_obj_base_t base;
+    int n_args;
+    void *fun;
+    mp_uint_t type_sig;
+} mp_obj_fun_viper_t;
+
+typedef mp_uint_t (*viper_fun_0_t)();
+typedef mp_uint_t (*viper_fun_1_t)(mp_uint_t);
+typedef mp_uint_t (*viper_fun_2_t)(mp_uint_t, mp_uint_t);
+typedef mp_uint_t (*viper_fun_3_t)(mp_uint_t, mp_uint_t, mp_uint_t);
+
+// convert a Micro Python object to a valid value for viper, based on wanted type
+STATIC mp_uint_t convert_obj_for_viper(mp_obj_t obj, mp_uint_t type) {
+    switch (type & 3) {
+        case MP_NATIVE_TYPE_OBJ: return (mp_uint_t)obj;
+        case MP_NATIVE_TYPE_BOOL:
+        case MP_NATIVE_TYPE_INT:
+        case MP_NATIVE_TYPE_UINT: return mp_obj_get_int(obj);
+        default: assert(0); return 0;
+    }
+}
+
+// convert a return value from viper to a Micro Python object based on viper return type
+STATIC mp_obj_t convert_val_from_viper(mp_uint_t val, mp_uint_t type) {
+    switch (type & 3) {
+        case MP_NATIVE_TYPE_OBJ: return (mp_obj_t)val;
+        case MP_NATIVE_TYPE_BOOL: return MP_BOOL(val);
+        case MP_NATIVE_TYPE_INT: return mp_obj_new_int(val);
+        case MP_NATIVE_TYPE_UINT: return mp_obj_new_int_from_uint(val);
+        default: assert(0); return mp_const_none;
+    }
+}
+
+STATIC mp_obj_t fun_viper_call(mp_obj_t self_in, uint n_args, uint n_kw, const mp_obj_t *args) {
+    mp_obj_fun_viper_t *self = self_in;
+
+    mp_arg_check_num(n_args, n_kw, self->n_args, self->n_args, false);
+
+    mp_uint_t ret;
+    if (n_args == 0) {
+        ret = ((viper_fun_0_t)self->fun)();
+    } else if (n_args == 1) {
+        ret = ((viper_fun_1_t)self->fun)(convert_obj_for_viper(args[0], self->type_sig >> 2));
+    } else if (n_args == 2) {
+        ret = ((viper_fun_2_t)self->fun)(convert_obj_for_viper(args[0], self->type_sig >> 2), convert_obj_for_viper(args[1], self->type_sig >> 4));
+    } else if (n_args == 3) {
+        ret = ((viper_fun_3_t)self->fun)(convert_obj_for_viper(args[0], self->type_sig >> 2), convert_obj_for_viper(args[1], self->type_sig >> 4), convert_obj_for_viper(args[2], self->type_sig >> 6));
+    } else {
+        assert(0);
+        ret = 0;
+    }
+
+    return convert_val_from_viper(ret, self->type_sig);
+}
+
+STATIC const mp_obj_type_t mp_type_fun_viper = {
+    { &mp_type_type },
+    .name = MP_QSTR_function,
+    .call = fun_viper_call,
+    .binary_op = fun_binary_op,
+};
+
+mp_obj_t mp_obj_new_fun_viper(uint n_args, void *fun, mp_uint_t type_sig) {
+    mp_obj_fun_viper_t *o = m_new_obj(mp_obj_fun_viper_t);
+    o->base.type = &mp_type_fun_viper;
+    o->n_args = n_args;
+    o->fun = fun;
+    o->type_sig = type_sig;
+    return o;
+}
+
+#endif // MICROPY_EMIT_NATIVE
+
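To make the call path concrete, here is a small Python model (illustrative only) of what fun_viper_call above does for a two-argument function: each incoming object is converted according to its 2-bit slot in type_sig, the machine-level function is called, and the raw return word is converted back according to the return slot.

    OBJ, BOOL, INT, UINT = 0, 1, 2, 3

    def convert_obj_for_viper(obj, t):
        # object type passes the object through; bool/int/uint become machine ints
        return obj if (t & 3) == OBJ else int(obj)

    def convert_val_from_viper(val, t):
        t &= 3
        if t == OBJ:
            return val
        if t == BOOL:
            return bool(val)
        return val  # int/uint: make an integer object from the machine word

    def viper_call_2(fun, type_sig, a, b):
        ret = fun(convert_obj_for_viper(a, type_sig >> 2),
                  convert_obj_for_viper(b, type_sig >> 4))
        return convert_val_from_viper(ret, type_sig)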
 /******************************************************************************/
 /* inline assembler functions */
 
+#if MICROPY_EMIT_INLINE_THUMB
+
 typedef struct _mp_obj_fun_asm_t {
     mp_obj_base_t base;
     int n_args;

@@ -603,3 +684,5 @@ mp_obj_t mp_obj_new_fun_asm(uint n_args, void *fun) {
     o->fun = fun;
     return o;
 }
+
+#endif // MICROPY_EMIT_INLINE_THUMB
py/parse.c

@@ -578,7 +578,7 @@ mp_parse_node_t mp_parse(mp_lexer_t *lex, mp_parse_input_kind_t input_kind, mp_p
                 // never emit these rules if they have only 1 argument
                 // NOTE: can't put atom_paren here because we need it to distinguish, for example, [a,b] from [(a,b)]
                 // TODO possibly put varargslist_name, varargslist_equal here as well
-                if (rule->rule_id == RULE_else_stmt || rule->rule_id == RULE_testlist_comp_3b || rule->rule_id == RULE_import_as_names_paren || rule->rule_id == RULE_typedargslist_name || rule->rule_id == RULE_typedargslist_colon || rule->rule_id == RULE_typedargslist_equal || rule->rule_id == RULE_dictorsetmaker_colon || rule->rule_id == RULE_classdef_2 || rule->rule_id == RULE_with_item_as || rule->rule_id == RULE_assert_stmt_extra || rule->rule_id == RULE_as_name || rule->rule_id == RULE_raise_stmt_from || rule->rule_id == RULE_vfpdef) {
+                if (rule->rule_id == RULE_else_stmt || rule->rule_id == RULE_testlist_comp_3b || rule->rule_id == RULE_import_as_names_paren || rule->rule_id == RULE_typedargslist_name || rule->rule_id == RULE_typedargslist_colon || rule->rule_id == RULE_typedargslist_equal || rule->rule_id == RULE_dictorsetmaker_colon || rule->rule_id == RULE_classdef_2 || rule->rule_id == RULE_with_item_as || rule->rule_id == RULE_assert_stmt_extra || rule->rule_id == RULE_as_name || rule->rule_id == RULE_raise_stmt_from || rule->rule_id == RULE_vfpdef || rule->rule_id == RULE_funcdefrettype) {
                     emit_rule = false;
                 }
py/qstrdefs.h

@@ -67,9 +67,13 @@ Q(__lt__)
 
 Q(micropython)
 Q(bytecode)
 Q(const)
 
 #if MICROPY_EMIT_X64 || MICROPY_EMIT_THUMB
 Q(native)
 Q(viper)
 Q(const)
+Q(uint)
 #endif
 
 #if MICROPY_EMIT_INLINE_THUMB
 Q(asm_thumb)
py/runtime0.h

@@ -39,6 +39,12 @@
 */
 #define MP_SCOPE_FLAG_NOFREE 0x40
 
+// types for native (viper) function signature
+#define MP_NATIVE_TYPE_OBJ (0x00)
+#define MP_NATIVE_TYPE_BOOL (0x01)
+#define MP_NATIVE_TYPE_INT (0x02)
+#define MP_NATIVE_TYPE_UINT (0x03)
+
 typedef enum {
     MP_UNARY_OP_BOOL, // __bool__
     MP_UNARY_OP_LEN, // __len__
tests/micropython/viper.py (20, new file)

@@ -0,0 +1,20 @@
+import micropython
+
+# viper function taking and returning ints
+@micropython.viper
+def f(x:int, y:int) -> int:
+    return x + y + 3
+
+# viper function taking and returning objects
+@micropython.viper
+def g(x:object, y:object) -> object:
+    return x + y
+
+# this doesn't work at the moment
+#@micropython.viper
+#def h() -> uint:
+#    return -1
+
+print(f(1, 2))
+print(g(1, 2))
+#print(h())
tests/micropython/viper.py.exp (2, new file)

@@ -0,0 +1,2 @@
+6
+3