py/emitnative: Implement yield and yield-from in native emitter.

This commit adds first-class support for yield and yield-from in the native emitter, including send and throw support, and yields enclosed in exception handlers (which requires pulling down the NLR stack before yielding, then rebuilding it when resuming). This has been fully tested and is working on unix x86 and x86-64, and stm32. Basic tests have also been done with the esp8266 port. Performance of existing native code is unchanged.

commit cc2bd63c57
parent 8fec6f5434

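Note: the calling convention the commit establishes is easiest to keep in mind up front. The typedef below is quoted from the mp_obj_gen_resume() hunk near the end of this diff; the comment is an editorial summary.

    // Resume entry point of a compiled native generator. mp_obj_gen_resume()
    // calls it with the generator's mp_code_state_t and an optional exception
    // object to inject (MP_OBJ_NULL for a plain resume); it returns an
    // mp_vm_return_kind_t: MP_VM_RETURN_NORMAL, _YIELD or _EXCEPTION.
    typedef uintptr_t (*mp_fun_native_gen_t)(void*, mp_obj_t);
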
py/compile.c

@@ -1703,6 +1703,7 @@ STATIC void compile_yield_from(compiler_t *comp) {
     EMIT_ARG(get_iter, false);
     EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE);
     EMIT_ARG(yield, MP_EMIT_YIELD_FROM);
+    reserve_labels_for_native(comp, 3);
 }
 
 #if MICROPY_PY_ASYNC_AWAIT

@@ -2634,6 +2635,7 @@ STATIC void compile_yield_expr(compiler_t *comp, mp_parse_node_struct_t *pns) {
     if (MP_PARSE_NODE_IS_NULL(pns->nodes[0])) {
         EMIT_ARG(load_const_tok, MP_TOKEN_KW_NONE);
         EMIT_ARG(yield, MP_EMIT_YIELD_VALUE);
+        reserve_labels_for_native(comp, 1);
     } else if (MP_PARSE_NODE_IS_STRUCT_KIND(pns->nodes[0], PN_yield_arg_from)) {
         pns = (mp_parse_node_struct_t*)pns->nodes[0];
         compile_node(comp, pns->nodes[0]);
@@ -2641,6 +2643,7 @@ STATIC void compile_yield_expr(compiler_t *comp, mp_parse_node_struct_t *pns) {
     } else {
         compile_node(comp, pns->nodes[0]);
         EMIT_ARG(yield, MP_EMIT_YIELD_VALUE);
+        reserve_labels_for_native(comp, 1);
     }
 }
 
@@ -2873,6 +2876,7 @@ STATIC void compile_scope_comp_iter(compiler_t *comp, mp_parse_node_struct_t *pn
     compile_node(comp, pn_inner_expr);
     if (comp->scope_cur->kind == SCOPE_GEN_EXPR) {
         EMIT_ARG(yield, MP_EMIT_YIELD_VALUE);
+        reserve_labels_for_native(comp, 1);
         EMIT(pop_top);
     } else {
         EMIT_ARG(store_comp, comp->scope_cur->kind, 4 * for_depth + 5);

py/emitglue.c

@@ -136,6 +136,10 @@ mp_obj_t mp_make_function_from_raw_code(const mp_raw_code_t *rc, mp_obj_t def_ar
         case MP_CODE_NATIVE_PY:
         case MP_CODE_NATIVE_VIPER:
             fun = mp_obj_new_fun_native(def_args, def_kw_args, rc->data.u_native.fun_data, rc->data.u_native.const_table);
+            // Check for a generator function, and if so change the type of the object
+            if ((rc->scope_flags & MP_SCOPE_FLAG_GENERATOR) != 0) {
+                ((mp_obj_base_t*)MP_OBJ_TO_PTR(fun))->type = &mp_type_native_gen_wrap;
+            }
             break;
         #endif
         #if MICROPY_EMIT_INLINE_ASM

py/emitnative.c (345 changed lines)

@@ -65,6 +65,14 @@
 //  emit->stack_start:          Python object stack             | emit->n_state
 //                              locals (reversed, L0 at end)    |
 //
+// C stack layout for native generator functions:
+//  0=emit->stack_start:        nlr_buf_t
+//
+// Then REG_GENERATOR_STATE points to:
+//  0=emit->code_state_start:   mp_code_state_t
+//  emit->stack_start:          Python object stack             | emit->n_state
+//                              locals (reversed, L0 at end)    |
+//
 // C stack layout for viper functions:
 //  0:                          nlr_buf_t [optional]
 //  emit->code_state_start:     fun_obj, old_globals [optional]

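Note: a struct view of that generator layout may help when reading the emitter changes below. This is hypothetical and not in the commit; mp_code_state_t is the real structure (py/bc.h), the wrapper name is invented for illustration.

    typedef struct _native_gen_state_sketch_t {
        mp_code_state_t code_state; // fun_bc, ip (reused as resume PC), sp, ...
        // followed by n_state words: Python object stack, then locals
        // (reversed, L0 at end), exactly as in the layout comment above
    } native_gen_state_sketch_t;
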
@@ -81,12 +89,12 @@
 
 // Whether the native/viper function needs to be wrapped in an exception handler
 #define NEED_GLOBAL_EXC_HANDLER(emit) ((emit)->scope->exc_stack_size > 0 \
-    || ((emit)->scope->scope_flags & MP_SCOPE_FLAG_REFGLOBALS))
+    || ((emit)->scope->scope_flags & (MP_SCOPE_FLAG_GENERATOR | MP_SCOPE_FLAG_REFGLOBALS)))
 
 // Whether registers can be used to store locals (only true if there are no
 // exception handlers, because otherwise an nlr_jump will restore registers to
 // their state at the start of the function and updates to locals will be lost)
-#define CAN_USE_REGS_FOR_LOCALS(emit) ((emit)->scope->exc_stack_size == 0)
+#define CAN_USE_REGS_FOR_LOCALS(emit) ((emit)->scope->exc_stack_size == 0 && !(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR))
 
 // Indices within the local C stack for various variables
 #define LOCAL_IDX_EXC_VAL(emit) (NLR_BUF_IDX_RET_VAL)

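Note: summarising the two macro changes (editorial, not from the commit message): a generator scope always takes the NLR-wrapped code path, and its locals are never cached in registers, because they must survive in the heap-allocated state across a yield.

    // For any scope with MP_SCOPE_FLAG_GENERATOR set:
    //   NEED_GLOBAL_EXC_HANDLER(emit)  -> true   (always wrapped in nlr handler)
    //   CAN_USE_REGS_FOR_LOCALS(emit)  -> false  (locals live in the heap state)
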
@@ -95,18 +103,14 @@
 #define LOCAL_IDX_RET_VAL(emit) (NLR_BUF_IDX_LOCAL_3)
 #define LOCAL_IDX_FUN_OBJ(emit) ((emit)->code_state_start + offsetof(mp_code_state_t, fun_bc) / sizeof(uintptr_t))
 #define LOCAL_IDX_OLD_GLOBALS(emit) ((emit)->code_state_start + offsetof(mp_code_state_t, ip) / sizeof(uintptr_t))
+#define LOCAL_IDX_GEN_PC(emit) ((emit)->code_state_start + offsetof(mp_code_state_t, ip) / sizeof(uintptr_t))
 #define LOCAL_IDX_LOCAL_VAR(emit, local_num) ((emit)->stack_start + (emit)->n_state - 1 - (local_num))
 
+#define REG_GENERATOR_STATE (REG_LOCAL_3)
+
 // number of arguments to viper functions are limited to this value
 #define REG_ARG_NUM (4)
 
-// define additional generic helper macros
-#define ASM_MOV_LOCAL_IMM_VIA(as, local_num, imm, reg_temp) \
-    do { \
-        ASM_MOV_REG_IMM((as), (reg_temp), (imm)); \
-        ASM_MOV_LOCAL_REG((as), (local_num), (reg_temp)); \
-    } while (false)
-
 #define EMIT_NATIVE_VIPER_TYPE_ERROR(emit, ...) do { \
         *emit->error_slot = mp_obj_new_exception_msg_varg(&mp_type_ViperTypeError, __VA_ARGS__); \
     } while (0)

@@ -202,6 +206,7 @@ struct _emit_t {
     exc_stack_entry_t *exc_stack;
 
     int prelude_offset;
+    int start_offset;
     int n_state;
     uint16_t code_state_start;
     uint16_t stack_start;

@@ -252,6 +257,37 @@ STATIC void emit_call_with_imm_arg(emit_t *emit, mp_fun_kind_t fun_kind, mp_int_
 STATIC void emit_native_load_fast(emit_t *emit, qstr qst, mp_uint_t local_num);
 STATIC void emit_native_store_fast(emit_t *emit, qstr qst, mp_uint_t local_num);
 
+STATIC void emit_native_mov_state_reg(emit_t *emit, int local_num, int reg_src) {
+    if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
+        ASM_STORE_REG_REG_OFFSET(emit->as, reg_src, REG_GENERATOR_STATE, local_num);
+    } else {
+        ASM_MOV_LOCAL_REG(emit->as, local_num, reg_src);
+    }
+}
+
+STATIC void emit_native_mov_reg_state(emit_t *emit, int reg_dest, int local_num) {
+    if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
+        ASM_LOAD_REG_REG_OFFSET(emit->as, reg_dest, REG_GENERATOR_STATE, local_num);
+    } else {
+        ASM_MOV_REG_LOCAL(emit->as, reg_dest, local_num);
+    }
+}
+
+STATIC void emit_native_mov_reg_state_addr(emit_t *emit, int reg_dest, int local_num) {
+    if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
+        ASM_MOV_REG_IMM(emit->as, reg_dest, local_num * ASM_WORD_SIZE);
+        ASM_ADD_REG_REG(emit->as, reg_dest, REG_GENERATOR_STATE);
+    } else {
+        ASM_MOV_REG_LOCAL_ADDR(emit->as, reg_dest, local_num);
+    }
+}
+
+#define emit_native_mov_state_imm_via(emit, local_num, imm, reg_temp) \
+    do { \
+        ASM_MOV_REG_IMM((emit)->as, (reg_temp), (imm)); \
+        emit_native_mov_state_reg((emit), (local_num), (reg_temp)); \
+    } while (false)
+
 STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scope) {
     DEBUG_printf("start_pass(pass=%u, scope=%p)\n", pass, scope);
 

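Note: these helpers are what the mechanical ASM_MOV_* replacements in the rest of this file go through. A representative before/after, taken from the need_reg_single() hunk below:

    // before: spilling a stack item always targeted a C-stack slot
    //     ASM_MOV_LOCAL_REG(emit->as, emit->stack_start + i, si->data.u_reg);
    // after: same call site; for generators the store goes through
    // REG_GENERATOR_STATE into the heap-allocated state instead
    //     emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
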
@@ -392,7 +428,7 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
         if (i < REG_LOCAL_NUM && CAN_USE_REGS_FOR_LOCALS(emit) && (i != 2 || emit->scope->num_pos_args == 3)) {
             ASM_MOV_REG_REG(emit->as, reg_local_table[i], r);
         } else {
-            ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_LOCAL_VAR(emit, i), r);
+            emit_native_mov_state_reg(emit, LOCAL_IDX_LOCAL_VAR(emit, i), r);
         }
     }
     // Get 3rd local from the stack back into REG_LOCAL_3 if this reg couldn't be written to above

@@ -406,11 +442,32 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
     // work out size of state (locals plus stack)
     emit->n_state = scope->num_locals + scope->stack_size;
 
-    // the locals and stack start after the code_state structure
-    emit->stack_start = emit->code_state_start + sizeof(mp_code_state_t) / sizeof(mp_uint_t);
+    if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
+        emit->code_state_start = 0;
+        emit->stack_start = sizeof(mp_code_state_t) / sizeof(mp_uint_t);
+        mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)emit->prelude_offset);
+        mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (uintptr_t)emit->start_offset);
+        ASM_ENTRY(emit->as, sizeof(nlr_buf_t) / sizeof(uintptr_t));
 
-    // allocate space on C-stack for code_state structure, which includes state
-    ASM_ENTRY(emit->as, emit->stack_start + emit->n_state);
+        // Put address of code_state into REG_GENERATOR_STATE
+        #if N_X86
+        asm_x86_mov_arg_to_r32(emit->as, 0, REG_GENERATOR_STATE);
+        #else
+        ASM_MOV_REG_REG(emit->as, REG_GENERATOR_STATE, REG_ARG_1);
+        #endif
+
+        // Put throw value into LOCAL_IDX_EXC_VAL slot, for yield/yield-from
+        #if N_X86
+        asm_x86_mov_arg_to_r32(emit->as, 1, REG_ARG_2);
+        #endif
+        ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_VAL(emit), REG_ARG_2);
+    } else {
+        // The locals and stack start after the code_state structure
+        emit->stack_start = emit->code_state_start + sizeof(mp_code_state_t) / sizeof(mp_uint_t);
+
+        // Allocate space on C-stack for code_state structure, which includes state
+        ASM_ENTRY(emit->as, emit->stack_start + emit->n_state);
+    }
 
     // TODO don't load r7 if we don't need it
     #if N_THUMB

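Note: so a native generator's code object starts with two data words followed by machine code. A sketch of the resulting image (editorial summary, matching native_gen_wrap_call() and mp_obj_gen_resume() later in this diff):

    // word 0: prelude_offset -- lets native_gen_wrap_call() find the bytecode
    //                           prelude (to extract n_state) at call time
    // word 1: start_offset   -- resume entry point used by mp_obj_gen_resume()
    // then:   machine code; ASM_ENTRY reserves only an nlr_buf_t on the C
    //         stack, since all other state lives in the heap
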
@@ -421,33 +478,35 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
     ASM_MOV_REG_IMM(emit->as, ASM_XTENSA_REG_A15, (uint32_t)mp_fun_table);
     #endif
 
-    // prepare incoming arguments for call to mp_setup_code_state
+    if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
+        // Prepare incoming arguments for call to mp_setup_code_state
 
         #if N_X86
         asm_x86_mov_arg_to_r32(emit->as, 0, REG_ARG_1);
         asm_x86_mov_arg_to_r32(emit->as, 1, REG_ARG_2);
         asm_x86_mov_arg_to_r32(emit->as, 2, REG_ARG_3);
         asm_x86_mov_arg_to_r32(emit->as, 3, REG_ARG_4);
         #endif
 
-    // set code_state.fun_bc
+        // Set code_state.fun_bc
         ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_ARG_1);
 
-    // set code_state.ip (offset from start of this function to prelude info)
-    // XXX this encoding may change size
-    ASM_MOV_LOCAL_IMM_VIA(emit->as, emit->code_state_start + offsetof(mp_code_state_t, ip) / sizeof(uintptr_t), emit->prelude_offset, REG_ARG_1);
+        // Set code_state.ip (offset from start of this function to prelude info)
+        // TODO this encoding may change size in the final pass, need to make it fixed
+        emit_native_mov_state_imm_via(emit, emit->code_state_start + offsetof(mp_code_state_t, ip) / sizeof(uintptr_t), emit->prelude_offset, REG_ARG_1);
 
-    // put address of code_state into first arg
+        // Put address of code_state into first arg
         ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, emit->code_state_start);
 
-    // call mp_setup_code_state to prepare code_state structure
+        // Call mp_setup_code_state to prepare code_state structure
         #if N_THUMB
         asm_thumb_bl_ind(emit->as, mp_fun_table[MP_F_SETUP_CODE_STATE], MP_F_SETUP_CODE_STATE, ASM_THUMB_REG_R4);
         #elif N_ARM
         asm_arm_bl_ind(emit->as, mp_fun_table[MP_F_SETUP_CODE_STATE], MP_F_SETUP_CODE_STATE, ASM_ARM_REG_R4);
         #else
         ASM_CALL_IND(emit->as, mp_fun_table[MP_F_SETUP_CODE_STATE], MP_F_SETUP_CODE_STATE);
         #endif
+    }
 
     emit_native_global_exc_entry(emit);
 

@@ -631,7 +690,7 @@ STATIC void need_reg_single(emit_t *emit, int reg_needed, int skip_stack_pos) {
         stack_info_t *si = &emit->stack_info[i];
         if (si->kind == STACK_REG && si->data.u_reg == reg_needed) {
             si->kind = STACK_VALUE;
-            ASM_MOV_LOCAL_REG(emit->as, emit->stack_start + i, si->data.u_reg);
+            emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
         }
     }
 }

@@ -642,7 +701,7 @@ STATIC void need_reg_all(emit_t *emit) {
         stack_info_t *si = &emit->stack_info[i];
         if (si->kind == STACK_REG) {
             si->kind = STACK_VALUE;
-            ASM_MOV_LOCAL_REG(emit->as, emit->stack_start + i, si->data.u_reg);
+            emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
         }
     }
 }

@@ -654,7 +713,7 @@ STATIC void need_stack_settled(emit_t *emit) {
         if (si->kind == STACK_REG) {
             DEBUG_printf("    reg(%u) to local(%u)\n", si->data.u_reg, emit->stack_start + i);
             si->kind = STACK_VALUE;
-            ASM_MOV_LOCAL_REG(emit->as, emit->stack_start + i, si->data.u_reg);
+            emit_native_mov_state_reg(emit, emit->stack_start + i, si->data.u_reg);
         }
     }
     for (int i = 0; i < emit->stack_size; i++) {

@@ -662,7 +721,7 @@ STATIC void need_stack_settled(emit_t *emit) {
         if (si->kind == STACK_IMM) {
             DEBUG_printf("    imm(" INT_FMT ") to local(%u)\n", si->data.u_imm, emit->stack_start + i);
             si->kind = STACK_VALUE;
-            ASM_MOV_LOCAL_IMM_VIA(emit->as, emit->stack_start + i, si->data.u_imm, REG_TEMP0);
+            emit_native_mov_state_imm_via(emit, emit->stack_start + i, si->data.u_imm, REG_TEMP0);
         }
     }
 }

@@ -674,7 +733,7 @@ STATIC void emit_access_stack(emit_t *emit, int pos, vtype_kind_t *vtype, int re
     *vtype = si->vtype;
     switch (si->kind) {
         case STACK_VALUE:
-            ASM_MOV_REG_LOCAL(emit->as, reg_dest, emit->stack_start + emit->stack_size - pos);
+            emit_native_mov_reg_state(emit, reg_dest, emit->stack_start + emit->stack_size - pos);
             break;
 
         case STACK_REG:

@@ -696,7 +755,7 @@ STATIC void emit_fold_stack_top(emit_t *emit, int reg_dest) {
     si[0] = si[1];
     if (si->kind == STACK_VALUE) {
         // if folded element was on the stack we need to put it in a register
-        ASM_MOV_REG_LOCAL(emit->as, reg_dest, emit->stack_start + emit->stack_size - 1);
+        emit_native_mov_reg_state(emit, reg_dest, emit->stack_start + emit->stack_size - 1);
         si->kind = STACK_REG;
         si->data.u_reg = reg_dest;
     }

@@ -819,19 +878,19 @@ STATIC void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, mp_uint_t reg_de
             si->kind = STACK_VALUE;
             switch (si->vtype) {
                 case VTYPE_PYOBJ:
-                    ASM_MOV_LOCAL_IMM_VIA(emit->as, emit->stack_start + emit->stack_size - 1 - i, si->data.u_imm, reg_dest);
+                    emit_native_mov_state_imm_via(emit, emit->stack_start + emit->stack_size - 1 - i, si->data.u_imm, reg_dest);
                     break;
                 case VTYPE_BOOL:
                     if (si->data.u_imm == 0) {
-                        ASM_MOV_LOCAL_IMM_VIA(emit->as, emit->stack_start + emit->stack_size - 1 - i, (mp_uint_t)mp_const_false, reg_dest);
+                        emit_native_mov_state_imm_via(emit, emit->stack_start + emit->stack_size - 1 - i, (mp_uint_t)mp_const_false, reg_dest);
                     } else {
-                        ASM_MOV_LOCAL_IMM_VIA(emit->as, emit->stack_start + emit->stack_size - 1 - i, (mp_uint_t)mp_const_true, reg_dest);
+                        emit_native_mov_state_imm_via(emit, emit->stack_start + emit->stack_size - 1 - i, (mp_uint_t)mp_const_true, reg_dest);
                     }
                     si->vtype = VTYPE_PYOBJ;
                     break;
                 case VTYPE_INT:
                 case VTYPE_UINT:
-                    ASM_MOV_LOCAL_IMM_VIA(emit->as, emit->stack_start + emit->stack_size - 1 - i, (uintptr_t)MP_OBJ_NEW_SMALL_INT(si->data.u_imm), reg_dest);
+                    emit_native_mov_state_imm_via(emit, emit->stack_start + emit->stack_size - 1 - i, (uintptr_t)MP_OBJ_NEW_SMALL_INT(si->data.u_imm), reg_dest);
                     si->vtype = VTYPE_PYOBJ;
                     break;
                 default:

@@ -849,9 +908,9 @@ STATIC void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, mp_uint_t reg_de
         stack_info_t *si = &emit->stack_info[emit->stack_size - 1 - i];
         if (si->vtype != VTYPE_PYOBJ) {
             mp_uint_t local_num = emit->stack_start + emit->stack_size - 1 - i;
-            ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, local_num);
+            emit_native_mov_reg_state(emit, REG_ARG_1, local_num);
             emit_call_with_imm_arg(emit, MP_F_CONVERT_NATIVE_TO_OBJ, si->vtype, REG_ARG_2); // arg2 = type
-            ASM_MOV_LOCAL_REG(emit->as, local_num, REG_RET);
+            emit_native_mov_state_reg(emit, local_num, REG_RET);
             si->vtype = VTYPE_PYOBJ;
             DEBUG_printf("    convert_native_to_obj(local_num=" UINT_FMT ")\n", local_num);
         }

@@ -859,7 +918,7 @@ STATIC void emit_get_stack_pointer_to_reg_for_pop(emit_t *emit, mp_uint_t reg_de
 
     // Adujust the stack for a pop of n_pop items, and load the stack pointer into reg_dest.
     adjust_stack(emit, -n_pop);
-    ASM_MOV_REG_LOCAL_ADDR(emit->as, reg_dest, emit->stack_start + emit->stack_size);
+    emit_native_mov_reg_state_addr(emit, reg_dest, emit->stack_start + emit->stack_size);
 }
 
 // vtype of all n_push objects is VTYPE_PYOBJ

@@ -870,7 +929,7 @@ STATIC void emit_get_stack_pointer_to_reg_for_push(emit_t *emit, mp_uint_t reg_d
         emit->stack_info[emit->stack_size + i].kind = STACK_VALUE;
         emit->stack_info[emit->stack_size + i].vtype = VTYPE_PYOBJ;
     }
-    ASM_MOV_REG_LOCAL_ADDR(emit->as, reg_dest, emit->stack_start + emit->stack_size);
+    emit_native_mov_reg_state_addr(emit, reg_dest, emit->stack_start + emit->stack_size);
     adjust_stack(emit, n_push);
 }
 

@@ -932,7 +991,7 @@ STATIC void emit_load_reg_with_ptr(emit_t *emit, int reg, mp_uint_t ptr, size_t
     if (emit->pass == MP_PASS_EMIT) {
         emit->const_table[table_off] = ptr;
     }
-    ASM_MOV_REG_LOCAL(emit->as, REG_TEMP0, LOCAL_IDX_FUN_OBJ(emit));
+    emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_FUN_OBJ(emit));
     ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, offsetof(mp_obj_fun_bc_t, const_table) / sizeof(uintptr_t));
     ASM_LOAD_REG_REG_OFFSET(emit->as, reg, REG_TEMP0, table_off);
 }

@@ -985,17 +1044,21 @@ STATIC void emit_native_global_exc_entry(emit_t *emit) {
         mp_uint_t start_label = *emit->label_slot + 2;
         mp_uint_t global_except_label = *emit->label_slot + 3;
 
-        // Set new globals
-        ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_FUN_OBJ(emit));
-        ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_ARG_1, offsetof(mp_obj_fun_bc_t, globals) / sizeof(uintptr_t));
-        emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
+        if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
+            // Set new globals
+            emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_FUN_OBJ(emit));
+            ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_ARG_1, offsetof(mp_obj_fun_bc_t, globals) / sizeof(uintptr_t));
+            emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
 
-        // Save old globals (or NULL if globals didn't change)
-        ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_OLD_GLOBALS(emit), REG_RET);
+            // Save old globals (or NULL if globals didn't change)
+            emit_native_mov_state_reg(emit, LOCAL_IDX_OLD_GLOBALS(emit), REG_RET);
+        }
 
         if (emit->scope->exc_stack_size == 0) {
-            // Optimisation: if globals didn't change don't push the nlr context
-            ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, false);
+            if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
+                // Optimisation: if globals didn't change don't push the nlr context
+                ASM_JUMP_IF_REG_ZERO(emit->as, REG_RET, start_label, false);
+            }
 
             // Wrap everything in an nlr context
             ASM_MOV_REG_LOCAL_ADDR(emit->as, REG_ARG_1, 0);

@@ -1028,16 +1091,41 @@ STATIC void emit_native_global_exc_entry(emit_t *emit) {
             ASM_JUMP_IF_REG_NONZERO(emit->as, REG_LOCAL_1, nlr_label, false);
         }
 
-        // Restore old globals
-        ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_OLD_GLOBALS(emit));
-        emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
+        if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
+            // Restore old globals
+            emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_OLD_GLOBALS(emit));
+            emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
+        }
 
-        // Re-raise exception out to caller
-        ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
-        emit_call(emit, MP_F_NATIVE_RAISE);
+        if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
+            // Store return value in state[0]
+            ASM_MOV_REG_LOCAL(emit->as, REG_TEMP0, LOCAL_IDX_EXC_VAL(emit));
+            ASM_STORE_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, offsetof(mp_code_state_t, state) / sizeof(uintptr_t));
+
+            // Load return kind
+            ASM_MOV_REG_IMM(emit->as, REG_RET, MP_VM_RETURN_EXCEPTION);
+
+            ASM_EXIT(emit->as);
+        } else {
+            // Re-raise exception out to caller
+            ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
+            emit_call(emit, MP_F_NATIVE_RAISE);
+        }
 
         // Label for start of function
         emit_native_label_assign(emit, start_label);
+
+        if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
+            emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_GEN_PC(emit));
+            ASM_JUMP_REG(emit->as, REG_TEMP0);
+            emit->start_offset = mp_asm_base_get_code_pos(&emit->as->base);
+
+            // This is the first entry of the generator
+
+            // Check LOCAL_IDX_EXC_VAL for any injected value
+            ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
+            emit_call(emit, MP_F_NATIVE_RAISE);
+        }
     }
 }
 

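Note: hedged pseudo-code of the generator entry/exit protocol emitted above (the real code is emitted machine instructions; labels are illustrative):

    // nlr_push(&nlr_buf)                 -- global handler wraps everything
    // on nlr jump:  state[0] = exc_val;  return MP_VM_RETURN_EXCEPTION;
    // on entry:     goto *LOCAL_IDX_GEN_PC
    //               (the first entry falls through to the code below; each
    //                yield re-points GEN_PC at its own re-entry label)
    // first entry:  mp_native_raise(exc_val)  -- apply any injected throw()
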
@@ -1047,22 +1135,26 @@ STATIC void emit_native_global_exc_exit(emit_t *emit) {
 
     if (NEED_GLOBAL_EXC_HANDLER(emit)) {
         // Get old globals
-        ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_OLD_GLOBALS(emit));
+        if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
+            emit_native_mov_reg_state(emit, REG_ARG_1, LOCAL_IDX_OLD_GLOBALS(emit));
 
-        if (emit->scope->exc_stack_size == 0) {
-            // Optimisation: if globals didn't change then don't restore them and don't do nlr_pop
-            ASM_JUMP_IF_REG_ZERO(emit->as, REG_ARG_1, emit->exit_label + 1, false);
-        }
+            if (emit->scope->exc_stack_size == 0) {
+                // Optimisation: if globals didn't change then don't restore them and don't do nlr_pop
+                ASM_JUMP_IF_REG_ZERO(emit->as, REG_ARG_1, emit->exit_label + 1, false);
+            }
 
-        // Restore old globals
-        emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
+            // Restore old globals
+            emit_call(emit, MP_F_NATIVE_SWAP_GLOBALS);
+        }
 
         // Pop the nlr context
         emit_call(emit, MP_F_NLR_POP);
 
-        if (emit->scope->exc_stack_size == 0) {
-            // Destination label for above optimisation
-            emit_native_label_assign(emit, emit->exit_label + 1);
+        if (!(emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR)) {
+            if (emit->scope->exc_stack_size == 0) {
+                // Destination label for above optimisation
+                emit_native_label_assign(emit, emit->exit_label + 1);
+            }
         }
 
         // Load return value

@@ -1212,7 +1304,7 @@ STATIC void emit_native_load_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
         emit_post_push_reg(emit, vtype, reg_local_table[local_num]);
     } else {
         need_reg_single(emit, REG_TEMP0, 0);
-        ASM_MOV_REG_LOCAL(emit->as, REG_TEMP0, LOCAL_IDX_LOCAL_VAR(emit, local_num));
+        emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_LOCAL_VAR(emit, local_num));
         emit_post_push_reg(emit, vtype, REG_TEMP0);
     }
 }

@@ -1431,7 +1523,7 @@ STATIC void emit_native_store_fast(emit_t *emit, qstr qst, mp_uint_t local_num)
         emit_pre_pop_reg(emit, &vtype, reg_local_table[local_num]);
     } else {
         emit_pre_pop_reg(emit, &vtype, REG_TEMP0);
-        ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_LOCAL_VAR(emit, local_num), REG_TEMP0);
+        emit_native_mov_state_reg(emit, LOCAL_IDX_LOCAL_VAR(emit, local_num), REG_TEMP0);
     }
     emit_post(emit);
 

@@ -2464,6 +2556,22 @@ STATIC void emit_native_call_method(emit_t *emit, mp_uint_t n_positional, mp_uin
 
 STATIC void emit_native_return_value(emit_t *emit) {
     DEBUG_printf("return_value\n");
+
+    if (emit->scope->scope_flags & MP_SCOPE_FLAG_GENERATOR) {
+        // Save pointer to current stack position for caller to access return value
+        emit_get_stack_pointer_to_reg_for_pop(emit, REG_TEMP0, 1);
+        emit_native_mov_state_reg(emit, offsetof(mp_code_state_t, sp) / sizeof(uintptr_t), REG_TEMP0);
+
+        // Put return type in return value slot
+        ASM_MOV_REG_IMM(emit->as, REG_TEMP0, MP_VM_RETURN_NORMAL);
+        ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_TEMP0);
+
+        // Do the unwinding jump to get to the return handler
+        emit_native_unwind_jump(emit, emit->exit_label, emit->exc_stack_size);
+        emit->last_emit_was_return_value = true;
+        return;
+    }
+
     if (emit->do_viper_types) {
         vtype_kind_t return_vtype = emit->scope->scope_flags >> MP_SCOPE_FLAG_VIPERRET_POS;
         if (peek_vtype(emit, 0) == VTYPE_PTR_NONE) {

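Note: the emitted return path for a generator, in outline (editorial summary of the block above):

    // code_state->sp  = &return_value_on_python_stack;  -- caller reads *sp
    // RET_VAL slot    = MP_VM_RETURN_NORMAL;
    // unwind-jump to the exit label                     -- runs any active
    //                                                      finally handlers
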
@@ -2510,10 +2618,85 @@ STATIC void emit_native_raise_varargs(emit_t *emit, mp_uint_t n_args) {
 }
 
 STATIC void emit_native_yield(emit_t *emit, int kind) {
-    // not supported (for now)
-    (void)emit;
-    (void)kind;
-    mp_raise_NotImplementedError("native yield");
+    // Note: 1 (yield) or 3 (yield from) labels are reserved for this function, starting at *emit->label_slot
+    if (emit->do_viper_types) {
+        mp_raise_NotImplementedError("native yield");
+    }
+    emit->scope->scope_flags |= MP_SCOPE_FLAG_GENERATOR;
+
+    need_stack_settled(emit);
+
+    if (kind == MP_EMIT_YIELD_FROM) {
+
+        // Top of yield-from loop, conceptually implementing:
+        //     for item in generator:
+        //         yield item
+
+        // Jump to start of loop
+        emit_native_jump(emit, *emit->label_slot + 2);
+
+        // Label for top of loop
+        emit_native_label_assign(emit, *emit->label_slot + 1);
+    }
+
+    // Save pointer to current stack position for caller to access yielded value
+    emit_get_stack_pointer_to_reg_for_pop(emit, REG_TEMP0, 1);
+    emit_native_mov_state_reg(emit, offsetof(mp_code_state_t, sp) / sizeof(uintptr_t), REG_TEMP0);
+
+    // Put return type in return value slot
+    ASM_MOV_REG_IMM(emit->as, REG_TEMP0, MP_VM_RETURN_YIELD);
+    ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_RET_VAL(emit), REG_TEMP0);
+
+    // Save re-entry PC
+    ASM_MOV_REG_PCREL(emit->as, REG_TEMP0, *emit->label_slot);
+    emit_native_mov_state_reg(emit, LOCAL_IDX_GEN_PC(emit), REG_TEMP0);
+
+    // Jump to exit handler
+    ASM_JUMP(emit->as, emit->exit_label);
+
+    // Label re-entry point
+    mp_asm_base_label_assign(&emit->as->base, *emit->label_slot);
+
+    // Re-open any active exception handler
+    if (emit->exc_stack_size > 0) {
+        // Find innermost active exception handler, to restore as current handler
+        exc_stack_entry_t *e = &emit->exc_stack[emit->exc_stack_size - 1];
+        for (; e >= emit->exc_stack; --e) {
+            if (e->is_active) {
+                // Found active handler, get its PC
+                ASM_MOV_REG_PCREL(emit->as, REG_RET, e->label);
+                ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_EXC_HANDLER_PC(emit), REG_RET);
+            }
+        }
+    }
+
+    emit_native_adjust_stack_size(emit, 1); // send_value
+
+    if (kind == MP_EMIT_YIELD_VALUE) {
+        // Check LOCAL_IDX_EXC_VAL for any injected value
+        ASM_MOV_REG_LOCAL(emit->as, REG_ARG_1, LOCAL_IDX_EXC_VAL(emit));
+        emit_call(emit, MP_F_NATIVE_RAISE);
+    } else {
+        // Label loop entry
+        emit_native_label_assign(emit, *emit->label_slot + 2);
+
+        // Get the next item from the delegate generator
+        vtype_kind_t vtype;
+        emit_pre_pop_reg(emit, &vtype, REG_ARG_2); // send_value
+        emit_access_stack(emit, 1, &vtype, REG_ARG_1); // generator
+        ASM_MOV_REG_LOCAL(emit->as, REG_ARG_3, LOCAL_IDX_EXC_VAL(emit)); // throw_value
+        emit_post_push_reg(emit, VTYPE_PYOBJ, REG_ARG_3);
+        emit_get_stack_pointer_to_reg_for_pop(emit, REG_ARG_3, 1); // ret_value
+        emit_call(emit, MP_F_NATIVE_YIELD_FROM);
+
+        // If returned non-zero then generator continues
+        ASM_JUMP_IF_REG_NONZERO(emit->as, REG_RET, *emit->label_slot + 1, true);
+
+        // Pop exhausted gen, replace with ret_value
+        emit_native_adjust_stack_size(emit, 1); // ret_value
+        emit_fold_stack_top(emit, REG_ARG_1);
+    }
 }
 
 STATIC void emit_native_start_except_handler(emit_t *emit) {

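Note: one plain yield, in outline (editorial summary of emit_native_yield above):

    // code_state->sp  = &yielded_value;     -- expose the value to the caller
    // RET_VAL slot    = MP_VM_RETURN_YIELD;
    // code_state->ip  = &&resume_label;     -- GEN_PC for the next resume
    // jump exit_label                       -- pops the nlr context, returns
    // resume_label:
    //   re-register the innermost active exception handler (the handlers were
    //   lost when the nlr stack was pulled down at the yield)
    //   mp_native_raise(exc_val)            -- apply any injected throw()
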
py/emitnx86.c

@@ -66,6 +66,7 @@ STATIC byte mp_f_n_args[MP_F_NUMBER_OF] = {
     [MP_F_SETUP_CODE_STATE] = 4,
     [MP_F_SMALL_INT_FLOOR_DIVIDE] = 2,
     [MP_F_SMALL_INT_MODULO] = 2,
+    [MP_F_NATIVE_YIELD_FROM] = 3,
 };
 
 #define N_X86 (1)

py/nativeglue.c

@@ -106,7 +106,7 @@ mp_obj_t mp_native_call_function_n_kw(mp_obj_t fun_in, size_t n_args_kw, const m
 // wrapper that makes raise obj and raises it
 // END_FINALLY opcode requires that we don't raise if o==None
 void mp_native_raise(mp_obj_t o) {
-    if (o != mp_const_none) {
+    if (o != MP_OBJ_NULL && o != mp_const_none) {
         nlr_raise(mp_make_raise_obj(o));
     }
 }

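Note: the extra MP_OBJ_NULL check lets the emitted code call this unconditionally on every resume. A sketch of the three cases (exc_obj is a placeholder name):

    mp_native_raise(MP_OBJ_NULL);    // plain resume/send(): returns immediately
    mp_native_raise(mp_const_none);  // END_FINALLY with None: returns immediately
    mp_native_raise(exc_obj);        // gen.throw(exc_obj): nlr_raise()s it
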
@@ -137,6 +137,42 @@ STATIC mp_obj_t mp_native_iternext(mp_obj_iter_buf_t *iter) {
     return mp_iternext(obj);
 }
 
+STATIC bool mp_native_yield_from(mp_obj_t gen, mp_obj_t send_value, mp_obj_t *ret_value) {
+    mp_vm_return_kind_t ret_kind;
+    nlr_buf_t nlr_buf;
+    mp_obj_t throw_value = *ret_value;
+    if (nlr_push(&nlr_buf) == 0) {
+        if (throw_value != MP_OBJ_NULL) {
+            send_value = MP_OBJ_NULL;
+        }
+        ret_kind = mp_resume(gen, send_value, throw_value, ret_value);
+        nlr_pop();
+    } else {
+        ret_kind = MP_VM_RETURN_EXCEPTION;
+        *ret_value = nlr_buf.ret_val;
+    }
+
+    if (ret_kind == MP_VM_RETURN_YIELD) {
+        return true;
+    } else if (ret_kind == MP_VM_RETURN_NORMAL) {
+        if (*ret_value == MP_OBJ_STOP_ITERATION) {
+            *ret_value = mp_const_none;
+        }
+    } else {
+        assert(ret_kind == MP_VM_RETURN_EXCEPTION);
+        if (!mp_obj_exception_match(*ret_value, MP_OBJ_FROM_PTR(&mp_type_StopIteration))) {
+            nlr_raise(*ret_value);
+        }
+        *ret_value = mp_obj_exception_get_value(*ret_value);
+    }
+
+    if (throw_value != MP_OBJ_NULL && mp_obj_exception_match(throw_value, MP_OBJ_FROM_PTR(&mp_type_GeneratorExit))) {
+        nlr_raise(mp_make_raise_obj(throw_value));
+    }
+
+    return false;
+}
+
 // these must correspond to the respective enum in runtime0.h
 void *const mp_fun_table[MP_F_NUMBER_OF] = {
     mp_convert_obj_to_native,

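Note: mp_native_yield_from() returns true while the delegate keeps yielding and false once it returns, with *ret_value then holding the final value. The loop the emitter wraps around it behaves roughly like this hedged sketch (the two helper names are hypothetical stand-ins for emitted code):

    mp_obj_t ret_value = throw_value;  // slot doubles as throw-in / result-out
    while (mp_native_yield_from(gen, send_value, &ret_value)) {
        // Delegate yielded: pass ret_value on to our own caller, then pick up
        // whatever is sent or thrown back in.
        send_value = yield_out_to_caller(ret_value);  // hypothetical
        ret_value = pending_throw_value();            // hypothetical
    }
    // Delegate finished: ret_value is its return (StopIteration) value.
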
@@ -189,6 +225,7 @@ void *const mp_fun_table[MP_F_NUMBER_OF] = {
     mp_setup_code_state,
     mp_small_int_floor_divide,
     mp_small_int_modulo,
+    mp_native_yield_from,
 };
 
 /*

py/obj.h (1 changed line)

@@ -558,6 +558,7 @@ extern const mp_obj_type_t mp_type_zip;
 extern const mp_obj_type_t mp_type_array;
 extern const mp_obj_type_t mp_type_super;
 extern const mp_obj_type_t mp_type_gen_wrap;
+extern const mp_obj_type_t mp_type_native_gen_wrap;
 extern const mp_obj_type_t mp_type_gen_instance;
 extern const mp_obj_type_t mp_type_fun_builtin_0;
 extern const mp_obj_type_t mp_type_fun_builtin_1;

py/objfun.c

@@ -154,7 +154,7 @@ STATIC const mp_obj_type_t mp_type_fun_native;
 qstr mp_obj_fun_get_name(mp_const_obj_t fun_in) {
     const mp_obj_fun_bc_t *fun = MP_OBJ_TO_PTR(fun_in);
     #if MICROPY_EMIT_NATIVE
-    if (fun->base.type == &mp_type_fun_native) {
+    if (fun->base.type == &mp_type_fun_native || fun->base.type == &mp_type_native_gen_wrap) {
         // TODO native functions don't have name stored
         return MP_QSTR_;
     }

py/objgenerator.c

@@ -73,6 +73,53 @@ const mp_obj_type_t mp_type_gen_wrap = {
     #endif
 };
 
+/******************************************************************************/
+// native generator wrapper
+
+#if MICROPY_EMIT_NATIVE
+
+STATIC mp_obj_t native_gen_wrap_call(mp_obj_t self_in, size_t n_args, size_t n_kw, const mp_obj_t *args) {
+    // The state for a native generating function is held in the same struct as a bytecode function
+    mp_obj_fun_bc_t *self_fun = MP_OBJ_TO_PTR(self_in);
+
+    // Determine start of prelude, and extract n_state from it
+    uintptr_t prelude_offset = ((uintptr_t*)self_fun->bytecode)[0];
+    size_t n_state = mp_decode_uint_value(self_fun->bytecode + prelude_offset);
+    size_t n_exc_stack = 0;
+
+    // Allocate the generator object, with room for local stack and exception stack
+    mp_obj_gen_instance_t *o = m_new_obj_var(mp_obj_gen_instance_t, byte,
+        n_state * sizeof(mp_obj_t) + n_exc_stack * sizeof(mp_exc_stack_t));
+    o->base.type = &mp_type_gen_instance;
+
+    // Parse the input arguments and set up the code state
+    o->globals = self_fun->globals;
+    o->code_state.fun_bc = self_fun;
+    o->code_state.ip = (const byte*)prelude_offset;
+    mp_setup_code_state(&o->code_state, n_args, n_kw, args);
+
+    // Indicate we are a native function, which doesn't use this variable
+    o->code_state.exc_sp = NULL;
+
+    // Prepare the generator instance for execution
+    uintptr_t start_offset = ((uintptr_t*)self_fun->bytecode)[1];
+    o->code_state.ip = MICROPY_MAKE_POINTER_CALLABLE((void*)(self_fun->bytecode + start_offset));
+
+    return MP_OBJ_FROM_PTR(o);
+}
+
+const mp_obj_type_t mp_type_native_gen_wrap = {
+    { &mp_type_type },
+    .name = MP_QSTR_generator,
+    .call = native_gen_wrap_call,
+    .unary_op = mp_generic_unary_op,
+    #if MICROPY_PY_FUNCTION_ATTRS
+    .attr = mp_obj_fun_bc_attr,
+    #endif
+};
+
+#endif // MICROPY_EMIT_NATIVE
+
 /******************************************************************************/
 /* generator instance */
 

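Note: a hedged usage sketch of the new wrapper. In practice the call goes through the type's .call slot via mp_call_function_n_kw(); no emitted code runs until the first resume.

    mp_obj_t gen = mp_call_function_0(native_gen_fun);  // allocates + primes instance
    mp_obj_t result;
    mp_vm_return_kind_t kind = mp_obj_gen_resume(gen, mp_const_none, MP_OBJ_NULL, &result);
    // kind is MP_VM_RETURN_YIELD / _NORMAL / _EXCEPTION; result is the
    // yielded, returned or raised object
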
@@ -118,7 +165,22 @@ mp_vm_return_kind_t mp_obj_gen_resume(mp_obj_t self_in, mp_obj_t send_value, mp_
     self->code_state.old_globals = mp_globals_get();
     mp_globals_set(self->globals);
     self->globals = NULL;
-    mp_vm_return_kind_t ret_kind = mp_execute_bytecode(&self->code_state, throw_value);
+
+    mp_vm_return_kind_t ret_kind;
+
+    #if MICROPY_EMIT_NATIVE
+    if (self->code_state.exc_sp == NULL) {
+        // A native generator, with entry point 2 words into the "bytecode" pointer
+        typedef uintptr_t (*mp_fun_native_gen_t)(void*, mp_obj_t);
+        mp_fun_native_gen_t fun = MICROPY_MAKE_POINTER_CALLABLE((const void*)(self->code_state.fun_bc->bytecode + 2 * sizeof(uintptr_t)));
+        ret_kind = fun((void*)&self->code_state, throw_value);
+    } else
+    #endif
+    {
+        // A bytecode generator
+        ret_kind = mp_execute_bytecode(&self->code_state, throw_value);
+    }
+
     self->globals = mp_globals_get();
     mp_globals_set(self->code_state.old_globals);
 

py/runtime0.h

@@ -197,6 +197,7 @@ typedef enum {
     MP_F_SETUP_CODE_STATE,
     MP_F_SMALL_INT_FLOOR_DIVIDE,
     MP_F_SMALL_INT_MODULO,
+    MP_F_NATIVE_YIELD_FROM,
     MP_F_NUMBER_OF,
 } mp_fun_kind_t;