py/emitnative: Access qstr values using indirection table qstr_table.

This changes the native emitter to access qstr values using the qstr
indirection table qstr_table, but only when generating native code that
will be saved to a .mpy file.  This makes the resulting native code fully
static, ie it does not require any fix-ups or rewriting when it is
imported.
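
To illustrate the mechanism (a hedged sketch only, not the emitter's actual code; the names qstr_table, qstr_index and the two load_qstr_* helpers below are invented for this example): previously the numeric qstr value was embedded in the machine code as an immediate, which the loader had to locate and patch, whereas now the code embeds only a small index that is resolved through the module's qstr_table at run time:

    // Sketch: direct (patched) vs indirected qstr access.
    #include <stdint.h>
    #include <stdio.h>

    typedef uint16_t qstr_t;

    // Filled in when the .mpy is imported; the values here are arbitrary.
    static qstr_t qstr_table[] = {5, 42, 317};

    // Old scheme: the qstr value itself is baked into the native code, so the
    // loader must rewrite it (the qstr_link fix-ups removed by this commit).
    static qstr_t load_qstr_direct(void) {
        return 42; // only correct for the build that generated the code
    }

    // New scheme: the native code stores just an index; the value is read
    // from qstr_table at runtime, so the machine code never needs rewriting.
    static qstr_t load_qstr_indirect(unsigned index) {
        return qstr_table[index];
    }

    int main(void) {
        printf("direct: %u, indirect: %u\n",
            (unsigned)load_qstr_direct(), (unsigned)load_qstr_indirect(1));
        return 0;
    }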

The performance of native code is more or less unchanged.  Benchmark
results on PYBv1.0 (using --via-mpy and --emit native) are:

N=100 M=100          baseline -> this-commit     diff      diff% (error%)
bm_chaos.py            407.16 ->     411.85 :   +4.69 =  +1.152% (+/-0.01%)
bm_fannkuch.py         100.89 ->     101.20 :   +0.31 =  +0.307% (+/-0.01%)
bm_fft.py             3521.17 ->    3441.72 :  -79.45 =  -2.256% (+/-0.00%)
bm_float.py           6707.29 ->    6644.83 :  -62.46 =  -0.931% (+/-0.00%)
bm_hexiom.py            55.91 ->      55.41 :   -0.50 =  -0.894% (+/-0.00%)
bm_nqueens.py         5343.54 ->    5326.17 :  -17.37 =  -0.325% (+/-0.00%)
bm_pidigits.py         603.89 ->     632.79 :  +28.90 =  +4.786% (+/-0.33%)
core_qstr.py            64.18 ->      64.09 :   -0.09 =  -0.140% (+/-0.01%)
core_yield_from.py     313.61 ->     311.11 :   -2.50 =  -0.797% (+/-0.03%)
misc_aes.py            654.29 ->     659.75 :   +5.46 =  +0.834% (+/-0.02%)
misc_mandel.py        4205.10 ->    4272.08 :  +66.98 =  +1.593% (+/-0.01%)
misc_pystone.py       3077.79 ->    3128.39 :  +50.60 =  +1.644% (+/-0.01%)
misc_raytrace.py       388.45 ->     393.71 :   +5.26 =  +1.354% (+/-0.01%)
viper_call0.py         576.83 ->     566.76 :  -10.07 =  -1.746% (+/-0.05%)
viper_call1a.py        550.39 ->     540.12 :  -10.27 =  -1.866% (+/-0.11%)
viper_call1b.py        438.32 ->     432.09 :   -6.23 =  -1.421% (+/-0.11%)
viper_call1c.py        442.96 ->     436.11 :   -6.85 =  -1.546% (+/-0.08%)
viper_call2a.py        536.31 ->     527.37 :   -8.94 =  -1.667% (+/-0.04%)
viper_call2b.py        378.99 ->     377.50 :   -1.49 =  -0.393% (+/-0.08%)

Signed-off-by: Damien George <damien@micropython.org>
Author: Damien George
Date: 2022-05-20 14:31:56 +10:00
Parent: 94955e8e3d
Commit: d4d53e9e11
12 changed files with 75 additions and 186 deletions

File: py/compile.c

@@ -3323,7 +3323,7 @@ STATIC void compile_scope_inline_asm(compiler_t *comp, scope_t *scope, pass_kind
 NULL,
 #if MICROPY_PERSISTENT_CODE_SAVE
 0,
-0, 0, NULL,
+0,
 #endif
 0, comp->scope_cur->num_pos_args, type_sig);
 }

File: py/emitglue.c

@@ -101,7 +101,6 @@ void mp_emit_glue_assign_native(mp_raw_code_t *rc, mp_raw_code_kind_t kind, void
 #if MICROPY_PERSISTENT_CODE_SAVE
 size_t n_children,
 uint16_t prelude_offset,
-uint16_t n_qstr, mp_qstr_link_entry_t *qstr_link,
 #endif
 mp_uint_t scope_flags, mp_uint_t n_pos_args, mp_uint_t type_sig) {
@@ -144,8 +143,6 @@ void mp_emit_glue_assign_native(mp_raw_code_t *rc, mp_raw_code_kind_t kind, void
 #if MICROPY_PERSISTENT_CODE_SAVE
 rc->n_children = n_children;
 rc->prelude_offset = prelude_offset;
-rc->n_qstr = n_qstr;
-rc->qstr_link = qstr_link;
 #endif
 // These two entries are only needed for MP_CODE_NATIVE_ASM.

File: py/emitglue.h

@@ -49,11 +49,6 @@ typedef enum {
 MP_CODE_NATIVE_ASM,
 } mp_raw_code_kind_t;
-typedef struct _mp_qstr_link_entry_t {
-uint16_t off;
-uint16_t qst;
-} mp_qstr_link_entry_t;
 // compiled bytecode: instance in RAM, referenced by outer scope, usually freed after first (and only) use
 // mpy file: instance in RAM, created when .mpy file is loaded (same comments as above)
 // frozen: instance in ROM
@@ -78,8 +73,6 @@ typedef struct _mp_raw_code_t {
 #endif
 #if MICROPY_EMIT_MACHINE_CODE
 uint16_t prelude_offset;
-uint16_t n_qstr;
-mp_qstr_link_entry_t *qstr_link;
 #endif
 #endif
 #if MICROPY_EMIT_MACHINE_CODE
@@ -104,7 +97,6 @@ void mp_emit_glue_assign_native(mp_raw_code_t *rc, mp_raw_code_kind_t kind, void
 #if MICROPY_PERSISTENT_CODE_SAVE
 size_t n_children,
 uint16_t prelude_offset,
-uint16_t n_qstr, mp_qstr_link_entry_t *qstr_link,
 #endif
 mp_uint_t scope_flags, mp_uint_t n_pos_args, mp_uint_t type_sig);

File: py/emitnarm.c

@@ -10,8 +10,6 @@
 // Word indices of REG_LOCAL_x in nlr_buf_t
 #define NLR_BUF_IDX_LOCAL_1 (3) // r4
-#define NLR_BUF_IDX_LOCAL_2 (4) // r5
-#define NLR_BUF_IDX_LOCAL_3 (5) // r6
 #define N_ARM (1)
 #define EXPORT_FUN(name) emit_native_arm_##name

File: py/emitnative.c

@@ -63,6 +63,7 @@
 // C stack layout for native functions:
 // 0: nlr_buf_t [optional]
+// return_value [optional word]
 // exc_handler_unwind [optional word]
 // emit->code_state_start: mp_code_state_native_t
 // emit->stack_start: Python object stack | emit->n_state
@@ -70,6 +71,7 @@
 //
 // C stack layout for native generator functions:
 // 0=emit->stack_start: nlr_buf_t
+// return_value
 // exc_handler_unwind [optional word]
 //
 // Then REG_GENERATOR_STATE points to:
@@ -79,6 +81,7 @@
 //
 // C stack layout for viper functions:
 // 0: nlr_buf_t [optional]
+// return_value [optional word]
 // exc_handler_unwind [optional word]
 // emit->code_state_start: fun_obj, old_globals [optional]
 // emit->stack_start: Python object stack | emit->n_state
@@ -100,6 +103,7 @@
 #define OFFSETOF_OBJ_FUN_BC_CONTEXT (offsetof(mp_obj_fun_bc_t, context) / sizeof(uintptr_t))
 #define OFFSETOF_OBJ_FUN_BC_CHILD_TABLE (offsetof(mp_obj_fun_bc_t, child_table) / sizeof(uintptr_t))
 #define OFFSETOF_OBJ_FUN_BC_BYTECODE (offsetof(mp_obj_fun_bc_t, bytecode) / sizeof(uintptr_t))
+#define OFFSETOF_MODULE_CONTEXT_QSTR_TABLE (offsetof(mp_module_context_t, constants.qstr_table) / sizeof(uintptr_t))
 #define OFFSETOF_MODULE_CONTEXT_OBJ_TABLE (offsetof(mp_module_context_t, constants.obj_table) / sizeof(uintptr_t))
 #define OFFSETOF_MODULE_CONTEXT_GLOBALS (offsetof(mp_module_context_t, module.globals) / sizeof(uintptr_t))
@@ -134,14 +138,41 @@
 // Indices within the local C stack for various variables
 #define LOCAL_IDX_EXC_VAL(emit) (NLR_BUF_IDX_RET_VAL)
 #define LOCAL_IDX_EXC_HANDLER_PC(emit) (NLR_BUF_IDX_LOCAL_1)
-#define LOCAL_IDX_EXC_HANDLER_UNWIND(emit) (SIZEOF_NLR_BUF) // this needs a dedicated variable outside nlr_buf_t
-#define LOCAL_IDX_RET_VAL(emit) (NLR_BUF_IDX_LOCAL_3)
+#define LOCAL_IDX_EXC_HANDLER_UNWIND(emit) (SIZEOF_NLR_BUF + 1) // this needs a dedicated variable outside nlr_buf_t
+#define LOCAL_IDX_RET_VAL(emit) (SIZEOF_NLR_BUF) // needed when NEED_GLOBAL_EXC_HANDLER is true
 #define LOCAL_IDX_FUN_OBJ(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_FUN_BC)
 #define LOCAL_IDX_OLD_GLOBALS(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_IP)
 #define LOCAL_IDX_GEN_PC(emit) ((emit)->code_state_start + OFFSETOF_CODE_STATE_IP)
 #define LOCAL_IDX_LOCAL_VAR(emit, local_num) ((emit)->stack_start + (emit)->n_state - 1 - (local_num))
+#if MICROPY_PERSISTENT_CODE_SAVE
+// When building with the ability to save native code to .mpy files:
+// - Qstrs are indirect via qstr_table, and REG_LOCAL_3 always points to qstr_table.
+// - In a generator no registers are used to store locals, and REG_LOCAL_2 points to the generator state.
+// - At most 2 registers hold local variables (see CAN_USE_REGS_FOR_LOCALS for when this is possible).
+#define REG_GENERATOR_STATE (REG_LOCAL_2)
+#define REG_QSTR_TABLE (REG_LOCAL_3)
+#define MAX_REGS_FOR_LOCAL_VARS (2)
+STATIC const uint8_t reg_local_table[MAX_REGS_FOR_LOCAL_VARS] = {REG_LOCAL_1, REG_LOCAL_2};
+#else
+// When building without the ability to save native code to .mpy files:
+// - Qstrs values are written directly into the machine code.
+// - In a generator no registers are used to store locals, and REG_LOCAL_3 points to the generator state.
+// - At most 3 registers hold local variables (see CAN_USE_REGS_FOR_LOCALS for when this is possible).
 #define REG_GENERATOR_STATE (REG_LOCAL_3)
+#define MAX_REGS_FOR_LOCAL_VARS (3)
+STATIC const uint8_t reg_local_table[MAX_REGS_FOR_LOCAL_VARS] = {REG_LOCAL_1, REG_LOCAL_2, REG_LOCAL_3};
+#endif
+#define REG_LOCAL_LAST (reg_local_table[MAX_REGS_FOR_LOCAL_VARS - 1])
 #define EMIT_NATIVE_VIPER_TYPE_ERROR(emit, ...) do { \
 *emit->error_slot = mp_obj_new_exception_msg_varg(&mp_type_ViperTypeError, __VA_ARGS__); \
@@ -245,11 +276,6 @@ struct _emit_t {
 uint16_t n_info;
 uint16_t n_cell;
-#if MICROPY_PERSISTENT_CODE_SAVE
-uint16_t qstr_link_cur;
-mp_qstr_link_entry_t *qstr_link;
-#endif
 bool last_emit_was_return_value;
 scope_t *scope;
@@ -257,8 +283,7 @@ struct _emit_t {
 ASM_T *as;
 };
-STATIC const uint8_t reg_local_table[REG_LOCAL_NUM] = {REG_LOCAL_1, REG_LOCAL_2, REG_LOCAL_3};
+STATIC void emit_load_reg_with_object(emit_t *emit, int reg, mp_obj_t obj);
 STATIC void emit_native_global_exc_entry(emit_t *emit);
 STATIC void emit_native_global_exc_exit(emit_t *emit);
 STATIC void emit_native_load_const_obj(emit_t *emit, mp_obj_t obj);
@@ -319,12 +344,7 @@ STATIC void emit_native_mov_reg_state_addr(emit_t *emit, int reg_dest, int local
 STATIC void emit_native_mov_reg_qstr(emit_t *emit, int arg_reg, qstr qst) {
 #if MICROPY_PERSISTENT_CODE_SAVE
-size_t loc = ASM_MOV_REG_IMM_FIX_U16(emit->as, arg_reg, qst);
-size_t link_idx = emit->qstr_link_cur++;
-if (emit->pass == MP_PASS_EMIT) {
-emit->qstr_link[link_idx].off = loc << 2 | 1;
-emit->qstr_link[link_idx].qst = qst;
-}
+ASM_LOAD16_REG_REG_OFFSET(emit->as, arg_reg, REG_QSTR_TABLE, mp_emit_common_use_qstr(emit->emit_common, qst));
 #else
 ASM_MOV_REG_IMM(emit->as, arg_reg, qst);
 #endif
@@ -332,12 +352,7 @@ STATIC void emit_native_mov_reg_qstr(emit_t *emit, int arg_reg, qstr qst) {
 STATIC void emit_native_mov_reg_qstr_obj(emit_t *emit, int reg_dest, qstr qst) {
 #if MICROPY_PERSISTENT_CODE_SAVE
-size_t loc = ASM_MOV_REG_IMM_FIX_WORD(emit->as, reg_dest, (mp_uint_t)MP_OBJ_NEW_QSTR(qst));
-size_t link_idx = emit->qstr_link_cur++;
-if (emit->pass == MP_PASS_EMIT) {
-emit->qstr_link[link_idx].off = loc << 2 | 2;
-emit->qstr_link[link_idx].qst = qst;
-}
+emit_load_reg_with_object(emit, reg_dest, MP_OBJ_NEW_QSTR(qst));
 #else
 ASM_MOV_REG_IMM(emit->as, reg_dest, (mp_uint_t)MP_OBJ_NEW_QSTR(qst));
 #endif
@@ -355,9 +370,6 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
 emit->pass = pass;
 emit->do_viper_types = scope->emit_options == MP_EMIT_OPT_VIPER;
 emit->stack_size = 0;
-#if MICROPY_PERSISTENT_CODE_SAVE
-emit->qstr_link_cur = 0;
-#endif
 emit->last_emit_was_return_value = false;
 emit->scope = scope;
@@ -408,7 +420,8 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
 // Work out start of code state (mp_code_state_native_t or reduced version for viper)
 emit->code_state_start = 0;
 if (NEED_GLOBAL_EXC_HANDLER(emit)) {
-emit->code_state_start = SIZEOF_NLR_BUF;
+emit->code_state_start = SIZEOF_NLR_BUF; // for nlr_buf_t
+emit->code_state_start += 1; // for return_value
 if (NEED_EXC_HANDLER_UNWIND(emit)) {
 emit->code_state_start += 1;
 }
@@ -423,11 +436,11 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
 int num_locals_in_regs = 0;
 if (CAN_USE_REGS_FOR_LOCALS(emit)) {
 num_locals_in_regs = scope->num_locals;
-if (num_locals_in_regs > REG_LOCAL_NUM) {
-num_locals_in_regs = REG_LOCAL_NUM;
+if (num_locals_in_regs > MAX_REGS_FOR_LOCAL_VARS) {
+num_locals_in_regs = MAX_REGS_FOR_LOCAL_VARS;
 }
-// Need a spot for REG_LOCAL_3 if 4 or more args (see below)
-if (scope->num_pos_args >= 4) {
+// Need a spot for REG_LOCAL_LAST (see below)
+if (scope->num_pos_args >= MAX_REGS_FOR_LOCAL_VARS + 1) {
 --num_locals_in_regs;
 }
 }
@@ -452,6 +465,9 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
 // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
+#if MICROPY_PERSISTENT_CODE_SAVE
+ASM_LOAD_REG_REG_OFFSET(emit->as, REG_QSTR_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_QSTR_TABLE);
+#endif
 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, fun_table_off);
@@ -460,15 +476,15 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
 ASM_MOV_LOCAL_REG(emit->as, LOCAL_IDX_FUN_OBJ(emit), REG_PARENT_ARG_1);
 }
-// Put n_args in REG_ARG_1, n_kw in REG_ARG_2, args array in REG_LOCAL_3
+// Put n_args in REG_ARG_1, n_kw in REG_ARG_2, args array in REG_LOCAL_LAST
 #if N_X86
 asm_x86_mov_arg_to_r32(emit->as, 1, REG_ARG_1);
 asm_x86_mov_arg_to_r32(emit->as, 2, REG_ARG_2);
-asm_x86_mov_arg_to_r32(emit->as, 3, REG_LOCAL_3);
+asm_x86_mov_arg_to_r32(emit->as, 3, REG_LOCAL_LAST);
 #else
 ASM_MOV_REG_REG(emit->as, REG_ARG_1, REG_PARENT_ARG_2);
 ASM_MOV_REG_REG(emit->as, REG_ARG_2, REG_PARENT_ARG_3);
-ASM_MOV_REG_REG(emit->as, REG_LOCAL_3, REG_PARENT_ARG_4);
+ASM_MOV_REG_REG(emit->as, REG_LOCAL_LAST, REG_PARENT_ARG_4);
 #endif
 // Check number of args matches this function, and call mp_arg_check_num_sig if not
@@ -483,21 +499,21 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
 // Store arguments into locals (reg or stack), converting to native if needed
 for (int i = 0; i < emit->scope->num_pos_args; i++) {
 int r = REG_ARG_1;
-ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_LOCAL_3, i);
+ASM_LOAD_REG_REG_OFFSET(emit->as, REG_ARG_1, REG_LOCAL_LAST, i);
 if (emit->local_vtype[i] != VTYPE_PYOBJ) {
 emit_call_with_imm_arg(emit, MP_F_CONVERT_OBJ_TO_NATIVE, emit->local_vtype[i], REG_ARG_2);
 r = REG_RET;
 }
-// REG_LOCAL_3 points to the args array so be sure not to overwrite it if it's still needed
-if (i < REG_LOCAL_NUM && CAN_USE_REGS_FOR_LOCALS(emit) && (i != 2 || emit->scope->num_pos_args == 3)) {
+// REG_LOCAL_LAST points to the args array so be sure not to overwrite it if it's still needed
+if (i < MAX_REGS_FOR_LOCAL_VARS && CAN_USE_REGS_FOR_LOCALS(emit) && (i != MAX_REGS_FOR_LOCAL_VARS - 1 || emit->scope->num_pos_args == MAX_REGS_FOR_LOCAL_VARS)) {
 ASM_MOV_REG_REG(emit->as, reg_local_table[i], r);
 } else {
 emit_native_mov_state_reg(emit, LOCAL_IDX_LOCAL_VAR(emit, i), r);
 }
 }
-// Get 3rd local from the stack back into REG_LOCAL_3 if this reg couldn't be written to above
-if (emit->scope->num_pos_args >= 4 && CAN_USE_REGS_FOR_LOCALS(emit)) {
-ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_3, LOCAL_IDX_LOCAL_VAR(emit, 2));
+// Get local from the stack back into REG_LOCAL_LAST if this reg couldn't be written to above
+if (emit->scope->num_pos_args >= MAX_REGS_FOR_LOCAL_VARS + 1 && CAN_USE_REGS_FOR_LOCALS(emit)) {
+ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_LAST, LOCAL_IDX_LOCAL_VAR(emit, MAX_REGS_FOR_LOCAL_VARS - 1));
 }
 emit_native_global_exc_entry(emit);
@@ -531,6 +547,9 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
 // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_GENERATOR_STATE, LOCAL_IDX_FUN_OBJ(emit));
 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONTEXT);
+#if MICROPY_PERSISTENT_CODE_SAVE
+ASM_LOAD_REG_REG_OFFSET(emit->as, REG_QSTR_TABLE, REG_TEMP0, OFFSETOF_MODULE_CONTEXT_QSTR_TABLE);
+#endif
 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_TEMP0, fun_table_off);
 } else {
@@ -551,6 +570,9 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
 // Load REG_FUN_TABLE with a pointer to mp_fun_table, found in the const_table
 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_PARENT_ARG_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
+#if MICROPY_PERSISTENT_CODE_SAVE
+ASM_LOAD_REG_REG_OFFSET(emit->as, REG_QSTR_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_QSTR_TABLE);
+#endif
 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_FUN_TABLE, fun_table_off);
@@ -597,7 +619,7 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
 // cache some locals in registers, but only if no exception handlers
 if (CAN_USE_REGS_FOR_LOCALS(emit)) {
-for (int i = 0; i < REG_LOCAL_NUM && i < scope->num_locals; ++i) {
+for (int i = 0; i < MAX_REGS_FOR_LOCAL_VARS && i < scope->num_locals; ++i) {
 ASM_MOV_REG_LOCAL(emit->as, reg_local_table[i], LOCAL_IDX_LOCAL_VAR(emit, i));
 }
 }
@@ -670,16 +692,6 @@ STATIC bool emit_native_end_pass(emit_t *emit) {
 assert(emit->stack_size == 0);
 assert(emit->exc_stack_size == 0);
-#if MICROPY_PERSISTENT_CODE_SAVE
-// Allocate qstr_link table if needed
-if (emit->pass == MP_PASS_CODE_SIZE) {
-size_t qstr_link_alloc = emit->qstr_link_cur;
-if (qstr_link_alloc > 0) {
-emit->qstr_link = m_new(mp_qstr_link_entry_t, qstr_link_alloc);
-}
-}
-#endif
 if (emit->pass == MP_PASS_EMIT) {
 void *f = mp_asm_base_get_code(&emit->as->base);
 mp_uint_t f_len = mp_asm_base_get_code_size(&emit->as->base);
@@ -714,7 +726,6 @@ STATIC bool emit_native_end_pass(emit_t *emit) {
 #if MICROPY_PERSISTENT_CODE_SAVE
 emit->emit_common->ct_cur_child,
 emit->prelude_offset,
-emit->qstr_link_cur, emit->qstr_link,
 #endif
 emit->scope->scope_flags, 0, 0);
 }
@@ -1112,6 +1123,7 @@ STATIC exc_stack_entry_t *emit_native_pop_exc_stack(emit_t *emit) {
 }
 STATIC void emit_load_reg_with_object(emit_t *emit, int reg, mp_obj_t obj) {
+emit->scope->scope_flags |= MP_SCOPE_FLAG_HASCONSTS;
 size_t table_off = mp_emit_common_use_const_obj(emit->emit_common, obj);
 emit_native_mov_reg_state(emit, REG_TEMP0, LOCAL_IDX_FUN_OBJ(emit));
 ASM_LOAD_REG_REG_OFFSET(emit->as, REG_TEMP0, REG_TEMP0, OFFSETOF_OBJ_FUN_BC_CONTEXT);
@@ -1214,14 +1226,6 @@ STATIC void emit_native_global_exc_entry(emit_t *emit) {
 // Global exception handler: check for valid exception handler
 emit_native_label_assign(emit, global_except_label);
-#if N_NLR_SETJMP
-// Reload REG_FUN_TABLE, since it may be clobbered by longjmp
-size_t fun_table_off = mp_emit_common_use_const_obj(emit->emit_common, MP_OBJ_FROM_PTR(&mp_fun_table));
-emit_native_mov_reg_state(emit, REG_LOCAL_1, LOCAL_IDX_FUN_OBJ(emit));
-ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_1, REG_LOCAL_1, OFFSETOF_OBJ_FUN_BC_CONTEXT);
-ASM_LOAD_REG_REG_OFFSET(emit->as, REG_LOCAL_1, REG_LOCAL_1, OFFSETOF_MODULE_CONTEXT_OBJ_TABLE);
-ASM_LOAD_REG_REG_OFFSET(emit->as, REG_FUN_TABLE, REG_LOCAL_1, fun_table_off);
-#endif
 ASM_MOV_REG_LOCAL(emit->as, REG_LOCAL_1, LOCAL_IDX_EXC_HANDLER_PC(emit));
 ASM_JUMP_IF_REG_NONZERO(emit->as, REG_LOCAL_1, nlr_label, false);
 }
@@ -1385,7 +1389,6 @@ STATIC void emit_native_load_const_str(emit_t *emit, qstr qst) {
 }
 STATIC void emit_native_load_const_obj(emit_t *emit, mp_obj_t obj) {
-emit->scope->scope_flags |= MP_SCOPE_FLAG_HASCONSTS;
 emit_native_pre(emit);
 need_reg_single(emit, REG_RET, 0);
 emit_load_reg_with_object(emit, REG_RET, obj);
@@ -1404,7 +1407,7 @@ STATIC void emit_native_load_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
 EMIT_NATIVE_VIPER_TYPE_ERROR(emit, MP_ERROR_TEXT("local '%q' used before type known"), qst);
 }
 emit_native_pre(emit);
-if (local_num < REG_LOCAL_NUM && CAN_USE_REGS_FOR_LOCALS(emit)) {
+if (local_num < MAX_REGS_FOR_LOCAL_VARS && CAN_USE_REGS_FOR_LOCALS(emit)) {
 emit_post_push_reg(emit, vtype, reg_local_table[local_num]);
 } else {
 need_reg_single(emit, REG_TEMP0, 0);
@@ -1625,7 +1628,7 @@ STATIC void emit_native_load_subscr(emit_t *emit) {
 STATIC void emit_native_store_fast(emit_t *emit, qstr qst, mp_uint_t local_num) {
 vtype_kind_t vtype;
-if (local_num < REG_LOCAL_NUM && CAN_USE_REGS_FOR_LOCALS(emit)) {
+if (local_num < MAX_REGS_FOR_LOCAL_VARS && CAN_USE_REGS_FOR_LOCALS(emit)) {
 emit_pre_pop_reg(emit, &vtype, reg_local_table[local_num]);
 } else {
 emit_pre_pop_reg(emit, &vtype, REG_TEMP0);

File: py/emitnthumb.c

@@ -10,8 +10,6 @@
 // Word indices of REG_LOCAL_x in nlr_buf_t
 #define NLR_BUF_IDX_LOCAL_1 (3) // r4
-#define NLR_BUF_IDX_LOCAL_2 (4) // r5
-#define NLR_BUF_IDX_LOCAL_3 (5) // r6
 #define N_THUMB (1)
 #define EXPORT_FUN(name) emit_native_thumb_##name

File: py/emitnx64.c

@@ -10,8 +10,6 @@
 // Word indices of REG_LOCAL_x in nlr_buf_t
 #define NLR_BUF_IDX_LOCAL_1 (5) // rbx
-#define NLR_BUF_IDX_LOCAL_2 (6) // r12
-#define NLR_BUF_IDX_LOCAL_3 (7) // r13
 #define N_X64 (1)
 #define EXPORT_FUN(name) emit_native_x64_##name

File: py/emitnx86.c

@@ -11,8 +11,6 @@
 // Word indices of REG_LOCAL_x in nlr_buf_t
 #define NLR_BUF_IDX_LOCAL_1 (5) // ebx
-#define NLR_BUF_IDX_LOCAL_2 (7) // esi
-#define NLR_BUF_IDX_LOCAL_3 (6) // edi
 // x86 needs a table to know how many args a given function has
 STATIC byte mp_f_n_args[MP_F_NUMBER_OF] = {

File: py/emitnxtensa.c

@@ -10,8 +10,6 @@
 // Word indices of REG_LOCAL_x in nlr_buf_t
 #define NLR_BUF_IDX_LOCAL_1 (8) // a12
-#define NLR_BUF_IDX_LOCAL_2 (9) // a13
-#define NLR_BUF_IDX_LOCAL_3 (10) // a14
 #define N_XTENSA (1)
 #define EXPORT_FUN(name) emit_native_xtensa_##name

File: py/emitnxtensawin.c

@@ -11,8 +11,6 @@
 // Word indices of REG_LOCAL_x in nlr_buf_t
 #define NLR_BUF_IDX_LOCAL_1 (2 + 4) // a4
-#define NLR_BUF_IDX_LOCAL_2 (2 + 5) // a5
-#define NLR_BUF_IDX_LOCAL_3 (2 + 6) // a6
 #define N_NLR_SETJMP (1)
 #define N_XTENSAWIN (1)

File: py/persistentcode.c

@@ -75,38 +75,6 @@ typedef struct _reloc_info_t {
 uint8_t *bss;
 } reloc_info_t;
-#if MICROPY_EMIT_THUMB
-STATIC void asm_thumb_rewrite_mov(uint8_t *pc, uint16_t val) {
-// high part
-*(uint16_t *)pc = (*(uint16_t *)pc & 0xfbf0) | (val >> 1 & 0x0400) | (val >> 12);
-// low part
-*(uint16_t *)(pc + 2) = (*(uint16_t *)(pc + 2) & 0x0f00) | (val << 4 & 0x7000) | (val & 0x00ff);
-}
-#endif
-STATIC void arch_link_qstr(uint8_t *pc, bool is_obj, qstr qst) {
-mp_uint_t val = qst;
-if (is_obj) {
-val = (mp_uint_t)MP_OBJ_NEW_QSTR(qst);
-}
-#if MICROPY_EMIT_X86 || MICROPY_EMIT_X64 || MICROPY_EMIT_ARM || MICROPY_EMIT_XTENSA || MICROPY_EMIT_XTENSAWIN
-pc[0] = val & 0xff;
-pc[1] = (val >> 8) & 0xff;
-pc[2] = (val >> 16) & 0xff;
-pc[3] = (val >> 24) & 0xff;
-#elif MICROPY_EMIT_THUMB
-if (is_obj) {
-// qstr object, movw and movt
-asm_thumb_rewrite_mov(pc, val); // movw
-asm_thumb_rewrite_mov(pc + 4, val >> 16); // movt
-} else {
-// qstr number, movw instruction
-asm_thumb_rewrite_mov(pc, val); // movw
-}
-#endif
-}
 void mp_native_relocate(void *ri_in, uint8_t *text, uintptr_t reloc_text) {
 // Relocate native code
 reloc_info_t *ri = ri_in;
@@ -285,9 +253,6 @@ STATIC mp_raw_code_t *load_raw_code(mp_reader_t *reader) {
 } else if ((off & 3) == 3) {
 // Generic, aligned qstr-object link
 *(mp_obj_t *)dest = MP_OBJ_NEW_QSTR(qst);
-} else {
-// Architecture-specific link
-arch_link_qstr(dest, (off & 3) == 2, qst);
 }
 }
 }
@@ -424,7 +389,6 @@ STATIC mp_raw_code_t *load_raw_code(mp_reader_t *reader) {
 #if MICROPY_PERSISTENT_CODE_SAVE
 n_children,
 prelude_offset,
-0, NULL,
 #endif
 native_scope_flags, native_n_pos_args, native_type_sig
 );
@@ -605,11 +569,7 @@ STATIC void save_raw_code(mp_print_t *print, const mp_raw_code_t *rc) {
 #if MICROPY_EMIT_MACHINE_CODE
 if (rc->kind == MP_CODE_NATIVE_PY || rc->kind == MP_CODE_NATIVE_VIPER) {
 // Save qstr link table for native code
-mp_print_uint(print, rc->n_qstr);
-for (size_t i = 0; i < rc->n_qstr; ++i) {
-mp_print_uint(print, rc->qstr_link[i].off);
-save_qstr(print, rc->qstr_link[i].qst);
-}
+mp_print_uint(print, 0);
 }
 if (rc->kind == MP_CODE_NATIVE_PY) {

File: tools/mpy-tool.py

@@ -839,7 +839,7 @@ class RawCode(object):
 print("};")
 print()
-def freeze_raw_code(self, prelude_ptr=None, qstr_links=(), type_sig=0):
+def freeze_raw_code(self, prelude_ptr=None, type_sig=0):
 # Generate mp_raw_code_t.
 print("static const mp_raw_code_t raw_code_%s = {" % self.escaped_name)
 print(" .kind = %s," % RawCode.code_kind_str[self.code_kind])
@@ -879,8 +879,6 @@ class RawCode(object):
 print(" #endif")
 print(" #if MICROPY_EMIT_MACHINE_CODE")
 print(" .prelude_offset = %u," % self.prelude_offset)
-print(" .n_qstr = %u," % len(qstr_links))
-print(" .qstr_link = NULL,") # TODO
 print(" #endif")
 print(" #endif")
 print(" #if MICROPY_EMIT_MACHINE_CODE")
@@ -1038,47 +1036,6 @@ class RawCodeNative(RawCode):
 ip += sz
 self.disassemble_children()
-def _asm_thumb_rewrite_mov(self, pc, val):
-print(" (%u & 0xf0) | (%s >> 12)," % (self.fun_data[pc], val), end="")
-print(" (%u & 0xfb) | (%s >> 9 & 0x04)," % (self.fun_data[pc + 1], val), end="")
-print(" (%s & 0xff)," % (val,), end="")
-print(" (%u & 0x07) | (%s >> 4 & 0x70)," % (self.fun_data[pc + 3], val))
-def _link_qstr(self, pc, kind, qst):
-if kind == 0:
-# Generic 16-bit link
-print(" %s & 0xff, %s >> 8," % (qst, qst))
-return 2
-else:
-# Architecture-specific link
-is_obj = kind == 2
-if is_obj:
-qst = "((uintptr_t)MP_OBJ_NEW_QSTR(%s))" % qst
-if config.native_arch in (
-MP_NATIVE_ARCH_X86,
-MP_NATIVE_ARCH_X64,
-MP_NATIVE_ARCH_ARMV6,
-MP_NATIVE_ARCH_XTENSA,
-MP_NATIVE_ARCH_XTENSAWIN,
-):
-print(
-" %s & 0xff, (%s >> 8) & 0xff, (%s >> 16) & 0xff, %s >> 24,"
-% (qst, qst, qst, qst)
-)
-return 4
-elif MP_NATIVE_ARCH_ARMV6M <= config.native_arch <= MP_NATIVE_ARCH_ARMV7EMDP:
-if is_obj:
-# qstr object, movw and movt
-self._asm_thumb_rewrite_mov(pc, qst)
-self._asm_thumb_rewrite_mov(pc + 4, "(%s >> 16)" % qst)
-return 8
-else:
-# qstr number, movw instruction
-self._asm_thumb_rewrite_mov(pc, qst)
-return 4
-else:
-assert 0
 def freeze(self):
 if self.scope_flags & ~0x0F:
 raise FreezeError("unable to freeze code with relocations")
@@ -1098,21 +1055,13 @@ class RawCodeNative(RawCode):
 i = 0
 qi = 0
 while i < i_top:
-if qi < len(self.qstr_links) and i == self.qstr_links[qi][0]:
-# link qstr
-qi_off, qi_kind, qi_val = self.qstr_links[qi]
-i += self._link_qstr(i, qi_kind, qi_val.qstr_id)
-qi += 1
-else:
-# copy machine code (max 16 bytes)
-i16 = min(i + 16, i_top)
-if qi < len(self.qstr_links):
-i16 = min(i16, self.qstr_links[qi][0])
-print(" ", end="")
-for ii in range(i, i16):
-print(" 0x%02x," % self.fun_data[ii], end="")
-print()
-i = i16
+# copy machine code (max 16 bytes)
+i16 = min(i + 16, i_top)
+print(" ", end="")
+for ii in range(i, i16):
+print(" 0x%02x," % self.fun_data[ii], end="")
+print()
+i = i16
 print("};")
@@ -1134,7 +1083,7 @@ class RawCodeNative(RawCode):
 print("#endif")
 self.freeze_children(prelude_ptr)
-self.freeze_raw_code(prelude_ptr, self.qstr_links, self.type_sig)
+self.freeze_raw_code(prelude_ptr, self.type_sig)
 class MPYSegment: