py/asm*: Support assembling code to jump to a register, and get PC+off.
Useful for position independent code, and implementing state machines.
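For context on how a code generator uses these two primitives, here is a rough C-level analogy (a hedged sketch using the GCC/Clang computed-goto extension, not MicroPython emitter code): a table of label addresses plays the role of PC-relative label offsets, and the indirect goto plays the role of a jump through a register.

#include <stdio.h>

// Sum elements, alternating add/subtract, dispatching through a label table.
int run(const int *input, int n) {
    static void *states[] = { &&s_add, &&s_sub }; // label addresses, like PC+offset values
    int acc = 0, i = 0, state = 0;
    goto *states[state];                          // jump via an address held in a variable
s_add:
    if (i == n) return acc;
    acc += input[i++];
    state = 1;
    goto *states[state];
s_sub:
    if (i == n) return acc;
    acc -= input[i++];
    state = 0;
    goto *states[state];
}

int main(void) {
    int data[] = { 5, 1, 5, 1 };
    printf("%d\n", run(data, 4)); // 5 - 1 + 5 - 1 = 8
    return 0;
}

In the native emitters the same shape is now expressible directly: ASM_MOV_REG_PCREL materialises a label's address relative to the current code position (so the generated code stays position independent), and ASM_JUMP_REG branches to whatever address a register holds.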
commit 2964b41c28 (parent f7d6108d1a)

py/asmarm.c | 19
@@ -273,6 +273,21 @@ void asm_arm_mov_reg_local_addr(asm_arm_t *as, uint rd, int local_num) {
    emit_al(as, asm_arm_op_add_imm(rd, ASM_ARM_REG_SP, local_num << 2));
}

void asm_arm_mov_reg_pcrel(asm_arm_t *as, uint reg_dest, uint label) {
    assert(label < as->base.max_num_labels);
    mp_uint_t dest = as->base.label_offsets[label];
    mp_int_t rel = dest - as->base.code_offset;
    rel -= 12 + 8; // adjust for load of rel, and then PC+8 prefetch of add_reg_reg_reg

    // To load rel into reg_dest, insert immediate into code and jump over it
    emit_al(as, 0x59f0000 | (reg_dest << 12)); // ldr rd, [pc]
    emit_al(as, 0xa000000); // b pc
    emit(as, rel);

    // Do reg_dest += PC
    asm_arm_add_reg_reg_reg(as, reg_dest, reg_dest, ASM_ARM_REG_PC);
}

void asm_arm_lsl_reg_reg(asm_arm_t *as, uint rd, uint rs) {
    // mov rd, rd, lsl rs
    emit_al(as, 0x1a00010 | (rd << 12) | (rs << 8) | rd);

@@ -362,4 +377,8 @@ void asm_arm_bl_ind(asm_arm_t *as, void *fun_ptr, uint fun_id, uint reg_temp) {
    emit(as, (uint) fun_ptr);
}

void asm_arm_bx_reg(asm_arm_t *as, uint reg_src) {
    emit_al(as, 0x012fff10 | reg_src);
}

#endif // MICROPY_EMIT_ARM
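Not part of the commit, but as a sketch of the machine words the ARM version lays down: the following prints the four-word sequence for reg_dest = r1, reusing the bit patterns from asm_arm_mov_reg_pcrel above and assuming emit_al() simply ORs in the AL condition (0xE in the top nibble); the rel value is a placeholder.

#include <stdint.h>
#include <stdio.h>

int main(void) {
    uint32_t reg_dest = 1; // r1, chosen arbitrarily for the example
    uint32_t rel = 0x100;  // placeholder PC-relative offset
    uint32_t words[4] = {
        0xe0000000 | 0x59f0000 | (reg_dest << 12), // ldr r1, [pc]  ; loads the literal below (PC reads as .+8)
        0xe0000000 | 0xa000000,                    // b .+8         ; skip over the literal word
        rel,                                       // the literal: rel itself
        0xe0000000 | 0x0800000 | (reg_dest << 16) | (reg_dest << 12) | 15, // add r1, r1, pc
    };
    for (int i = 0; i < 4; i++) {
        printf("%08x\n", (unsigned)words[i]);
    }
    return 0;
}

The rel -= 12 + 8 adjustment accounts for the 12 bytes of ldr/b/literal plus the 8-byte PC read-ahead seen by the final add, so at run time reg_dest ends up holding the label's absolute address.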

py/asmarm.h

@@ -98,6 +98,7 @@ void asm_arm_and_reg_reg_reg(asm_arm_t *as, uint rd, uint rn, uint rm);
void asm_arm_eor_reg_reg_reg(asm_arm_t *as, uint rd, uint rn, uint rm);
void asm_arm_orr_reg_reg_reg(asm_arm_t *as, uint rd, uint rn, uint rm);
void asm_arm_mov_reg_local_addr(asm_arm_t *as, uint rd, int local_num);
void asm_arm_mov_reg_pcrel(asm_arm_t *as, uint reg_dest, uint label);
void asm_arm_lsl_reg_reg(asm_arm_t *as, uint rd, uint rs);
void asm_arm_asr_reg_reg(asm_arm_t *as, uint rd, uint rs);

@@ -121,6 +122,7 @@ void asm_arm_pop(asm_arm_t *as, uint reglist);
void asm_arm_bcc_label(asm_arm_t *as, int cond, uint label);
void asm_arm_b_label(asm_arm_t *as, uint label);
void asm_arm_bl_ind(asm_arm_t *as, void *fun_ptr, uint fun_id, uint reg_temp);
void asm_arm_bx_reg(asm_arm_t *as, uint reg_src);

#if GENERIC_ASM_API

@@ -165,6 +167,7 @@ void asm_arm_bl_ind(asm_arm_t *as, void *fun_ptr, uint fun_id, uint reg_temp);
        asm_arm_cmp_reg_reg(as, reg1, reg2); \
        asm_arm_bcc_label(as, ASM_ARM_CC_EQ, label); \
    } while (0)
#define ASM_JUMP_REG(as, reg) asm_arm_bx_reg((as), (reg))
#define ASM_CALL_IND(as, ptr, idx) asm_arm_bl_ind(as, ptr, idx, ASM_ARM_REG_R3)

#define ASM_MOV_LOCAL_REG(as, local_num, reg_src) asm_arm_mov_local_reg((as), (local_num), (reg_src))

@@ -173,6 +176,7 @@ void asm_arm_bl_ind(asm_arm_t *as, void *fun_ptr, uint fun_id, uint reg_temp);
#define ASM_MOV_REG_LOCAL(as, reg_dest, local_num) asm_arm_mov_reg_local((as), (reg_dest), (local_num))
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_arm_mov_reg_reg((as), (reg_dest), (reg_src))
#define ASM_MOV_REG_LOCAL_ADDR(as, reg_dest, local_num) asm_arm_mov_reg_local_addr((as), (reg_dest), (local_num))
#define ASM_MOV_REG_PCREL(as, reg_dest, label) asm_arm_mov_reg_pcrel((as), (reg_dest), (label))

#define ASM_LSL_REG_REG(as, reg_dest, reg_shift) asm_arm_lsl_reg_reg((as), (reg_dest), (reg_shift))
#define ASM_ASR_REG_REG(as, reg_dest, reg_shift) asm_arm_asr_reg_reg((as), (reg_dest), (reg_shift))

py/asmthumb.c

@@ -310,6 +310,15 @@ void asm_thumb_mov_reg_local_addr(asm_thumb_t *as, uint rlo_dest, int local_num)
    asm_thumb_op16(as, OP_ADD_REG_SP_OFFSET(rlo_dest, word_offset));
}

void asm_thumb_mov_reg_pcrel(asm_thumb_t *as, uint rlo_dest, uint label) {
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->base.code_offset;
    rel -= 4 + 4; // adjust for mov_reg_i16 and then PC+4 prefetch of add_reg_reg
    rel |= 1; // to stay in Thumb state when jumping to this address
    asm_thumb_mov_reg_i16(as, ASM_THUMB_OP_MOVW, rlo_dest, rel); // 4 bytes
    asm_thumb_add_reg_reg(as, rlo_dest, ASM_THUMB_REG_R15); // 2 bytes
}

// this could be wrong, because it should have a range of +/- 16MiB...
#define OP_BW_HI(byte_offset) (0xf000 | (((byte_offset) >> 12) & 0x07ff))
#define OP_BW_LO(byte_offset) (0xb800 | (((byte_offset) >> 1) & 0x07ff))
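A quick self-check of the Thumb offset arithmetic (not commit code; the offsets are invented and Thumb's PC is assumed to read as the instruction address plus 4):

#include <assert.h>
#include <stdint.h>

int main(void) {
    uint32_t code_offset = 0x20; // assumed offset of the MOVW within the code block
    uint32_t dest = 0x80;        // assumed label offset
    int32_t rel = dest - code_offset;
    rel -= 4 + 4;                // MOVW is 4 bytes; the ADD then reads PC as its own address + 4
    rel |= 1;                    // keep the Thumb bit for a later bx via this address
    // The ADD sits 4 bytes after the MOVW and sees PC as that address + 4.
    uint32_t pc_seen_by_add = code_offset + 4 + 4;
    assert(pc_seen_by_add + (uint32_t)rel == (dest | 1));
    return 0;
}

Setting bit 0 means the computed address can later be fed to asm_thumb_bx_reg without leaving Thumb state.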

py/asmthumb.h

@@ -180,6 +180,26 @@ void asm_thumb_format_4(asm_thumb_t *as, uint op, uint rlo_dest, uint rlo_src);

static inline void asm_thumb_cmp_rlo_rlo(asm_thumb_t *as, uint rlo_dest, uint rlo_src) { asm_thumb_format_4(as, ASM_THUMB_FORMAT_4_CMP, rlo_dest, rlo_src); }

// FORMAT 5: hi register operations (add, cmp, mov, bx)
// For add/cmp/mov, at least one of the args must be a high register

#define ASM_THUMB_FORMAT_5_ADD (0x4400)
#define ASM_THUMB_FORMAT_5_BX (0x4700)

#define ASM_THUMB_FORMAT_5_ENCODE(op, r_dest, r_src) \
    ((op) | ((r_dest) << 4 & 0x0080) | ((r_src) << 3) | ((r_dest) & 0x0007))

static inline void asm_thumb_format_5(asm_thumb_t *as, uint op, uint r_dest, uint r_src) {
    asm_thumb_op16(as, ASM_THUMB_FORMAT_5_ENCODE(op, r_dest, r_src));
}

static inline void asm_thumb_add_reg_reg(asm_thumb_t *as, uint r_dest, uint r_src) {
    asm_thumb_format_5(as, ASM_THUMB_FORMAT_5_ADD, r_dest, r_src);
}
static inline void asm_thumb_bx_reg(asm_thumb_t *as, uint r_src) {
    asm_thumb_format_5(as, ASM_THUMB_FORMAT_5_BX, 0, r_src);
}

// FORMAT 9: load/store with immediate offset
// For word transfers the offset must be aligned, and >>2

@@ -233,6 +253,7 @@ void asm_thumb_mov_reg_i32_aligned(asm_thumb_t *as, uint reg_dest, int i32); //
void asm_thumb_mov_local_reg(asm_thumb_t *as, int local_num_dest, uint rlo_src); // convenience
void asm_thumb_mov_reg_local(asm_thumb_t *as, uint rlo_dest, int local_num); // convenience
void asm_thumb_mov_reg_local_addr(asm_thumb_t *as, uint rlo_dest, int local_num); // convenience
void asm_thumb_mov_reg_pcrel(asm_thumb_t *as, uint rlo_dest, uint label);

void asm_thumb_b_label(asm_thumb_t *as, uint label); // convenience: picks narrow or wide branch
void asm_thumb_bcc_label(asm_thumb_t *as, int cc, uint label); // convenience: picks narrow or wide branch

@@ -282,6 +303,7 @@ void asm_thumb_bl_ind(asm_thumb_t *as, void *fun_ptr, uint fun_id, uint reg_temp
        asm_thumb_cmp_rlo_rlo(as, reg1, reg2); \
        asm_thumb_bcc_label(as, ASM_THUMB_CC_EQ, label); \
    } while (0)
#define ASM_JUMP_REG(as, reg) asm_thumb_bx_reg((as), (reg))
#define ASM_CALL_IND(as, ptr, idx) asm_thumb_bl_ind(as, ptr, idx, ASM_THUMB_REG_R3)

#define ASM_MOV_LOCAL_REG(as, local_num, reg) asm_thumb_mov_local_reg((as), (local_num), (reg))

@@ -290,6 +312,7 @@ void asm_thumb_bl_ind(asm_thumb_t *as, void *fun_ptr, uint fun_id, uint reg_temp
#define ASM_MOV_REG_LOCAL(as, reg_dest, local_num) asm_thumb_mov_reg_local((as), (reg_dest), (local_num))
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_thumb_mov_reg_reg((as), (reg_dest), (reg_src))
#define ASM_MOV_REG_LOCAL_ADDR(as, reg_dest, local_num) asm_thumb_mov_reg_local_addr((as), (reg_dest), (local_num))
#define ASM_MOV_REG_PCREL(as, rlo_dest, label) asm_thumb_mov_reg_pcrel((as), (rlo_dest), (label))

#define ASM_LSL_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_LSL, (reg_dest), (reg_shift))
#define ASM_ASR_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_ASR, (reg_dest), (reg_shift))

py/asmx64.c | 14
@@ -76,6 +76,7 @@
#define OPCODE_TEST_R64_WITH_RM64 (0x85) /* /r */
#define OPCODE_JMP_REL8 (0xeb)
#define OPCODE_JMP_REL32 (0xe9)
#define OPCODE_JMP_RM64 (0xff) /* /4 */
#define OPCODE_JCC_REL8 (0x70) /* | jcc type */
#define OPCODE_JCC_REL32_A (0x0f)
#define OPCODE_JCC_REL32_B (0x80) /* | jcc type */

@@ -481,6 +482,11 @@ void asm_x64_setcc_r8(asm_x64_t *as, int jcc_type, int dest_r8) {
    asm_x64_write_byte_3(as, OPCODE_SETCC_RM8_A, OPCODE_SETCC_RM8_B | jcc_type, MODRM_R64(0) | MODRM_RM_REG | MODRM_RM_R64(dest_r8));
}

void asm_x64_jmp_reg(asm_x64_t *as, int src_r64) {
    assert(src_r64 < 8);
    asm_x64_write_byte_2(as, OPCODE_JMP_RM64, MODRM_R64(4) | MODRM_RM_REG | MODRM_RM_R64(src_r64));
}

STATIC mp_uint_t get_label_dest(asm_x64_t *as, mp_uint_t label) {
    assert(label < as->base.max_num_labels);
    return as->base.label_offsets[label];

@@ -582,6 +588,14 @@ void asm_x64_mov_local_addr_to_r64(asm_x64_t *as, int local_num, int dest_r64) {
    }
}

void asm_x64_mov_reg_pcrel(asm_x64_t *as, int dest_r64, mp_uint_t label) {
    assert(dest_r64 < 8);
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - (as->base.code_offset + 7);
    asm_x64_write_byte_3(as, REX_PREFIX | REX_W, OPCODE_LEA_MEM_TO_R64, MODRM_R64(dest_r64) | MODRM_RM_R64(5));
    asm_x64_write_word32(as, rel);
}

/*
void asm_x64_push_local(asm_x64_t *as, int local_num) {
    asm_x64_push_disp(as, ASM_X64_REG_RBP, asm_x64_local_offset_from_ebp(as, local_num));
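A sketch (not from the commit) of the seven bytes asm_x64_mov_reg_pcrel writes for dest_r64 = 7 (rdi), i.e. a RIP-relative lea rdi, [rip + rel]. The displacement in such an instruction is measured from its end, which is why rel is computed against code_offset + 7; the rel used here is a placeholder.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

int main(void) {
    int dest_r64 = 7;    // rdi, register number < 8 as the assert requires
    int32_t rel = 0x40;  // placeholder displacement
    uint8_t buf[7];
    buf[0] = 0x48;                           // REX.W prefix
    buf[1] = 0x8d;                           // LEA opcode
    buf[2] = (uint8_t)((dest_r64 << 3) | 5); // ModRM: mod=00, rm=101 -> RIP-relative
    memcpy(&buf[3], &rel, sizeof(rel));      // 32-bit displacement
    for (int i = 0; i < 7; i++) {
        printf("%02x ", buf[i]);
    }
    printf("\n"); // expected on a little-endian host: 48 8d 3d 40 00 00 00
    return 0;
}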

py/asmx64.h

@@ -106,6 +106,7 @@ void asm_x64_cmp_r64_with_r64(asm_x64_t* as, int src_r64_a, int src_r64_b);
void asm_x64_test_r8_with_r8(asm_x64_t* as, int src_r64_a, int src_r64_b);
void asm_x64_test_r64_with_r64(asm_x64_t *as, int src_r64_a, int src_r64_b);
void asm_x64_setcc_r8(asm_x64_t* as, int jcc_type, int dest_r8);
void asm_x64_jmp_reg(asm_x64_t *as, int src_r64);
void asm_x64_jmp_label(asm_x64_t* as, mp_uint_t label);
void asm_x64_jcc_label(asm_x64_t* as, int jcc_type, mp_uint_t label);
void asm_x64_entry(asm_x64_t* as, int num_locals);

@@ -113,6 +114,7 @@ void asm_x64_exit(asm_x64_t* as);
void asm_x64_mov_local_to_r64(asm_x64_t* as, int src_local_num, int dest_r64);
void asm_x64_mov_r64_to_local(asm_x64_t* as, int src_r64, int dest_local_num);
void asm_x64_mov_local_addr_to_r64(asm_x64_t* as, int local_num, int dest_r64);
void asm_x64_mov_reg_pcrel(asm_x64_t *as, int dest_r64, mp_uint_t label);
void asm_x64_call_ind(asm_x64_t* as, void* ptr, int temp_r32);

#if GENERIC_ASM_API

@@ -169,6 +171,7 @@ void asm_x64_call_ind(asm_x64_t* as, void* ptr, int temp_r32);
        asm_x64_cmp_r64_with_r64(as, reg1, reg2); \
        asm_x64_jcc_label(as, ASM_X64_CC_JE, label); \
    } while (0)
#define ASM_JUMP_REG(as, reg) asm_x64_jmp_reg((as), (reg))
#define ASM_CALL_IND(as, ptr, idx) asm_x64_call_ind(as, ptr, ASM_X64_REG_RAX)

#define ASM_MOV_LOCAL_REG(as, local_num, reg_src) asm_x64_mov_r64_to_local((as), (reg_src), (local_num))

@@ -177,6 +180,7 @@ void asm_x64_call_ind(asm_x64_t* as, void* ptr, int temp_r32);
#define ASM_MOV_REG_LOCAL(as, reg_dest, local_num) asm_x64_mov_local_to_r64((as), (local_num), (reg_dest))
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_x64_mov_r64_r64((as), (reg_dest), (reg_src))
#define ASM_MOV_REG_LOCAL_ADDR(as, reg_dest, local_num) asm_x64_mov_local_addr_to_r64((as), (local_num), (reg_dest))
#define ASM_MOV_REG_PCREL(as, reg_dest, label) asm_x64_mov_reg_pcrel((as), (reg_dest), (label))

#define ASM_LSL_REG(as, reg) asm_x64_shl_r64_cl((as), (reg))
#define ASM_ASR_REG(as, reg) asm_x64_sar_r64_cl((as), (reg))

py/asmx86.c | 16
@@ -76,6 +76,7 @@
#define OPCODE_TEST_R32_WITH_RM32 (0x85) /* /r */
#define OPCODE_JMP_REL8 (0xeb)
#define OPCODE_JMP_REL32 (0xe9)
#define OPCODE_JMP_RM32 (0xff) /* /4 */
#define OPCODE_JCC_REL8 (0x70) /* | jcc type */
#define OPCODE_JCC_REL32_A (0x0f)
#define OPCODE_JCC_REL32_B (0x80) /* | jcc type */

@@ -343,6 +344,10 @@ void asm_x86_setcc_r8(asm_x86_t *as, mp_uint_t jcc_type, int dest_r8) {
    asm_x86_write_byte_3(as, OPCODE_SETCC_RM8_A, OPCODE_SETCC_RM8_B | jcc_type, MODRM_R32(0) | MODRM_RM_REG | MODRM_RM_R32(dest_r8));
}

void asm_x86_jmp_reg(asm_x86_t *as, int src_r32) {
    asm_x86_write_byte_2(as, OPCODE_JMP_RM32, MODRM_R32(4) | MODRM_RM_REG | MODRM_RM_R32(src_r32));
}

STATIC mp_uint_t get_label_dest(asm_x86_t *as, mp_uint_t label) {
    assert(label < as->base.max_num_labels);
    return as->base.label_offsets[label];

@@ -462,6 +467,17 @@ void asm_x86_mov_local_addr_to_r32(asm_x86_t *as, int local_num, int dest_r32) {
    }
}

void asm_x86_mov_reg_pcrel(asm_x86_t *as, int dest_r32, mp_uint_t label) {
    asm_x86_write_byte_1(as, OPCODE_CALL_REL32);
    asm_x86_write_word32(as, 0);
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->base.code_offset;
    asm_x86_pop_r32(as, dest_r32);
    // PC rel is usually a forward reference, so need to assume it's large
    asm_x86_write_byte_2(as, OPCODE_ADD_I32_TO_RM32, MODRM_R32(0) | MODRM_RM_REG | MODRM_RM_R32(dest_r32));
    asm_x86_write_word32(as, rel);
}

#if 0
void asm_x86_push_local(asm_x86_t *as, int local_num) {
    asm_x86_push_disp(as, ASM_X86_REG_EBP, asm_x86_local_offset_from_ebp(as, local_num));
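The x86 version cannot use an EIP-relative lea, so it reads the program counter with a call +0 / pop pair and then adds a 32-bit offset. Here is a sketch (not from the commit) of the twelve bytes it produces for dest_r32 = 0 (eax), assuming the usual IA-32 encodings for pop r32 (0x58+reg) and add r/m32, imm32 (0x81 /0); the rel value is a placeholder.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

int main(void) {
    int dest_r32 = 0;   // eax
    int32_t rel = 0x30; // placeholder: label offset minus code_offset after the call
    uint8_t buf[12];
    buf[0] = 0xe8;                       // call rel32 ...
    memset(&buf[1], 0, 4);               // ... with rel32 = 0: falls through, pushes next EIP
    buf[5] = (uint8_t)(0x58 | dest_r32); // pop eax: eax now holds the current PC
    buf[6] = 0x81;                       // add r/m32, imm32
    buf[7] = (uint8_t)(0xc0 | dest_r32); // ModRM: mod=11, /0, rm=eax
    memcpy(&buf[8], &rel, sizeof(rel));  // the 32-bit offset to the label
    for (int i = 0; i < 12; i++) {
        printf("%02x ", buf[i]);
    }
    printf("\n");
    return 0;
}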

py/asmx86.h

@@ -103,6 +103,7 @@ void asm_x86_cmp_r32_with_r32(asm_x86_t* as, int src_r32_a, int src_r32_b);
void asm_x86_test_r8_with_r8(asm_x86_t* as, int src_r32_a, int src_r32_b);
void asm_x86_test_r32_with_r32(asm_x86_t* as, int src_r32_a, int src_r32_b);
void asm_x86_setcc_r8(asm_x86_t* as, mp_uint_t jcc_type, int dest_r8);
void asm_x86_jmp_reg(asm_x86_t *as, int src_r32);
void asm_x86_jmp_label(asm_x86_t* as, mp_uint_t label);
void asm_x86_jcc_label(asm_x86_t* as, mp_uint_t jcc_type, mp_uint_t label);
void asm_x86_entry(asm_x86_t* as, int num_locals);

@@ -111,6 +112,7 @@ void asm_x86_mov_arg_to_r32(asm_x86_t *as, int src_arg_num, int dest_r32);
void asm_x86_mov_local_to_r32(asm_x86_t* as, int src_local_num, int dest_r32);
void asm_x86_mov_r32_to_local(asm_x86_t* as, int src_r32, int dest_local_num);
void asm_x86_mov_local_addr_to_r32(asm_x86_t* as, int local_num, int dest_r32);
void asm_x86_mov_reg_pcrel(asm_x86_t *as, int dest_r32, mp_uint_t label);
void asm_x86_call_ind(asm_x86_t* as, void* ptr, mp_uint_t n_args, int temp_r32);

#if GENERIC_ASM_API

@@ -167,6 +169,7 @@ void asm_x86_call_ind(asm_x86_t* as, void* ptr, mp_uint_t n_args, int temp_r32);
        asm_x86_cmp_r32_with_r32(as, reg1, reg2); \
        asm_x86_jcc_label(as, ASM_X86_CC_JE, label); \
    } while (0)
#define ASM_JUMP_REG(as, reg) asm_x86_jmp_reg((as), (reg))
#define ASM_CALL_IND(as, ptr, idx) asm_x86_call_ind(as, ptr, mp_f_n_args[idx], ASM_X86_REG_EAX)

#define ASM_MOV_LOCAL_REG(as, local_num, reg_src) asm_x86_mov_r32_to_local((as), (reg_src), (local_num))

@@ -175,6 +178,7 @@ void asm_x86_call_ind(asm_x86_t* as, void* ptr, mp_uint_t n_args, int temp_r32);
#define ASM_MOV_REG_LOCAL(as, reg_dest, local_num) asm_x86_mov_local_to_r32((as), (local_num), (reg_dest))
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_x86_mov_r32_r32((as), (reg_dest), (reg_src))
#define ASM_MOV_REG_LOCAL_ADDR(as, reg_dest, local_num) asm_x86_mov_local_addr_to_r32((as), (local_num), (reg_dest))
#define ASM_MOV_REG_PCREL(as, reg_dest, label) asm_x86_mov_reg_pcrel((as), (reg_dest), (label))

#define ASM_LSL_REG(as, reg) asm_x86_shl_r32_cl((as), (reg))
#define ASM_ASR_REG(as, reg) asm_x86_sar_r32_cl((as), (reg))

py/asmxtensa.c

@@ -182,4 +182,26 @@ void asm_xtensa_mov_reg_local_addr(asm_xtensa_t *as, uint reg_dest, int local_nu
    asm_xtensa_op_addi(as, reg_dest, reg_dest, (4 + local_num) * WORD_SIZE);
}

void asm_xtensa_mov_reg_pcrel(asm_xtensa_t *as, uint reg_dest, uint label) {
    // Get relative offset from PC
    uint32_t dest = get_label_dest(as, label);
    int32_t rel = dest - as->base.code_offset;
    rel -= 3 + 3; // account for 3 bytes of movi instruction, 3 bytes call0 adjustment
    asm_xtensa_op_movi(as, reg_dest, rel); // imm has 12-bit range

    // Use call0 to get PC+3 into a0
    // call0 destination must be aligned on 4 bytes:
    // - code_offset&3=0: off=0, pad=1
    // - code_offset&3=1: off=0, pad=0
    // - code_offset&3=2: off=1, pad=3
    // - code_offset&3=3: off=1, pad=2
    uint32_t off = as->base.code_offset >> 1 & 1;
    uint32_t pad = (5 - as->base.code_offset) & 3;
    asm_xtensa_op_call0(as, off);
    mp_asm_base_get_cur_to_write_bytes(&as->base, pad);

    // Add PC to relative offset
    asm_xtensa_op_add(as, reg_dest, reg_dest, ASM_XTENSA_REG_A0);
}

#endif // MICROPY_EMIT_XTENSA || MICROPY_EMIT_INLINE_XTENSA
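A small self-check of the call0 alignment bookkeeping above (not commit code). It assumes CALL0 sets a0 to its own address plus 3 and jumps to (PC & ~3) + 4 + 4*imm, and that code_offset at the point off/pad are computed is the offset of the call0 itself (the movi before it is 3 bytes long):

#include <assert.h>
#include <stdint.h>

int main(void) {
    for (uint32_t call0_offset = 0; call0_offset < 64; call0_offset++) {
        uint32_t off = (call0_offset >> 1) & 1;                // same expressions as above
        uint32_t pad = (5 - call0_offset) & 3;
        uint32_t target = (call0_offset & ~3u) + 4 + 4 * off;  // where the call0 lands
        uint32_t add_offset = call0_offset + 3 + pad;          // call0 is 3 bytes, then padding
        assert(target == add_offset);                          // execution resumes exactly at the add
    }
    return 0;
}

With rel already reduced by 3 + 3, adding a0 in the final add then reproduces the label's absolute address.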

py/asmxtensa.h

@@ -133,6 +133,10 @@ static inline void asm_xtensa_op_bccz(asm_xtensa_t *as, uint cond, uint reg_src,
    asm_xtensa_op24(as, ASM_XTENSA_ENCODE_BRI12(6, reg_src, cond, 1, rel12 & 0xfff));
}

static inline void asm_xtensa_op_call0(asm_xtensa_t *as, int32_t rel18) {
    asm_xtensa_op24(as, ASM_XTENSA_ENCODE_CALL(5, 0, rel18 & 0x3ffff));
}

static inline void asm_xtensa_op_callx0(asm_xtensa_t *as, uint reg) {
    asm_xtensa_op24(as, ASM_XTENSA_ENCODE_CALLX(0, 0, 0, 0, reg, 3, 0));
}

@@ -238,6 +242,7 @@ void asm_xtensa_mov_reg_i32(asm_xtensa_t *as, uint reg_dest, uint32_t i32);
void asm_xtensa_mov_local_reg(asm_xtensa_t *as, int local_num, uint reg_src);
void asm_xtensa_mov_reg_local(asm_xtensa_t *as, uint reg_dest, int local_num);
void asm_xtensa_mov_reg_local_addr(asm_xtensa_t *as, uint reg_dest, int local_num);
void asm_xtensa_mov_reg_pcrel(asm_xtensa_t *as, uint reg_dest, uint label);

#if GENERIC_ASM_API

@@ -274,6 +279,7 @@ void asm_xtensa_mov_reg_local_addr(asm_xtensa_t *as, uint reg_dest, int local_nu
    asm_xtensa_bccz_reg_label(as, ASM_XTENSA_CCZ_NE, reg, label)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    asm_xtensa_bcc_reg_reg_label(as, ASM_XTENSA_CC_EQ, reg1, reg2, label)
#define ASM_JUMP_REG(as, reg) asm_xtensa_op_jx((as), (reg))
#define ASM_CALL_IND(as, ptr, idx) \
    do { \
        asm_xtensa_mov_reg_i32(as, ASM_XTENSA_REG_A0, (uint32_t)ptr); \

@@ -286,6 +292,7 @@ void asm_xtensa_mov_reg_local_addr(asm_xtensa_t *as, uint reg_dest, int local_nu
#define ASM_MOV_REG_LOCAL(as, reg_dest, local_num) asm_xtensa_mov_reg_local((as), (reg_dest), (local_num))
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_xtensa_op_mov_n((as), (reg_dest), (reg_src))
#define ASM_MOV_REG_LOCAL_ADDR(as, reg_dest, local_num) asm_xtensa_mov_reg_local_addr((as), (reg_dest), (local_num))
#define ASM_MOV_REG_PCREL(as, reg_dest, label) asm_xtensa_mov_reg_pcrel((as), (reg_dest), (label))

#define ASM_LSL_REG_REG(as, reg_dest, reg_shift) \
    do { \