py: Move arch-specific assembler macros from emitnative to asmXXX.h.
parent c2713592bc
commit c8746e1e72
py/asmarm.h | 80
@@ -122,4 +122,84 @@ void asm_arm_bcc_label(asm_arm_t *as, int cond, uint label);
void asm_arm_b_label(asm_arm_t *as, uint label);
void asm_arm_bl_ind(asm_arm_t *as, void *fun_ptr, uint fun_id, uint reg_temp);

#if GENERIC_ASM_API

// The following macros provide a (mostly) arch-independent API to
// generate native code, and are used by the native emitter.

#define ASM_WORD_SIZE (4)

#define REG_RET ASM_ARM_REG_R0
#define REG_ARG_1 ASM_ARM_REG_R0
#define REG_ARG_2 ASM_ARM_REG_R1
#define REG_ARG_3 ASM_ARM_REG_R2
#define REG_ARG_4 ASM_ARM_REG_R3

#define REG_TEMP0 ASM_ARM_REG_R0
#define REG_TEMP1 ASM_ARM_REG_R1
#define REG_TEMP2 ASM_ARM_REG_R2

#define REG_LOCAL_1 ASM_ARM_REG_R4
#define REG_LOCAL_2 ASM_ARM_REG_R5
#define REG_LOCAL_3 ASM_ARM_REG_R6
#define REG_LOCAL_NUM (3)

#define ASM_T asm_arm_t
#define ASM_END_PASS asm_arm_end_pass
#define ASM_ENTRY asm_arm_entry
#define ASM_EXIT asm_arm_exit

#define ASM_JUMP asm_arm_b_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_arm_cmp_reg_i8(as, reg, 0); \
        asm_arm_bcc_label(as, ASM_ARM_CC_EQ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_arm_cmp_reg_i8(as, reg, 0); \
        asm_arm_bcc_label(as, ASM_ARM_CC_NE, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_arm_cmp_reg_reg(as, reg1, reg2); \
        asm_arm_bcc_label(as, ASM_ARM_CC_EQ, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_arm_bl_ind(as, ptr, idx, ASM_ARM_REG_R3)

#define ASM_MOV_REG_TO_LOCAL(as, reg, local_num) asm_arm_mov_local_reg(as, (local_num), (reg))
#define ASM_MOV_IMM_TO_REG(as, imm, reg) asm_arm_mov_reg_i32(as, (reg), (imm))
#define ASM_MOV_ALIGNED_IMM_TO_REG(as, imm, reg) asm_arm_mov_reg_i32(as, (reg), (imm))
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_arm_mov_reg_i32(as, (reg_temp), (imm)); \
        asm_arm_mov_local_reg(as, (local_num), (reg_temp)); \
    } while (false)
#define ASM_MOV_LOCAL_TO_REG(as, local_num, reg) asm_arm_mov_reg_local(as, (reg), (local_num))
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_arm_mov_reg_reg((as), (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG(as, local_num, reg) asm_arm_mov_reg_local_addr(as, (reg), (local_num))

#define ASM_LSL_REG_REG(as, reg_dest, reg_shift) asm_arm_lsl_reg_reg((as), (reg_dest), (reg_shift))
#define ASM_ASR_REG_REG(as, reg_dest, reg_shift) asm_arm_asr_reg_reg((as), (reg_dest), (reg_shift))
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_arm_orr_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_arm_eor_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_arm_and_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_arm_add_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_arm_sub_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_arm_mul_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))

#define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_arm_ldr_reg_reg((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_arm_ldr_reg_reg((as), (reg_dest), (reg_base), 4 * (word_offset))
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_arm_ldrb_reg_reg((as), (reg_dest), (reg_base))
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_arm_ldrh_reg_reg((as), (reg_dest), (reg_base))
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_arm_ldr_reg_reg((as), (reg_dest), (reg_base), 0)

#define ASM_STORE_REG_REG(as, reg_value, reg_base) asm_arm_str_reg_reg((as), (reg_value), (reg_base), 0)
#define ASM_STORE_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_arm_str_reg_reg((as), (reg_dest), (reg_base), 4 * (word_offset))
#define ASM_STORE8_REG_REG(as, reg_value, reg_base) asm_arm_strb_reg_reg((as), (reg_value), (reg_base))
#define ASM_STORE16_REG_REG(as, reg_value, reg_base) asm_arm_strh_reg_reg((as), (reg_value), (reg_base))
#define ASM_STORE32_REG_REG(as, reg_value, reg_base) asm_arm_str_reg_reg((as), (reg_value), (reg_base), 0)

#endif // GENERIC_ASM_API

#endif // __MICROPY_INCLUDED_PY_ASMARM_H__
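
Note: the value of the GENERIC_ASM_API block above is that emitter code can be written once against the generic names and retargeted by swapping the included header. A minimal sketch (the helper below is hypothetical, not code from this commit):

    static void emit_load_elem_and_test(ASM_T *as, int reg_base, int word_index) {
        // Fetch machine word `word_index` from the array at reg_base into the
        // return register; each port's macro handles its own offset scaling.
        ASM_LOAD_REG_REG_OFFSET(as, REG_RET, reg_base, word_index);
        // Branch to label 0 if the loaded value is zero.
        ASM_JUMP_IF_REG_ZERO(as, REG_RET, 0);
    }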
py/asmthumb.h | 81
@@ -237,4 +237,85 @@ void asm_thumb_b_label(asm_thumb_t *as, uint label); // convenience: picks narrow or wide branch
void asm_thumb_bcc_label(asm_thumb_t *as, int cc, uint label); // convenience: picks narrow or wide branch
void asm_thumb_bl_ind(asm_thumb_t *as, void *fun_ptr, uint fun_id, uint reg_temp); // convenience

#if GENERIC_ASM_API

// The following macros provide a (mostly) arch-independent API to
// generate native code, and are used by the native emitter.

#define ASM_WORD_SIZE (4)

#define REG_RET ASM_THUMB_REG_R0
#define REG_ARG_1 ASM_THUMB_REG_R0
#define REG_ARG_2 ASM_THUMB_REG_R1
#define REG_ARG_3 ASM_THUMB_REG_R2
#define REG_ARG_4 ASM_THUMB_REG_R3
// rest of args go on stack

#define REG_TEMP0 ASM_THUMB_REG_R0
#define REG_TEMP1 ASM_THUMB_REG_R1
#define REG_TEMP2 ASM_THUMB_REG_R2

#define REG_LOCAL_1 ASM_THUMB_REG_R4
#define REG_LOCAL_2 ASM_THUMB_REG_R5
#define REG_LOCAL_3 ASM_THUMB_REG_R6
#define REG_LOCAL_NUM (3)

#define ASM_T asm_thumb_t
#define ASM_END_PASS asm_thumb_end_pass
#define ASM_ENTRY asm_thumb_entry
#define ASM_EXIT asm_thumb_exit

#define ASM_JUMP asm_thumb_b_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_thumb_cmp_rlo_i8(as, reg, 0); \
        asm_thumb_bcc_label(as, ASM_THUMB_CC_EQ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_thumb_cmp_rlo_i8(as, reg, 0); \
        asm_thumb_bcc_label(as, ASM_THUMB_CC_NE, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_thumb_cmp_rlo_rlo(as, reg1, reg2); \
        asm_thumb_bcc_label(as, ASM_THUMB_CC_EQ, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_thumb_bl_ind(as, ptr, idx, ASM_THUMB_REG_R3)

#define ASM_MOV_REG_TO_LOCAL(as, reg, local_num) asm_thumb_mov_local_reg(as, (local_num), (reg))
#define ASM_MOV_IMM_TO_REG(as, imm, reg) asm_thumb_mov_reg_i32_optimised(as, (reg), (imm))
#define ASM_MOV_ALIGNED_IMM_TO_REG(as, imm, reg) asm_thumb_mov_reg_i32_aligned(as, (reg), (imm))
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_thumb_mov_reg_i32_optimised(as, (reg_temp), (imm)); \
        asm_thumb_mov_local_reg(as, (local_num), (reg_temp)); \
    } while (false)
#define ASM_MOV_LOCAL_TO_REG(as, local_num, reg) asm_thumb_mov_reg_local(as, (reg), (local_num))
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_thumb_mov_reg_reg((as), (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG(as, local_num, reg) asm_thumb_mov_reg_local_addr(as, (reg), (local_num))

#define ASM_LSL_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_LSL, (reg_dest), (reg_shift))
#define ASM_ASR_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_ASR, (reg_dest), (reg_shift))
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_ORR, (reg_dest), (reg_src))
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_EOR, (reg_dest), (reg_src))
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_AND, (reg_dest), (reg_src))
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_thumb_add_rlo_rlo_rlo((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_thumb_sub_rlo_rlo_rlo((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_MUL, (reg_dest), (reg_src))

#define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_thumb_ldr_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_thumb_ldr_rlo_rlo_i5((as), (reg_dest), (reg_base), (word_offset))
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_thumb_ldrb_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_thumb_ldrh_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_thumb_ldr_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)

#define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
#define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), (word_offset))
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_thumb_strb_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_thumb_strh_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
#define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), 0)

#endif // GENERIC_ASM_API

#endif // __MICROPY_INCLUDED_PY_ASMTHUMB_H__
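
Note: every multi-instruction macro above is wrapped in do { ... } while (0) so that it expands to exactly one statement. A stand-alone sketch of why that matters (puts() stands in for the real emitter calls):

    #include <stdio.h>

    // A two-statement expansion without the wrapper would break an
    // if/else around the call site; the do/while makes it one statement.
    #define EMIT_CMP_AND_BRANCH() do { puts("cmp"); puts("bcc"); } while (0)

    int main(void) {
        int take_branch = 0;
        if (take_branch)
            EMIT_CMP_AND_BRANCH();  // binds as a single statement
        else
            puts("fall through");   // an unwrapped macro would misparse here
        return 0;
    }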
py/asmx64.h | 83
@@ -115,4 +115,87 @@ void asm_x64_mov_r64_to_local(asm_x64_t* as, int src_r64, int dest_local_num);
void asm_x64_mov_local_addr_to_r64(asm_x64_t* as, int local_num, int dest_r64);
void asm_x64_call_ind(asm_x64_t* as, void* ptr, int temp_r32);

#if GENERIC_ASM_API

// The following macros provide a (mostly) arch-independent API to
// generate native code, and are used by the native emitter.

#define ASM_WORD_SIZE (8)

#define REG_RET ASM_X64_REG_RAX
#define REG_ARG_1 ASM_X64_REG_RDI
#define REG_ARG_2 ASM_X64_REG_RSI
#define REG_ARG_3 ASM_X64_REG_RDX
#define REG_ARG_4 ASM_X64_REG_RCX
#define REG_ARG_5 ASM_X64_REG_R08

// caller-save
#define REG_TEMP0 ASM_X64_REG_RAX
#define REG_TEMP1 ASM_X64_REG_RDI
#define REG_TEMP2 ASM_X64_REG_RSI

// callee-save
#define REG_LOCAL_1 ASM_X64_REG_RBX
#define REG_LOCAL_2 ASM_X64_REG_R12
#define REG_LOCAL_3 ASM_X64_REG_R13
#define REG_LOCAL_NUM (3)

#define ASM_T asm_x64_t
#define ASM_END_PASS asm_x64_end_pass
#define ASM_ENTRY asm_x64_entry
#define ASM_EXIT asm_x64_exit

#define ASM_JUMP asm_x64_jmp_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_x64_test_r8_with_r8(as, reg, reg); \
        asm_x64_jcc_label(as, ASM_X64_CC_JZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_x64_test_r8_with_r8(as, reg, reg); \
        asm_x64_jcc_label(as, ASM_X64_CC_JNZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_x64_cmp_r64_with_r64(as, reg1, reg2); \
        asm_x64_jcc_label(as, ASM_X64_CC_JE, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_x64_call_ind(as, ptr, ASM_X64_REG_RAX)

#define ASM_MOV_REG_TO_LOCAL asm_x64_mov_r64_to_local
#define ASM_MOV_IMM_TO_REG asm_x64_mov_i64_to_r64_optimised
#define ASM_MOV_ALIGNED_IMM_TO_REG asm_x64_mov_i64_to_r64_aligned
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_x64_mov_i64_to_r64_optimised(as, (imm), (reg_temp)); \
        asm_x64_mov_r64_to_local(as, (reg_temp), (local_num)); \
    } while (false)
#define ASM_MOV_LOCAL_TO_REG asm_x64_mov_local_to_r64
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_x64_mov_r64_r64((as), (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG asm_x64_mov_local_addr_to_r64

#define ASM_LSL_REG(as, reg) asm_x64_shl_r64_cl((as), (reg))
#define ASM_ASR_REG(as, reg) asm_x64_sar_r64_cl((as), (reg))
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_x64_or_r64_r64((as), (reg_dest), (reg_src))
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_x64_xor_r64_r64((as), (reg_dest), (reg_src))
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_x64_and_r64_r64((as), (reg_dest), (reg_src))
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_x64_add_r64_r64((as), (reg_dest), (reg_src))
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_x64_sub_r64_r64((as), (reg_dest), (reg_src))
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_x64_mul_r64_r64((as), (reg_dest), (reg_src))

#define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem64_to_r64((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_x64_mov_mem64_to_r64((as), (reg_base), 8 * (word_offset), (reg_dest))
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem8_to_r64zx((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem16_to_r64zx((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem32_to_r64zx((as), (reg_base), 0, (reg_dest))

#define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_x64_mov_r64_to_mem64((as), (reg_src), (reg_base), 0)
#define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_x64_mov_r64_to_mem64((as), (reg_src), (reg_base), 8 * (word_offset))
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_x64_mov_r8_to_mem8((as), (reg_src), (reg_base), 0)
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_x64_mov_r16_to_mem16((as), (reg_src), (reg_base), 0)
#define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_x64_mov_r32_to_mem32((as), (reg_src), (reg_base), 0)

#endif // GENERIC_ASM_API

#endif // __MICROPY_INCLUDED_PY_ASMX64_H__
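
Note the two aliasing styles in the x64 block above: when the underlying function already takes arguments in the generic order, a plain object-like alias suffices (e.g. ASM_MOV_REG_TO_LOCAL); when the order differs, a function-like wrapper reorders them, as in the ARM and Thumb headers. A stand-alone sketch of the distinction (names are illustrative):

    #include <stdio.h>

    // Arch function whose parameter order already matches the generic API.
    static void arch_mov_reg_to_local(int src_reg, int local_num) {
        printf("mov r%d -> local%d\n", src_reg, local_num);
    }

    #define MOV_REG_TO_LOCAL arch_mov_reg_to_local  // object-like alias (x64 style)
    #define MOV_REG_TO_LOCAL_REORDERED(local, reg) arch_mov_reg_to_local((reg), (local))  // reordering wrapper (ARM style)

    int main(void) {
        MOV_REG_TO_LOCAL(3, 0);            // generic order passes straight through
        MOV_REG_TO_LOCAL_REORDERED(0, 3);  // wrapper restores the generic order
        return 0;
    }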
py/asmx86.h | 83
@@ -113,4 +113,87 @@ void asm_x86_mov_r32_to_local(asm_x86_t* as, int src_r32, int dest_local_num);
void asm_x86_mov_local_addr_to_r32(asm_x86_t* as, int local_num, int dest_r32);
void asm_x86_call_ind(asm_x86_t* as, void* ptr, mp_uint_t n_args, int temp_r32);

#if GENERIC_ASM_API

// The following macros provide a (mostly) arch-independent API to
// generate native code, and are used by the native emitter.

#define ASM_WORD_SIZE (4)

#define REG_RET ASM_X86_REG_EAX
#define REG_ARG_1 ASM_X86_REG_ARG_1
#define REG_ARG_2 ASM_X86_REG_ARG_2
#define REG_ARG_3 ASM_X86_REG_ARG_3
#define REG_ARG_4 ASM_X86_REG_ARG_4
#define REG_ARG_5 ASM_X86_REG_ARG_5

// caller-save, so can be used as temporaries
#define REG_TEMP0 ASM_X86_REG_EAX
#define REG_TEMP1 ASM_X86_REG_ECX
#define REG_TEMP2 ASM_X86_REG_EDX

// callee-save, so can be used as locals
#define REG_LOCAL_1 ASM_X86_REG_EBX
#define REG_LOCAL_2 ASM_X86_REG_ESI
#define REG_LOCAL_3 ASM_X86_REG_EDI
#define REG_LOCAL_NUM (3)

#define ASM_T asm_x86_t
#define ASM_END_PASS asm_x86_end_pass
#define ASM_ENTRY asm_x86_entry
#define ASM_EXIT asm_x86_exit

#define ASM_JUMP asm_x86_jmp_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_x86_test_r8_with_r8(as, reg, reg); \
        asm_x86_jcc_label(as, ASM_X86_CC_JZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_x86_test_r8_with_r8(as, reg, reg); \
        asm_x86_jcc_label(as, ASM_X86_CC_JNZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_x86_cmp_r32_with_r32(as, reg1, reg2); \
        asm_x86_jcc_label(as, ASM_X86_CC_JE, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_x86_call_ind(as, ptr, mp_f_n_args[idx], ASM_X86_REG_EAX)

#define ASM_MOV_REG_TO_LOCAL asm_x86_mov_r32_to_local
#define ASM_MOV_IMM_TO_REG asm_x86_mov_i32_to_r32
#define ASM_MOV_ALIGNED_IMM_TO_REG asm_x86_mov_i32_to_r32_aligned
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_x86_mov_i32_to_r32(as, (imm), (reg_temp)); \
        asm_x86_mov_r32_to_local(as, (reg_temp), (local_num)); \
    } while (false)
#define ASM_MOV_LOCAL_TO_REG asm_x86_mov_local_to_r32
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_x86_mov_r32_r32((as), (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG asm_x86_mov_local_addr_to_r32

#define ASM_LSL_REG(as, reg) asm_x86_shl_r32_cl((as), (reg))
#define ASM_ASR_REG(as, reg) asm_x86_sar_r32_cl((as), (reg))
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_x86_or_r32_r32((as), (reg_dest), (reg_src))
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_x86_xor_r32_r32((as), (reg_dest), (reg_src))
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_x86_and_r32_r32((as), (reg_dest), (reg_src))
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_x86_add_r32_r32((as), (reg_dest), (reg_src))
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_x86_sub_r32_r32((as), (reg_dest), (reg_src))
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_x86_mul_r32_r32((as), (reg_dest), (reg_src))

#define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_x86_mov_mem32_to_r32((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_x86_mov_mem32_to_r32((as), (reg_base), 4 * (word_offset), (reg_dest))
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_x86_mov_mem8_to_r32zx((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_x86_mov_mem16_to_r32zx((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_x86_mov_mem32_to_r32((as), (reg_base), 0, (reg_dest))

#define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_x86_mov_r32_to_mem32((as), (reg_src), (reg_base), 0)
#define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_x86_mov_r32_to_mem32((as), (reg_src), (reg_base), 4 * (word_offset))
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_x86_mov_r8_to_mem8((as), (reg_src), (reg_base), 0)
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_x86_mov_r16_to_mem16((as), (reg_src), (reg_base), 0)
#define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_x86_mov_r32_to_mem32((as), (reg_src), (reg_base), 0)

#endif // GENERIC_ASM_API

#endif // __MICROPY_INCLUDED_PY_ASMX86_H__
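
Note: x86 is the one port whose ASM_CALL_IND needs a per-function argument count (mp_f_n_args[idx], defined in emitnative.c below), presumably because the runtime helpers receive their arguments on the stack rather than in registers. A stand-alone sketch of that lookup-table pattern (the enum and counts are illustrative, not MicroPython's):

    #include <stdio.h>

    // Designated-initializer table mapping a function id to its argument count.
    enum { F_ADD, F_SUB, F_CALL_N, F_COUNT };

    static const unsigned char f_n_args[F_COUNT] = {
        [F_ADD] = 2,
        [F_SUB] = 2,
        [F_CALL_N] = 3,
    };

    int main(void) {
        // An x86-style ASM_CALL_IND consults such a table so the emitted call
        // sequence accounts for the right number of stack-argument words.
        printf("F_CALL_N takes %u args\n", f_n_args[F_CALL_N]);
        return 0;
    }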
py/emitnative.c | 311
@@ -63,96 +63,19 @@
    || (MICROPY_EMIT_THUMB && N_THUMB) \
    || (MICROPY_EMIT_ARM && N_ARM)

// this is defined so that the assembler exports generic assembler API macros
#define GENERIC_ASM_API (1)
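
Note: the generic names are short and expand differently per port, so the headers only define them when the includer opts in via this macro. A minimal sketch of the opt-in guard pattern the asmXXX.h headers use (header name and function are illustrative):

    // my_arch.h: arch-specific declarations are always visible...
    void my_arch_add_reg_reg(void *as, int rd, int rs);

    // ...but the short generic aliases appear only when the includer has
    // defined GENERIC_ASM_API to a true value before the #include.
    #if GENERIC_ASM_API
    #define ASM_ADD_REG_REG(as, reg_dest, reg_src) \
        my_arch_add_reg_reg((as), (reg_dest), (reg_src))
    #endif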

#if N_X64

// x64 specific stuff

#include "py/asmx64.h"

#define EXPORT_FUN(name) emit_native_x64_##name

#define ASM_WORD_SIZE (8)

#define REG_RET ASM_X64_REG_RAX
#define REG_ARG_1 ASM_X64_REG_RDI
#define REG_ARG_2 ASM_X64_REG_RSI
#define REG_ARG_3 ASM_X64_REG_RDX
#define REG_ARG_4 ASM_X64_REG_RCX
#define REG_ARG_5 ASM_X64_REG_R08

// caller-save
#define REG_TEMP0 ASM_X64_REG_RAX
#define REG_TEMP1 ASM_X64_REG_RDI
#define REG_TEMP2 ASM_X64_REG_RSI

// callee-save
#define REG_LOCAL_1 ASM_X64_REG_RBX
#define REG_LOCAL_2 ASM_X64_REG_R12
#define REG_LOCAL_3 ASM_X64_REG_R13
#define REG_LOCAL_NUM (3)

#define ASM_T asm_x64_t
#define ASM_END_PASS asm_x64_end_pass
#define ASM_ENTRY asm_x64_entry
#define ASM_EXIT asm_x64_exit

#define ASM_JUMP asm_x64_jmp_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_x64_test_r8_with_r8(as, reg, reg); \
        asm_x64_jcc_label(as, ASM_X64_CC_JZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_x64_test_r8_with_r8(as, reg, reg); \
        asm_x64_jcc_label(as, ASM_X64_CC_JNZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_x64_cmp_r64_with_r64(as, reg1, reg2); \
        asm_x64_jcc_label(as, ASM_X64_CC_JE, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_x64_call_ind(as, ptr, ASM_X64_REG_RAX)

#define ASM_MOV_REG_TO_LOCAL asm_x64_mov_r64_to_local
#define ASM_MOV_IMM_TO_REG asm_x64_mov_i64_to_r64_optimised
#define ASM_MOV_ALIGNED_IMM_TO_REG asm_x64_mov_i64_to_r64_aligned
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_x64_mov_i64_to_r64_optimised(as, (imm), (reg_temp)); \
        asm_x64_mov_r64_to_local(as, (reg_temp), (local_num)); \
    } while (false)
#define ASM_MOV_LOCAL_TO_REG asm_x64_mov_local_to_r64
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_x64_mov_r64_r64((as), (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG asm_x64_mov_local_addr_to_r64

#define ASM_LSL_REG(as, reg) asm_x64_shl_r64_cl((as), (reg))
#define ASM_ASR_REG(as, reg) asm_x64_sar_r64_cl((as), (reg))
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_x64_or_r64_r64((as), (reg_dest), (reg_src))
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_x64_xor_r64_r64((as), (reg_dest), (reg_src))
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_x64_and_r64_r64((as), (reg_dest), (reg_src))
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_x64_add_r64_r64((as), (reg_dest), (reg_src))
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_x64_sub_r64_r64((as), (reg_dest), (reg_src))
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_x64_mul_r64_r64((as), (reg_dest), (reg_src))

#define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem64_to_r64((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_x64_mov_mem64_to_r64((as), (reg_base), 8 * (word_offset), (reg_dest))
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem8_to_r64zx((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem16_to_r64zx((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_x64_mov_mem32_to_r64zx((as), (reg_base), 0, (reg_dest))

#define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_x64_mov_r64_to_mem64((as), (reg_src), (reg_base), 0)
#define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_x64_mov_r64_to_mem64((as), (reg_src), (reg_base), 8 * (word_offset))
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_x64_mov_r8_to_mem8((as), (reg_src), (reg_base), 0)
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_x64_mov_r16_to_mem16((as), (reg_src), (reg_base), 0)
#define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_x64_mov_r32_to_mem32((as), (reg_src), (reg_base), 0)

#elif N_X86

// x86 specific stuff

#include "py/asmx86.h"

STATIC byte mp_f_n_args[MP_F_NUMBER_OF] = {
    [MP_F_CONVERT_OBJ_TO_NATIVE] = 2,
    [MP_F_CONVERT_NATIVE_TO_OBJ] = 2,
@@ -201,247 +124,21 @@ STATIC byte mp_f_n_args[MP_F_NUMBER_OF] = {
    [MP_F_SETUP_CODE_STATE] = 5,
};

#include "py/asmx86.h"
#define EXPORT_FUN(name) emit_native_x86_##name

#define ASM_WORD_SIZE (4)

#define REG_RET ASM_X86_REG_EAX
#define REG_ARG_1 ASM_X86_REG_ARG_1
#define REG_ARG_2 ASM_X86_REG_ARG_2
#define REG_ARG_3 ASM_X86_REG_ARG_3
#define REG_ARG_4 ASM_X86_REG_ARG_4
#define REG_ARG_5 ASM_X86_REG_ARG_5

// caller-save, so can be used as temporaries
#define REG_TEMP0 ASM_X86_REG_EAX
#define REG_TEMP1 ASM_X86_REG_ECX
#define REG_TEMP2 ASM_X86_REG_EDX

// callee-save, so can be used as locals
#define REG_LOCAL_1 ASM_X86_REG_EBX
#define REG_LOCAL_2 ASM_X86_REG_ESI
#define REG_LOCAL_3 ASM_X86_REG_EDI
#define REG_LOCAL_NUM (3)

#define ASM_T asm_x86_t
#define ASM_END_PASS asm_x86_end_pass
#define ASM_ENTRY asm_x86_entry
#define ASM_EXIT asm_x86_exit

#define ASM_JUMP asm_x86_jmp_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_x86_test_r8_with_r8(as, reg, reg); \
        asm_x86_jcc_label(as, ASM_X86_CC_JZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_x86_test_r8_with_r8(as, reg, reg); \
        asm_x86_jcc_label(as, ASM_X86_CC_JNZ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_x86_cmp_r32_with_r32(as, reg1, reg2); \
        asm_x86_jcc_label(as, ASM_X86_CC_JE, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_x86_call_ind(as, ptr, mp_f_n_args[idx], ASM_X86_REG_EAX)

#define ASM_MOV_REG_TO_LOCAL asm_x86_mov_r32_to_local
#define ASM_MOV_IMM_TO_REG asm_x86_mov_i32_to_r32
#define ASM_MOV_ALIGNED_IMM_TO_REG asm_x86_mov_i32_to_r32_aligned
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_x86_mov_i32_to_r32(as, (imm), (reg_temp)); \
        asm_x86_mov_r32_to_local(as, (reg_temp), (local_num)); \
    } while (false)
#define ASM_MOV_LOCAL_TO_REG asm_x86_mov_local_to_r32
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_x86_mov_r32_r32((as), (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG asm_x86_mov_local_addr_to_r32

#define ASM_LSL_REG(as, reg) asm_x86_shl_r32_cl((as), (reg))
#define ASM_ASR_REG(as, reg) asm_x86_sar_r32_cl((as), (reg))
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_x86_or_r32_r32((as), (reg_dest), (reg_src))
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_x86_xor_r32_r32((as), (reg_dest), (reg_src))
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_x86_and_r32_r32((as), (reg_dest), (reg_src))
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_x86_add_r32_r32((as), (reg_dest), (reg_src))
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_x86_sub_r32_r32((as), (reg_dest), (reg_src))
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_x86_mul_r32_r32((as), (reg_dest), (reg_src))

#define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_x86_mov_mem32_to_r32((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_x86_mov_mem32_to_r32((as), (reg_base), 4 * (word_offset), (reg_dest))
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_x86_mov_mem8_to_r32zx((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_x86_mov_mem16_to_r32zx((as), (reg_base), 0, (reg_dest))
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_x86_mov_mem32_to_r32((as), (reg_base), 0, (reg_dest))

#define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_x86_mov_r32_to_mem32((as), (reg_src), (reg_base), 0)
#define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_x86_mov_r32_to_mem32((as), (reg_src), (reg_base), 4 * (word_offset))
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_x86_mov_r8_to_mem8((as), (reg_src), (reg_base), 0)
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_x86_mov_r16_to_mem16((as), (reg_src), (reg_base), 0)
#define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_x86_mov_r32_to_mem32((as), (reg_src), (reg_base), 0)

#elif N_THUMB

// thumb specific stuff

#include "py/asmthumb.h"

#define EXPORT_FUN(name) emit_native_thumb_##name

#define ASM_WORD_SIZE (4)

#define REG_RET ASM_THUMB_REG_R0
#define REG_ARG_1 ASM_THUMB_REG_R0
#define REG_ARG_2 ASM_THUMB_REG_R1
#define REG_ARG_3 ASM_THUMB_REG_R2
#define REG_ARG_4 ASM_THUMB_REG_R3
// rest of args go on stack

#define REG_TEMP0 ASM_THUMB_REG_R0
#define REG_TEMP1 ASM_THUMB_REG_R1
#define REG_TEMP2 ASM_THUMB_REG_R2

#define REG_LOCAL_1 ASM_THUMB_REG_R4
#define REG_LOCAL_2 ASM_THUMB_REG_R5
#define REG_LOCAL_3 ASM_THUMB_REG_R6
#define REG_LOCAL_NUM (3)

#define ASM_T asm_thumb_t
#define ASM_END_PASS asm_thumb_end_pass
#define ASM_ENTRY asm_thumb_entry
#define ASM_EXIT asm_thumb_exit

#define ASM_JUMP asm_thumb_b_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_thumb_cmp_rlo_i8(as, reg, 0); \
        asm_thumb_bcc_label(as, ASM_THUMB_CC_EQ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_thumb_cmp_rlo_i8(as, reg, 0); \
        asm_thumb_bcc_label(as, ASM_THUMB_CC_NE, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_thumb_cmp_rlo_rlo(as, reg1, reg2); \
        asm_thumb_bcc_label(as, ASM_THUMB_CC_EQ, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_thumb_bl_ind(as, ptr, idx, ASM_THUMB_REG_R3)

#define ASM_MOV_REG_TO_LOCAL(as, reg, local_num) asm_thumb_mov_local_reg(as, (local_num), (reg))
#define ASM_MOV_IMM_TO_REG(as, imm, reg) asm_thumb_mov_reg_i32_optimised(as, (reg), (imm))
#define ASM_MOV_ALIGNED_IMM_TO_REG(as, imm, reg) asm_thumb_mov_reg_i32_aligned(as, (reg), (imm))
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_thumb_mov_reg_i32_optimised(as, (reg_temp), (imm)); \
        asm_thumb_mov_local_reg(as, (local_num), (reg_temp)); \
    } while (false)
#define ASM_MOV_LOCAL_TO_REG(as, local_num, reg) asm_thumb_mov_reg_local(as, (reg), (local_num))
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_thumb_mov_reg_reg((as), (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG(as, local_num, reg) asm_thumb_mov_reg_local_addr(as, (reg), (local_num))

#define ASM_LSL_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_LSL, (reg_dest), (reg_shift))
#define ASM_ASR_REG_REG(as, reg_dest, reg_shift) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_ASR, (reg_dest), (reg_shift))
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_ORR, (reg_dest), (reg_src))
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_EOR, (reg_dest), (reg_src))
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_AND, (reg_dest), (reg_src))
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_thumb_add_rlo_rlo_rlo((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_thumb_sub_rlo_rlo_rlo((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_thumb_format_4((as), ASM_THUMB_FORMAT_4_MUL, (reg_dest), (reg_src))

#define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_thumb_ldr_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_thumb_ldr_rlo_rlo_i5((as), (reg_dest), (reg_base), (word_offset))
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_thumb_ldrb_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_thumb_ldrh_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_thumb_ldr_rlo_rlo_i5((as), (reg_dest), (reg_base), 0)

#define ASM_STORE_REG_REG(as, reg_src, reg_base) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
#define ASM_STORE_REG_REG_OFFSET(as, reg_src, reg_base, word_offset) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), (word_offset))
#define ASM_STORE8_REG_REG(as, reg_src, reg_base) asm_thumb_strb_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
#define ASM_STORE16_REG_REG(as, reg_src, reg_base) asm_thumb_strh_rlo_rlo_i5((as), (reg_src), (reg_base), 0)
#define ASM_STORE32_REG_REG(as, reg_src, reg_base) asm_thumb_str_rlo_rlo_i5((as), (reg_src), (reg_base), 0)

#elif N_ARM

// ARM specific stuff

#include "py/asmarm.h"

#define ASM_WORD_SIZE (4)

#define EXPORT_FUN(name) emit_native_arm_##name

#define REG_RET ASM_ARM_REG_R0
#define REG_ARG_1 ASM_ARM_REG_R0
#define REG_ARG_2 ASM_ARM_REG_R1
#define REG_ARG_3 ASM_ARM_REG_R2
#define REG_ARG_4 ASM_ARM_REG_R3

#define REG_TEMP0 ASM_ARM_REG_R0
#define REG_TEMP1 ASM_ARM_REG_R1
#define REG_TEMP2 ASM_ARM_REG_R2

#define REG_LOCAL_1 ASM_ARM_REG_R4
#define REG_LOCAL_2 ASM_ARM_REG_R5
#define REG_LOCAL_3 ASM_ARM_REG_R6
#define REG_LOCAL_NUM (3)

#define ASM_T asm_arm_t
#define ASM_END_PASS asm_arm_end_pass
#define ASM_ENTRY asm_arm_entry
#define ASM_EXIT asm_arm_exit

#define ASM_JUMP asm_arm_b_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
        asm_arm_cmp_reg_i8(as, reg, 0); \
        asm_arm_bcc_label(as, ASM_ARM_CC_EQ, label); \
    } while (0)
#define ASM_JUMP_IF_REG_NONZERO(as, reg, label) \
    do { \
        asm_arm_cmp_reg_i8(as, reg, 0); \
        asm_arm_bcc_label(as, ASM_ARM_CC_NE, label); \
    } while (0)
#define ASM_JUMP_IF_REG_EQ(as, reg1, reg2, label) \
    do { \
        asm_arm_cmp_reg_reg(as, reg1, reg2); \
        asm_arm_bcc_label(as, ASM_ARM_CC_EQ, label); \
    } while (0)
#define ASM_CALL_IND(as, ptr, idx) asm_arm_bl_ind(as, ptr, idx, ASM_ARM_REG_R3)

#define ASM_MOV_REG_TO_LOCAL(as, reg, local_num) asm_arm_mov_local_reg(as, (local_num), (reg))
#define ASM_MOV_IMM_TO_REG(as, imm, reg) asm_arm_mov_reg_i32(as, (reg), (imm))
#define ASM_MOV_ALIGNED_IMM_TO_REG(as, imm, reg) asm_arm_mov_reg_i32(as, (reg), (imm))
#define ASM_MOV_IMM_TO_LOCAL_USING(as, imm, local_num, reg_temp) \
    do { \
        asm_arm_mov_reg_i32(as, (reg_temp), (imm)); \
        asm_arm_mov_local_reg(as, (local_num), (reg_temp)); \
    } while (false)
#define ASM_MOV_LOCAL_TO_REG(as, local_num, reg) asm_arm_mov_reg_local(as, (reg), (local_num))
#define ASM_MOV_REG_REG(as, reg_dest, reg_src) asm_arm_mov_reg_reg((as), (reg_dest), (reg_src))
#define ASM_MOV_LOCAL_ADDR_TO_REG(as, local_num, reg) asm_arm_mov_reg_local_addr(as, (reg), (local_num))

#define ASM_LSL_REG_REG(as, reg_dest, reg_shift) asm_arm_lsl_reg_reg((as), (reg_dest), (reg_shift))
#define ASM_ASR_REG_REG(as, reg_dest, reg_shift) asm_arm_asr_reg_reg((as), (reg_dest), (reg_shift))
#define ASM_OR_REG_REG(as, reg_dest, reg_src) asm_arm_orr_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_XOR_REG_REG(as, reg_dest, reg_src) asm_arm_eor_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_AND_REG_REG(as, reg_dest, reg_src) asm_arm_and_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_ADD_REG_REG(as, reg_dest, reg_src) asm_arm_add_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_SUB_REG_REG(as, reg_dest, reg_src) asm_arm_sub_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))
#define ASM_MUL_REG_REG(as, reg_dest, reg_src) asm_arm_mul_reg_reg_reg((as), (reg_dest), (reg_dest), (reg_src))

#define ASM_LOAD_REG_REG(as, reg_dest, reg_base) asm_arm_ldr_reg_reg((as), (reg_dest), (reg_base), 0)
#define ASM_LOAD_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_arm_ldr_reg_reg((as), (reg_dest), (reg_base), 4 * (word_offset))
#define ASM_LOAD8_REG_REG(as, reg_dest, reg_base) asm_arm_ldrb_reg_reg((as), (reg_dest), (reg_base))
#define ASM_LOAD16_REG_REG(as, reg_dest, reg_base) asm_arm_ldrh_reg_reg((as), (reg_dest), (reg_base))
#define ASM_LOAD32_REG_REG(as, reg_dest, reg_base) asm_arm_ldr_reg_reg((as), (reg_dest), (reg_base), 0)

#define ASM_STORE_REG_REG(as, reg_value, reg_base) asm_arm_str_reg_reg((as), (reg_value), (reg_base), 0)
#define ASM_STORE_REG_REG_OFFSET(as, reg_dest, reg_base, word_offset) asm_arm_str_reg_reg((as), (reg_dest), (reg_base), 4 * (word_offset))
#define ASM_STORE8_REG_REG(as, reg_value, reg_base) asm_arm_strb_reg_reg((as), (reg_value), (reg_base))
#define ASM_STORE16_REG_REG(as, reg_value, reg_base) asm_arm_strh_reg_reg((as), (reg_value), (reg_base))
#define ASM_STORE32_REG_REG(as, reg_value, reg_base) asm_arm_str_reg_reg((as), (reg_value), (reg_base), 0)

#else

#error unknown native emitter
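
Note: each N_xxx configuration compiles this emitter source with its own EXPORT_FUN, so one shared file yields per-architecture entry points. A stand-alone sketch of the token-pasting pattern (the prefix here is illustrative):

    #include <stdio.h>

    // Same pattern as EXPORT_FUN above: the prefix changes per build,
    // the shared source stays identical.
    #define EXPORT_FUN(name) emit_native_demo_##name

    void EXPORT_FUN(start_pass)(void) {  // defines emit_native_demo_start_pass
        puts("start pass");
    }

    int main(void) {
        emit_native_demo_start_pass();
        return 0;
    }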