py: Factor out common code from assemblers into asmbase.[ch].
All assemblers should "derive" from mp_asm_base_t.
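A minimal sketch (not part of this commit) of what "derive" means here: an architecture-specific assembler embeds mp_asm_base_t as its first member and routes all byte emission and pass handling through the shared helpers in py/asmbase.[ch]. The asm_myarch_t name below is hypothetical, used only for illustration; the real derived types are asm_arm_t, asm_thumb_t, asm_x64_t and asm_x86_t, shown in the diff that follows.

#include <stdint.h>
#include "py/asmbase.h"

typedef struct _asm_myarch_t {
    mp_asm_base_t base; // must come first so the base helpers can manage pass/labels/buffer
    uint32_t stack_adjust;
} asm_myarch_t;

// every emit routine funnels through the shared buffer helper in asmbase.c
static inline void asm_myarch_op32(asm_myarch_t *as, uint32_t op) {
    uint8_t *c = mp_asm_base_get_cur_to_write_bytes(&as->base, 4);
    if (as->base.pass == MP_ASM_PASS_EMIT) { // during the compute pass only dummy_data is returned
        c[0] = op; c[1] = op >> 8; c[2] = op >> 16; c[3] = op >> 24;
    }
}

// the emitters then drive the two-pass flow through the base API:
//   mp_asm_base_init(&as->base, max_num_labels);
//   mp_asm_base_start_pass(&as->base, MP_ASM_PASS_COMPUTE); /* emit to measure */
//   mp_asm_base_start_pass(&as->base, MP_ASM_PASS_EMIT);    /* emit for real */
//   void *code = mp_asm_base_get_code(&as->base);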
parent: 21e1703d37
commit: 612599587b
py/asmarm.c (99 lines changed)
@@ -38,50 +38,6 @@

#define SIGNED_FIT24(x) (((x) & 0xff800000) == 0) || (((x) & 0xff000000) == 0xff000000)

struct _asm_arm_t {
    uint pass;
    mp_uint_t code_offset;
    mp_uint_t code_size;
    byte *code_base;
    byte dummy_data[4];

    mp_uint_t max_num_labels;
    mp_uint_t *label_offsets;
    uint push_reglist;
    uint stack_adjust;
};

asm_arm_t *asm_arm_new(uint max_num_labels) {
    asm_arm_t *as;

    as = m_new0(asm_arm_t, 1);
    as->max_num_labels = max_num_labels;
    as->label_offsets = m_new(mp_uint_t, max_num_labels);

    return as;
}

void asm_arm_free(asm_arm_t *as, bool free_code) {
    if (free_code) {
        MP_PLAT_FREE_EXEC(as->code_base, as->code_size);
    }
    m_del(mp_uint_t, as->label_offsets, as->max_num_labels);
    m_del_obj(asm_arm_t, as);
}

void asm_arm_start_pass(asm_arm_t *as, uint pass) {
    if (pass == ASM_ARM_PASS_COMPUTE) {
        memset(as->label_offsets, -1, as->max_num_labels * sizeof(mp_uint_t));
    } else if (pass == ASM_ARM_PASS_EMIT) {
        MP_PLAT_ALLOC_EXEC(as->code_offset, (void**)&as->code_base, &as->code_size);
        if (as->code_base == NULL) {
            assert(0);
        }
    }
    as->pass = pass;
    as->code_offset = 0;
}

void asm_arm_end_pass(asm_arm_t *as) {
    if (as->pass == ASM_ARM_PASS_EMIT) {
#ifdef __arm__
@@ -97,32 +53,6 @@ void asm_arm_end_pass(asm_arm_t *as) {
    }
}

// all functions must go through this one to emit bytes
// if as->pass < ASM_ARM_PASS_EMIT, then this function only returns a buffer of 4 bytes length
STATIC byte *asm_arm_get_cur_to_write_bytes(asm_arm_t *as, int num_bytes_to_write) {
    if (as->pass < ASM_ARM_PASS_EMIT) {
        as->code_offset += num_bytes_to_write;
        return as->dummy_data;
    } else {
        assert(as->code_offset + num_bytes_to_write <= as->code_size);
        byte *c = as->code_base + as->code_offset;
        as->code_offset += num_bytes_to_write;
        return c;
    }
}

uint asm_arm_get_code_pos(asm_arm_t *as) {
    return as->code_offset;
}

uint asm_arm_get_code_size(asm_arm_t *as) {
    return as->code_size;
}

void *asm_arm_get_code(asm_arm_t *as) {
    return as->code_base;
}

// Insert word into instruction flow
STATIC void emit(asm_arm_t *as, uint op) {
    *(uint*)asm_arm_get_cur_to_write_bytes(as, 4) = op;
@@ -263,35 +193,6 @@ void asm_arm_pop(asm_arm_t *as, uint reglist) {
    emit_al(as, asm_arm_op_pop(reglist));
}

void asm_arm_label_assign(asm_arm_t *as, uint label) {
    assert(label < as->max_num_labels);
    if (as->pass < ASM_ARM_PASS_EMIT) {
        // assign label offset
        assert(as->label_offsets[label] == -1);
        as->label_offsets[label] = as->code_offset;
    } else {
        // ensure label offset has not changed from PASS_COMPUTE to PASS_EMIT
        assert(as->label_offsets[label] == as->code_offset);
    }
}

void asm_arm_align(asm_arm_t* as, uint align) {
    // TODO fill unused data with NOPs?
    as->code_offset = (as->code_offset + align - 1) & (~(align - 1));
}

void asm_arm_data(asm_arm_t* as, uint bytesize, uint val) {
    byte *c = asm_arm_get_cur_to_write_bytes(as, bytesize);
    // only write to the buffer in the emit pass (otherwise we overflow dummy_data)
    if (as->pass == ASM_ARM_PASS_EMIT) {
        // little endian
        for (uint i = 0; i < bytesize; i++) {
            *c++ = val;
            val >>= 8;
        }
    }
}

void asm_arm_mov_reg_reg(asm_arm_t *as, uint reg_dest, uint reg_src) {
    emit_al(as, asm_arm_op_mov_reg(reg_dest, reg_src));
}
py/asmarm.h (20 lines changed)
@@ -28,9 +28,7 @@
#define __MICROPY_INCLUDED_PY_ASMARM_H__

#include "py/misc.h"

#define ASM_ARM_PASS_COMPUTE (1)
#define ASM_ARM_PASS_EMIT (2)
#include "py/asmbase.h"

#define ASM_ARM_REG_R0 (0)
#define ASM_ARM_REG_R1 (1)
@@ -68,22 +66,16 @@
#define ASM_ARM_CC_LE (0xd << 28)
#define ASM_ARM_CC_AL (0xe << 28)

typedef struct _asm_arm_t asm_arm_t;
typedef struct _asm_arm_t {
    mp_asm_base_t base;
    uint push_reglist;
    uint stack_adjust;
} asm_arm_t;

asm_arm_t *asm_arm_new(uint max_num_labels);
void asm_arm_free(asm_arm_t *as, bool free_code);
void asm_arm_start_pass(asm_arm_t *as, uint pass);
void asm_arm_end_pass(asm_arm_t *as);
uint asm_arm_get_code_pos(asm_arm_t *as);
uint asm_arm_get_code_size(asm_arm_t *as);
void *asm_arm_get_code(asm_arm_t *as);

void asm_arm_entry(asm_arm_t *as, int num_locals);
void asm_arm_exit(asm_arm_t *as);
void asm_arm_label_assign(asm_arm_t *as, uint label);

void asm_arm_align(asm_arm_t* as, uint align);
void asm_arm_data(asm_arm_t* as, uint bytesize, uint val);

void asm_arm_bkpt(asm_arm_t *as);
py/asmbase.c (new file, 104 lines)
@@ -0,0 +1,104 @@
/*
 * This file is part of the MicroPython project, http://micropython.org/
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2016 Damien P. George
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include <assert.h>
#include <string.h>

#include "py/obj.h"
#include "py/misc.h"
#include "py/asmbase.h"

#if MICROPY_EMIT_NATIVE || MICROPY_EMIT_INLINE_THUMB

void mp_asm_base_init(mp_asm_base_t *as, size_t max_num_labels) {
    as->max_num_labels = max_num_labels;
    as->label_offsets = m_new(size_t, max_num_labels);
}

void mp_asm_base_deinit(mp_asm_base_t *as, bool free_code) {
    if (free_code) {
        MP_PLAT_FREE_EXEC(as->code_base, as->code_size);
    }
    m_del(size_t, as->label_offsets, as->max_num_labels);
}

void mp_asm_base_start_pass(mp_asm_base_t *as, int pass) {
    if (pass == MP_ASM_PASS_COMPUTE) {
        // reset all labels
        memset(as->label_offsets, -1, as->max_num_labels * sizeof(size_t));
    } else if (pass == MP_ASM_PASS_EMIT) {
        // allocating executable RAM is platform specific
        MP_PLAT_ALLOC_EXEC(as->code_offset, (void**)&as->code_base, &as->code_size);
        assert(as->code_base != NULL);
    }
    as->pass = pass;
    as->code_offset = 0;
}

// all functions must go through this one to emit bytes
// if as->pass < MP_ASM_PASS_EMIT, then this function returns dummy_data
uint8_t *mp_asm_base_get_cur_to_write_bytes(mp_asm_base_t *as, size_t num_bytes_to_write) {
    if (as->pass < MP_ASM_PASS_EMIT) {
        as->code_offset += num_bytes_to_write;
        return as->dummy_data;
    } else {
        assert(as->code_offset + num_bytes_to_write <= as->code_size);
        uint8_t *c = as->code_base + as->code_offset;
        as->code_offset += num_bytes_to_write;
        return c;
    }
}

void mp_asm_base_label_assign(mp_asm_base_t *as, size_t label) {
    assert(label < as->max_num_labels);
    if (as->pass < MP_ASM_PASS_EMIT) {
        // assign label offset
        assert(as->label_offsets[label] == (size_t)-1);
        as->label_offsets[label] = as->code_offset;
    } else {
        // ensure label offset has not changed from PASS_COMPUTE to PASS_EMIT
        assert(as->label_offsets[label] == as->code_offset);
    }
}

// align must be a multiple of 2
void mp_asm_base_align(mp_asm_base_t* as, unsigned int align) {
    as->code_offset = (as->code_offset + align - 1) & (~(align - 1));
}

// this function assumes a little endian machine
void mp_asm_base_data(mp_asm_base_t* as, unsigned int bytesize, uintptr_t val) {
    uint8_t *c = mp_asm_base_get_cur_to_write_bytes(as, bytesize);
    // only write to the buffer in the emit pass (otherwise we may overflow dummy_data)
    if (as->pass == MP_ASM_PASS_EMIT) {
        for (unsigned int i = 0; i < bytesize; i++) {
            *c++ = val;
            val >>= 8;
        }
    }
}

#endif // MICROPY_EMIT_NATIVE || MICROPY_EMIT_INLINE_THUMB
py/asmbase.h (new file, 68 lines)
@@ -0,0 +1,68 @@
/*
 * This file is part of the MicroPython project, http://micropython.org/
 *
 * The MIT License (MIT)
 *
 * Copyright (c) 2016 Damien P. George
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
#ifndef MICROPY_INCLUDED_PY_ASMBASE_H
#define MICROPY_INCLUDED_PY_ASMBASE_H

#include <stdint.h>
#include <stdbool.h>

#define MP_ASM_PASS_COMPUTE (1)
#define MP_ASM_PASS_EMIT (2)

typedef struct _mp_asm_base_t {
    int pass;
    size_t code_offset;
    size_t code_size;
    uint8_t *code_base;

    size_t max_num_labels;
    size_t *label_offsets;

    // must be last in struct
    uint8_t dummy_data[4];
} mp_asm_base_t;

void mp_asm_base_init(mp_asm_base_t *as, size_t max_num_labels);
void mp_asm_base_deinit(mp_asm_base_t *as, bool free_code);
void mp_asm_base_start_pass(mp_asm_base_t *as, int pass);
uint8_t *mp_asm_base_get_cur_to_write_bytes(mp_asm_base_t *as, size_t num_bytes_to_write);
void mp_asm_base_label_assign(mp_asm_base_t *as, size_t label);
void mp_asm_base_align(mp_asm_base_t* as, unsigned int align);
void mp_asm_base_data(mp_asm_base_t* as, unsigned int bytesize, uintptr_t val);

static inline size_t mp_asm_base_get_code_pos(mp_asm_base_t *as) {
    return as->code_offset;
}

static inline size_t mp_asm_base_get_code_size(mp_asm_base_t *as) {
    return as->code_size;
}

static inline void *mp_asm_base_get_code(mp_asm_base_t *as) {
    return as->code_base;
}

#endif // MICROPY_INCLUDED_PY_ASMBASE_H
py/asmthumb.c (134 lines changed)
@@ -42,49 +42,8 @@
#define SIGNED_FIT12(x) (((x) & 0xfffff800) == 0) || (((x) & 0xfffff800) == 0xfffff800)
#define SIGNED_FIT23(x) (((x) & 0xffc00000) == 0) || (((x) & 0xffc00000) == 0xffc00000)

struct _asm_thumb_t {
    mp_uint_t pass;
    mp_uint_t code_offset;
    mp_uint_t code_size;
    byte *code_base;
    byte dummy_data[4];

    mp_uint_t max_num_labels;
    mp_uint_t *label_offsets;
    mp_uint_t push_reglist;
    mp_uint_t stack_adjust;
};

asm_thumb_t *asm_thumb_new(uint max_num_labels) {
    asm_thumb_t *as;

    as = m_new0(asm_thumb_t, 1);
    as->max_num_labels = max_num_labels;
    as->label_offsets = m_new(mp_uint_t, max_num_labels);

    return as;
}

void asm_thumb_free(asm_thumb_t *as, bool free_code) {
    if (free_code) {
        MP_PLAT_FREE_EXEC(as->code_base, as->code_size);
    }
    m_del(mp_uint_t, as->label_offsets, as->max_num_labels);
    m_del_obj(asm_thumb_t, as);
}

void asm_thumb_start_pass(asm_thumb_t *as, uint pass) {
    if (pass == ASM_THUMB_PASS_COMPUTE) {
        memset(as->label_offsets, -1, as->max_num_labels * sizeof(mp_uint_t));
    } else if (pass == ASM_THUMB_PASS_EMIT) {
        MP_PLAT_ALLOC_EXEC(as->code_offset, (void**)&as->code_base, &as->code_size);
        if (as->code_base == NULL) {
            assert(0);
        }
        //printf("code_size: %u\n", as->code_size);
    }
    as->pass = pass;
    as->code_offset = 0;
static inline byte *asm_thumb_get_cur_to_write_bytes(asm_thumb_t *as, int n) {
    return mp_asm_base_get_cur_to_write_bytes(&as->base, n);
}

void asm_thumb_end_pass(asm_thumb_t *as) {
@@ -92,7 +51,7 @@ void asm_thumb_end_pass(asm_thumb_t *as) {
    // could check labels are resolved...

#if defined(MCU_SERIES_F7)
    if (as->pass == ASM_THUMB_PASS_EMIT) {
    if (as->base.pass == MP_ASM_PASS_EMIT) {
        // flush D-cache, so the code emited is stored in memory
        SCB_CleanDCache_by_Addr((uint32_t*)as->code_base, as->code_size);
        // invalidate I-cache
@@ -101,33 +60,6 @@ void asm_thumb_end_pass(asm_thumb_t *as) {
#endif
}

// all functions must go through this one to emit bytes
// if as->pass < ASM_THUMB_PASS_EMIT, then this function only returns a buffer of 4 bytes length
STATIC byte *asm_thumb_get_cur_to_write_bytes(asm_thumb_t *as, int num_bytes_to_write) {
    //printf("emit %d\n", num_bytes_to_write);
    if (as->pass < ASM_THUMB_PASS_EMIT) {
        as->code_offset += num_bytes_to_write;
        return as->dummy_data;
    } else {
        assert(as->code_offset + num_bytes_to_write <= as->code_size);
        byte *c = as->code_base + as->code_offset;
        as->code_offset += num_bytes_to_write;
        return c;
    }
}

uint asm_thumb_get_code_pos(asm_thumb_t *as) {
    return as->code_offset;
}

uint asm_thumb_get_code_size(asm_thumb_t *as) {
    return as->code_size;
}

void *asm_thumb_get_code(asm_thumb_t *as) {
    return as->code_base;
}

/*
STATIC void asm_thumb_write_byte_1(asm_thumb_t *as, byte b1) {
    byte *c = asm_thumb_get_cur_to_write_bytes(as, 1);
@@ -223,39 +155,9 @@ void asm_thumb_exit(asm_thumb_t *as) {
    asm_thumb_op16(as, OP_POP_RLIST_PC(as->push_reglist));
}

void asm_thumb_label_assign(asm_thumb_t *as, uint label) {
    assert(label < as->max_num_labels);
    if (as->pass < ASM_THUMB_PASS_EMIT) {
        // assign label offset
        assert(as->label_offsets[label] == -1);
        as->label_offsets[label] = as->code_offset;
    } else {
        // ensure label offset has not changed from PASS_COMPUTE to PASS_EMIT
        //printf("l%d: (at %d=%ld)\n", label, as->label_offsets[label], as->code_offset);
        assert(as->label_offsets[label] == as->code_offset);
    }
}

void asm_thumb_align(asm_thumb_t* as, uint align) {
    // TODO fill unused data with NOPs?
    as->code_offset = (as->code_offset + align - 1) & (~(align - 1));
}

void asm_thumb_data(asm_thumb_t* as, uint bytesize, uint val) {
    byte *c = asm_thumb_get_cur_to_write_bytes(as, bytesize);
    // only write to the buffer in the emit pass (otherwise we overflow dummy_data)
    if (as->pass == ASM_THUMB_PASS_EMIT) {
        // little endian
        for (uint i = 0; i < bytesize; i++) {
            *c++ = val;
            val >>= 8;
        }
    }
}

STATIC mp_uint_t get_label_dest(asm_thumb_t *as, uint label) {
    assert(label < as->max_num_labels);
    return as->label_offsets[label];
    assert(label < as->base.max_num_labels);
    return as->base.label_offsets[label];
}

void asm_thumb_op16(asm_thumb_t *as, uint op) {
@@ -309,10 +211,10 @@ void asm_thumb_mov_reg_i16(asm_thumb_t *as, uint mov_op, uint reg_dest, int i16_

bool asm_thumb_b_n_label(asm_thumb_t *as, uint label) {
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->code_offset;
    mp_int_t rel = dest - as->base.code_offset;
    rel -= 4; // account for instruction prefetch, PC is 4 bytes ahead of this instruction
    asm_thumb_op16(as, OP_B_N(rel));
    return as->pass != ASM_THUMB_PASS_EMIT || SIGNED_FIT12(rel);
    return as->base.pass != MP_ASM_PASS_EMIT || SIGNED_FIT12(rel);
}

#define OP_BCC_N(cond, byte_offset) (0xd000 | ((cond) << 8) | (((byte_offset) >> 1) & 0x00ff))
@@ -323,11 +225,11 @@ bool asm_thumb_b_n_label(asm_thumb_t *as, uint label) {

bool asm_thumb_bcc_nw_label(asm_thumb_t *as, int cond, uint label, bool wide) {
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->code_offset;
    mp_int_t rel = dest - as->base.code_offset;
    rel -= 4; // account for instruction prefetch, PC is 4 bytes ahead of this instruction
    if (!wide) {
        asm_thumb_op16(as, OP_BCC_N(cond, rel));
        return as->pass != ASM_THUMB_PASS_EMIT || SIGNED_FIT9(rel);
        return as->base.pass != MP_ASM_PASS_EMIT || SIGNED_FIT9(rel);
    } else {
        asm_thumb_op32(as, OP_BCC_W_HI(cond, rel), OP_BCC_W_LO(rel));
        return true;
@@ -339,10 +241,10 @@ bool asm_thumb_bcc_nw_label(asm_thumb_t *as, int cond, uint label, bool wide) {

bool asm_thumb_bl_label(asm_thumb_t *as, uint label) {
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->code_offset;
    mp_int_t rel = dest - as->base.code_offset;
    rel -= 4; // account for instruction prefetch, PC is 4 bytes ahead of this instruction
    asm_thumb_op32(as, OP_BL_HI(rel), OP_BL_LO(rel));
    return as->pass != ASM_THUMB_PASS_EMIT || SIGNED_FIT23(rel);
    return as->base.pass != MP_ASM_PASS_EMIT || SIGNED_FIT23(rel);
}

void asm_thumb_mov_reg_i32(asm_thumb_t *as, uint reg_dest, mp_uint_t i32) {
@@ -367,13 +269,13 @@ void asm_thumb_mov_reg_i32_optimised(asm_thumb_t *as, uint reg_dest, int i32) {
// TODO this is very inefficient, improve it!
void asm_thumb_mov_reg_i32_aligned(asm_thumb_t *as, uint reg_dest, int i32) {
    // align on machine-word + 2
    if ((as->code_offset & 3) == 0) {
    if ((as->base.code_offset & 3) == 0) {
        asm_thumb_op16(as, ASM_THUMB_OP_NOP);
    }
    // jump over the i32 value (instruction prefetch adds 2 to PC)
    asm_thumb_op16(as, OP_B_N(2));
    // store i32 on machine-word aligned boundary
    asm_thumb_data(as, 4, i32);
    mp_asm_base_data(&as->base, 4, i32);
    // do the actual load of the i32 value
    asm_thumb_mov_reg_i32_optimised(as, reg_dest, i32);
}
@@ -384,14 +286,14 @@ void asm_thumb_mov_reg_i32_aligned(asm_thumb_t *as, uint reg_dest, int i32) {
void asm_thumb_mov_local_reg(asm_thumb_t *as, int local_num, uint rlo_src) {
    assert(rlo_src < ASM_THUMB_REG_R8);
    int word_offset = local_num;
    assert(as->pass < ASM_THUMB_PASS_EMIT || word_offset >= 0);
    assert(as->base.pass < MP_ASM_PASS_EMIT || word_offset >= 0);
    asm_thumb_op16(as, OP_STR_TO_SP_OFFSET(rlo_src, word_offset));
}

void asm_thumb_mov_reg_local(asm_thumb_t *as, uint rlo_dest, int local_num) {
    assert(rlo_dest < ASM_THUMB_REG_R8);
    int word_offset = local_num;
    assert(as->pass < ASM_THUMB_PASS_EMIT || word_offset >= 0);
    assert(as->base.pass < MP_ASM_PASS_EMIT || word_offset >= 0);
    asm_thumb_op16(as, OP_LDR_FROM_SP_OFFSET(rlo_dest, word_offset));
}

@@ -400,7 +302,7 @@ void asm_thumb_mov_reg_local(asm_thumb_t *as, uint rlo_dest, int local_num) {
void asm_thumb_mov_reg_local_addr(asm_thumb_t *as, uint rlo_dest, int local_num) {
    assert(rlo_dest < ASM_THUMB_REG_R8);
    int word_offset = local_num;
    assert(as->pass < ASM_THUMB_PASS_EMIT || word_offset >= 0);
    assert(as->base.pass < MP_ASM_PASS_EMIT || word_offset >= 0);
    asm_thumb_op16(as, OP_ADD_REG_SP_OFFSET(rlo_dest, word_offset));
}

@@ -410,7 +312,7 @@ void asm_thumb_mov_reg_local_addr(asm_thumb_t *as, uint rlo_dest, int local_num)

void asm_thumb_b_label(asm_thumb_t *as, uint label) {
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->code_offset;
    mp_int_t rel = dest - as->base.code_offset;
    rel -= 4; // account for instruction prefetch, PC is 4 bytes ahead of this instruction
    if (dest != (mp_uint_t)-1 && rel <= -4) {
        // is a backwards jump, so we know the size of the jump on the first pass
@@ -429,7 +331,7 @@ void asm_thumb_b_label(asm_thumb_t *as, uint label) {

void asm_thumb_bcc_label(asm_thumb_t *as, int cond, uint label) {
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->code_offset;
    mp_int_t rel = dest - as->base.code_offset;
    rel -= 4; // account for instruction prefetch, PC is 4 bytes ahead of this instruction
    if (dest != (mp_uint_t)-1 && rel <= -4) {
        // is a backwards jump, so we know the size of the jump on the first pass
py/asmthumb.h

@@ -27,9 +27,7 @@
#define __MICROPY_INCLUDED_PY_ASMTHUMB_H__

#include "py/misc.h"

#define ASM_THUMB_PASS_COMPUTE (1)
#define ASM_THUMB_PASS_EMIT (2)
#include "py/asmbase.h"

#define ASM_THUMB_REG_R0 (0)
#define ASM_THUMB_REG_R1 (1)
@@ -64,24 +62,17 @@
#define ASM_THUMB_CC_GT (0xc)
#define ASM_THUMB_CC_LE (0xd)

typedef struct _asm_thumb_t asm_thumb_t;
typedef struct _asm_thumb_t {
    mp_asm_base_t base;
    uint32_t push_reglist;
    uint32_t stack_adjust;
} asm_thumb_t;

asm_thumb_t *asm_thumb_new(uint max_num_labels);
void asm_thumb_free(asm_thumb_t *as, bool free_code);
void asm_thumb_start_pass(asm_thumb_t *as, uint pass);
void asm_thumb_end_pass(asm_thumb_t *as);
uint asm_thumb_get_code_pos(asm_thumb_t *as);
uint asm_thumb_get_code_size(asm_thumb_t *as);
void *asm_thumb_get_code(asm_thumb_t *as);

void asm_thumb_entry(asm_thumb_t *as, int num_locals);
void asm_thumb_exit(asm_thumb_t *as);

void asm_thumb_label_assign(asm_thumb_t *as, uint label);

void asm_thumb_align(asm_thumb_t* as, uint align);
void asm_thumb_data(asm_thumb_t* as, uint bytesize, uint val);

// argument order follows ARM, in general dest is first
// note there is a difference between movw and mov.w, and many others!
py/asmx64.c (114 lines changed)
@@ -116,80 +116,8 @@
#define UNSIGNED_FIT32(x) (((x) & 0xffffffff00000000) == 0)
#define SIGNED_FIT8(x) (((x) & 0xffffff80) == 0) || (((x) & 0xffffff80) == 0xffffff80)

struct _asm_x64_t {
    uint pass;
    mp_uint_t code_offset;
    mp_uint_t code_size;
    byte *code_base;
    byte dummy_data[8];

    mp_uint_t max_num_labels;
    mp_uint_t *label_offsets;
    int num_locals;
};

asm_x64_t *asm_x64_new(mp_uint_t max_num_labels) {
    asm_x64_t *as;

    as = m_new0(asm_x64_t, 1);
    as->max_num_labels = max_num_labels;
    as->label_offsets = m_new(mp_uint_t, max_num_labels);

    return as;
}

void asm_x64_free(asm_x64_t *as, bool free_code) {
    if (free_code) {
        MP_PLAT_FREE_EXEC(as->code_base, as->code_size);
    }
    m_del(mp_uint_t, as->label_offsets, as->max_num_labels);
    m_del_obj(asm_x64_t, as);
}

void asm_x64_start_pass(asm_x64_t *as, uint pass) {
    if (pass == ASM_X64_PASS_COMPUTE) {
        // reset all labels
        memset(as->label_offsets, -1, as->max_num_labels * sizeof(mp_uint_t));
    } if (pass == ASM_X64_PASS_EMIT) {
        MP_PLAT_ALLOC_EXEC(as->code_offset, (void**)&as->code_base, &as->code_size);
        if (as->code_base == NULL) {
            assert(0);
        }
        //printf("code_size: %u\n", as->code_size);
    }
    as->pass = pass;
    as->code_offset = 0;
}

void asm_x64_end_pass(asm_x64_t *as) {
    // could check labels are resolved...
    (void)as;
}

// all functions must go through this one to emit bytes
STATIC byte *asm_x64_get_cur_to_write_bytes(asm_x64_t *as, int num_bytes_to_write) {
    //printf("emit %d\n", num_bytes_to_write);
    if (as->pass < ASM_X64_PASS_EMIT) {
        as->code_offset += num_bytes_to_write;
        return as->dummy_data;
    } else {
        assert(as->code_offset + num_bytes_to_write <= as->code_size);
        byte *c = as->code_base + as->code_offset;
        as->code_offset += num_bytes_to_write;
        return c;
    }
}

mp_uint_t asm_x64_get_code_pos(asm_x64_t *as) {
    return as->code_offset;
}

mp_uint_t asm_x64_get_code_size(asm_x64_t *as) {
    return as->code_size;
}

void *asm_x64_get_code(asm_x64_t *as) {
    return as->code_base;
static inline byte *asm_x64_get_cur_to_write_bytes(asm_x64_t *as, int n) {
    return mp_asm_base_get_cur_to_write_bytes(&as->base, n);
}

STATIC void asm_x64_write_byte_1(asm_x64_t *as, byte b1) {
@@ -230,21 +158,6 @@ STATIC void asm_x64_write_word64(asm_x64_t *as, int64_t w64) {
    c[7] = IMM64_L7(w64);
}

// align must be a multiple of 2
void asm_x64_align(asm_x64_t* as, mp_uint_t align) {
    // TODO fill unused data with NOPs?
    as->code_offset = (as->code_offset + align - 1) & (~(align - 1));
}

void asm_x64_data(asm_x64_t* as, mp_uint_t bytesize, mp_uint_t val) {
    byte *c = asm_x64_get_cur_to_write_bytes(as, bytesize);
    // machine is little endian
    for (uint i = 0; i < bytesize; i++) {
        *c++ = val;
        val >>= 8;
    }
}

/* unused
STATIC void asm_x64_write_word32_to(asm_x64_t *as, int offset, int w32) {
    byte* c;
@@ -440,7 +353,7 @@ void asm_x64_mov_i64_to_r64_optimised(asm_x64_t *as, int64_t src_i64, int dest_r
// src_i64 is stored as a full word in the code, and aligned to machine-word boundary
void asm_x64_mov_i64_to_r64_aligned(asm_x64_t *as, int64_t src_i64, int dest_r64) {
    // mov instruction uses 2 bytes for the instruction, before the i64
    while (((as->code_offset + 2) & (WORD_SIZE - 1)) != 0) {
    while (((as->base.code_offset + 2) & (WORD_SIZE - 1)) != 0) {
        asm_x64_nop(as);
    }
    asm_x64_mov_i64_to_r64(as, src_i64, dest_r64);
@@ -552,27 +465,14 @@ void asm_x64_setcc_r8(asm_x64_t *as, int jcc_type, int dest_r8) {
    asm_x64_write_byte_3(as, OPCODE_SETCC_RM8_A, OPCODE_SETCC_RM8_B | jcc_type, MODRM_R64(0) | MODRM_RM_REG | MODRM_RM_R64(dest_r8));
}

void asm_x64_label_assign(asm_x64_t *as, mp_uint_t label) {
    assert(label < as->max_num_labels);
    if (as->pass < ASM_X64_PASS_EMIT) {
        // assign label offset
        assert(as->label_offsets[label] == (mp_uint_t)-1);
        as->label_offsets[label] = as->code_offset;
    } else {
        // ensure label offset has not changed from PASS_COMPUTE to PASS_EMIT
        //printf("l%d: (at %ld=%ld)\n", label, as->label_offsets[label], as->code_offset);
        assert(as->label_offsets[label] == as->code_offset);
    }
}

STATIC mp_uint_t get_label_dest(asm_x64_t *as, mp_uint_t label) {
    assert(label < as->max_num_labels);
    return as->label_offsets[label];
    assert(label < as->base.max_num_labels);
    return as->base.label_offsets[label];
}

void asm_x64_jmp_label(asm_x64_t *as, mp_uint_t label) {
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->code_offset;
    mp_int_t rel = dest - as->base.code_offset;
    if (dest != (mp_uint_t)-1 && rel < 0) {
        // is a backwards jump, so we know the size of the jump on the first pass
        // calculate rel assuming 8 bit relative jump
@@ -594,7 +494,7 @@ void asm_x64_jmp_label(asm_x64_t *as, mp_uint_t label) {

void asm_x64_jcc_label(asm_x64_t *as, int jcc_type, mp_uint_t label) {
    mp_uint_t dest = get_label_dest(as, label);
    mp_int_t rel = dest - as->code_offset;
    mp_int_t rel = dest - as->base.code_offset;
    if (dest != (mp_uint_t)-1 && rel < 0) {
        // is a backwards jump, so we know the size of the jump on the first pass
        // calculate rel assuming 8 bit relative jump
py/asmx64.h (24 lines changed)
@@ -28,6 +28,7 @@

#include "py/mpconfig.h"
#include "py/misc.h"
#include "py/asmbase.h"

// AMD64 calling convention is:
// - args pass in: RDI, RSI, RDX, RCX, R08, R09
@@ -41,9 +42,6 @@
// NOTE: this is a change from the old convention used in this file and
// some functions still use the old (reverse) convention.

#define ASM_X64_PASS_COMPUTE (1)
#define ASM_X64_PASS_EMIT (2)

#define ASM_X64_REG_RAX (0)
#define ASM_X64_REG_RCX (1)
#define ASM_X64_REG_RDX (2)
@@ -72,18 +70,15 @@
#define ASM_X64_CC_JLE (0xe) // less or equal, signed
#define ASM_X64_CC_JG (0xf) // greater, signed

typedef struct _asm_x64_t asm_x64_t;
typedef struct _asm_x64_t {
    mp_asm_base_t base;
    byte dummy_data[4]; // in addition to dummy_data in base
    int num_locals;
} asm_x64_t;

asm_x64_t* asm_x64_new(mp_uint_t max_num_labels);
void asm_x64_free(asm_x64_t* as, bool free_code);
void asm_x64_start_pass(asm_x64_t *as, uint pass);
void asm_x64_end_pass(asm_x64_t *as);
mp_uint_t asm_x64_get_code_pos(asm_x64_t *as);
mp_uint_t asm_x64_get_code_size(asm_x64_t* as);
void* asm_x64_get_code(asm_x64_t* as);

void asm_x64_align(asm_x64_t *as, mp_uint_t align);
void asm_x64_data(asm_x64_t *as, mp_uint_t bytesize, mp_uint_t val);
static inline void asm_x64_end_pass(asm_x64_t *as) {
    (void)as;
}

void asm_x64_nop(asm_x64_t* as);
void asm_x64_push_r64(asm_x64_t* as, int src_r64);
@@ -111,7 +106,6 @@ void asm_x64_mul_r64_r64(asm_x64_t* as, int dest_r64, int src_r64);
void asm_x64_cmp_r64_with_r64(asm_x64_t* as, int src_r64_a, int src_r64_b);
void asm_x64_test_r8_with_r8(asm_x64_t* as, int src_r64_a, int src_r64_b);
void asm_x64_setcc_r8(asm_x64_t* as, int jcc_type, int dest_r8);
void asm_x64_label_assign(asm_x64_t* as, mp_uint_t label);
void asm_x64_jmp_label(asm_x64_t* as, mp_uint_t label);
void asm_x64_jcc_label(asm_x64_t* as, int jcc_type, mp_uint_t label);
void asm_x64_entry(asm_x64_t* as, int num_locals);
py/asmx86.c (102 lines changed)
@@ -100,80 +100,6 @@

#define SIGNED_FIT8(x) (((x) & 0xffffff80) == 0) || (((x) & 0xffffff80) == 0xffffff80)

struct _asm_x86_t {
    uint pass;
    mp_uint_t code_offset;
    mp_uint_t code_size;
    byte *code_base;
    byte dummy_data[8];

    mp_uint_t max_num_labels;
    mp_uint_t *label_offsets;
    int num_locals;
};

asm_x86_t *asm_x86_new(mp_uint_t max_num_labels) {
    asm_x86_t *as;

    as = m_new0(asm_x86_t, 1);
    as->max_num_labels = max_num_labels;
    as->label_offsets = m_new(mp_uint_t, max_num_labels);

    return as;
}

void asm_x86_free(asm_x86_t *as, bool free_code) {
    if (free_code) {
        MP_PLAT_FREE_EXEC(as->code_base, as->code_size);
    }
    m_del(mp_uint_t, as->label_offsets, as->max_num_labels);
    m_del_obj(asm_x86_t, as);
}

void asm_x86_start_pass(asm_x86_t *as, mp_uint_t pass) {
    if (pass == ASM_X86_PASS_COMPUTE) {
        // reset all labels
        memset(as->label_offsets, -1, as->max_num_labels * sizeof(mp_uint_t));
    } else if (pass == ASM_X86_PASS_EMIT) {
        MP_PLAT_ALLOC_EXEC(as->code_offset, (void**)&as->code_base, &as->code_size);
        if (as->code_base == NULL) {
            assert(0);
        }
    }
    as->pass = pass;
    as->code_offset = 0;
}

void asm_x86_end_pass(asm_x86_t *as) {
    (void)as;
}

// all functions must go through this one to emit bytes
STATIC byte *asm_x86_get_cur_to_write_bytes(asm_x86_t *as, int num_bytes_to_write) {
    //printf("emit %d\n", num_bytes_to_write);
    if (as->pass < ASM_X86_PASS_EMIT) {
        as->code_offset += num_bytes_to_write;
        return as->dummy_data;
    } else {
        assert(as->code_offset + num_bytes_to_write <= as->code_size);
        byte *c = as->code_base + as->code_offset;
        as->code_offset += num_bytes_to_write;
        return c;
    }
}

mp_uint_t asm_x86_get_code_pos(asm_x86_t *as) {
    return as->code_offset;
}

mp_uint_t asm_x86_get_code_size(asm_x86_t *as) {
    return as->code_size;
}

void *asm_x86_get_code(asm_x86_t *as) {
    return as->code_base;
}

STATIC void asm_x86_write_byte_1(asm_x86_t *as, byte b1) {
    byte* c = asm_x86_get_cur_to_write_bytes(as, 1);
    c[0] = b1;
@@ -200,21 +126,6 @@ STATIC void asm_x86_write_word32(asm_x86_t *as, int w32) {
    c[3] = IMM32_L3(w32);
}

// align must be a multiple of 2
void asm_x86_align(asm_x86_t* as, mp_uint_t align) {
    // TODO fill unused data with NOPs?
    as->code_offset = (as->code_offset + align - 1) & (~(align - 1));
}

void asm_x86_data(asm_x86_t* as, mp_uint_t bytesize, mp_uint_t val) {
    byte *c = asm_x86_get_cur_to_write_bytes(as, bytesize);
    // machine is little endian
    for (uint i = 0; i < bytesize; i++) {
        *c++ = val;
        val >>= 8;
    }
}

STATIC void asm_x86_write_r32_disp(asm_x86_t *as, int r32, int disp_r32, int disp_offset) {
    assert(disp_r32 != ASM_X86_REG_ESP);

@@ -419,19 +330,6 @@ void asm_x86_setcc_r8(asm_x86_t *as, mp_uint_t jcc_type, int dest_r8) {
    asm_x86_write_byte_3(as, OPCODE_SETCC_RM8_A, OPCODE_SETCC_RM8_B | jcc_type, MODRM_R32(0) | MODRM_RM_REG | MODRM_RM_R32(dest_r8));
}

void asm_x86_label_assign(asm_x86_t *as, mp_uint_t label) {
    assert(label < as->max_num_labels);
    if (as->pass < ASM_X86_PASS_EMIT) {
        // assign label offset
        assert(as->label_offsets[label] == (mp_uint_t)-1);
        as->label_offsets[label] = as->code_offset;
    } else {
        // ensure label offset has not changed from PASS_COMPUTE to PASS_EMIT
        //printf("l%d: (at %d=%ld)\n", label, as->label_offsets[label], as->code_offset);
        assert(as->label_offsets[label] == as->code_offset);
    }
}

STATIC mp_uint_t get_label_dest(asm_x86_t *as, mp_uint_t label) {
    assert(label < as->max_num_labels);
    return as->label_offsets[label];
py/asmx86.h (24 lines changed)
@@ -28,6 +28,7 @@

#include "py/mpconfig.h"
#include "py/misc.h"
#include "py/asmbase.h"

// x86 cdecl calling convention is:
// - args passed on the stack in reverse order
@@ -42,9 +43,6 @@
// NOTE: this is a change from the old convention used in this file and
// some functions still use the old (reverse) convention.

#define ASM_X86_PASS_COMPUTE (1)
#define ASM_X86_PASS_EMIT (2)

#define ASM_X86_REG_EAX (0)
#define ASM_X86_REG_ECX (1)
#define ASM_X86_REG_EDX (2)
@@ -75,18 +73,15 @@
#define ASM_X86_CC_JLE (0xe) // less or equal, signed
#define ASM_X86_CC_JG (0xf) // greater, signed

typedef struct _asm_x86_t asm_x86_t;
typedef struct _asm_x86_t {
    mp_asm_base_t base;
    byte dummy_data[4]; // in addition to dummy_data in base
    int num_locals;
} asm_x86_t;

asm_x86_t* asm_x86_new(mp_uint_t max_num_labels);
void asm_x86_free(asm_x86_t* as, bool free_code);
void asm_x86_start_pass(asm_x86_t *as, mp_uint_t pass);
void asm_x86_end_pass(asm_x86_t *as);
mp_uint_t asm_x86_get_code_pos(asm_x86_t *as);
mp_uint_t asm_x86_get_code_size(asm_x86_t* as);
void* asm_x86_get_code(asm_x86_t* as);

void asm_x86_align(asm_x86_t *as, mp_uint_t align);
void asm_x86_data(asm_x86_t *as, mp_uint_t bytesize, mp_uint_t val);
static inline void asm_x86_end_pass(asm_x86_t *as) {
    (void)as;
}

void asm_x86_mov_r32_r32(asm_x86_t* as, int dest_r32, int src_r32);
void asm_x86_mov_i32_to_r32(asm_x86_t *as, int32_t src_i32, int dest_r32);
@@ -108,7 +103,6 @@ void asm_x86_mul_r32_r32(asm_x86_t* as, int dest_r32, int src_r32);
void asm_x86_cmp_r32_with_r32(asm_x86_t* as, int src_r32_a, int src_r32_b);
void asm_x86_test_r8_with_r8(asm_x86_t* as, int src_r32_a, int src_r32_b);
void asm_x86_setcc_r8(asm_x86_t* as, mp_uint_t jcc_type, int dest_r8);
void asm_x86_label_assign(asm_x86_t* as, mp_uint_t label);
void asm_x86_jmp_label(asm_x86_t* as, mp_uint_t label);
void asm_x86_jcc_label(asm_x86_t* as, mp_uint_t jcc_type, mp_uint_t label);
void asm_x86_entry(asm_x86_t* as, mp_uint_t num_locals);
py/emitinlinethumb.c

@@ -63,13 +63,15 @@ emit_inline_asm_t *emit_inline_thumb_new(mp_uint_t max_num_labels) {
    emit_inline_asm_t *emit = m_new_obj(emit_inline_asm_t);
    emit->max_num_labels = max_num_labels;
    emit->label_lookup = m_new(qstr, max_num_labels);
    emit->as = asm_thumb_new(max_num_labels);
    emit->as = m_new0(asm_thumb_t, 1);
    mp_asm_base_init(&emit->as->base, max_num_labels);
    return emit;
}

void emit_inline_thumb_free(emit_inline_asm_t *emit) {
    m_del(qstr, emit->label_lookup, emit->max_num_labels);
    asm_thumb_free(emit->as, false);
    mp_asm_base_deinit(&emit->as->base, false);
    m_del_obj(asm_thumb_t, emit->as);
    m_del_obj(emit_inline_asm_t, emit);
}

@@ -80,7 +82,7 @@ STATIC void emit_inline_thumb_start_pass(emit_inline_asm_t *emit, pass_kind_t pa
    if (emit->pass == MP_PASS_CODE_SIZE) {
        memset(emit->label_lookup, 0, emit->max_num_labels * sizeof(qstr));
    }
    asm_thumb_start_pass(emit->as, pass == MP_PASS_EMIT ? ASM_THUMB_PASS_EMIT : ASM_THUMB_PASS_COMPUTE);
    mp_asm_base_start_pass(&emit->as->base, pass == MP_PASS_EMIT ? MP_ASM_PASS_EMIT : MP_ASM_PASS_COMPUTE);
    asm_thumb_entry(emit->as, 0);
}

@@ -89,9 +91,9 @@ STATIC void emit_inline_thumb_end_pass(emit_inline_asm_t *emit, mp_uint_t type_s
    asm_thumb_end_pass(emit->as);

    if (emit->pass == MP_PASS_EMIT) {
        void *f = asm_thumb_get_code(emit->as);
        void *f = mp_asm_base_get_code(&emit->as->base);
        mp_emit_glue_assign_native(emit->scope->raw_code, MP_CODE_NATIVE_ASM, f,
            asm_thumb_get_code_size(emit->as), NULL, emit->scope->num_pos_args, 0, type_sig);
            mp_asm_base_get_code_size(&emit->as->base), NULL, emit->scope->num_pos_args, 0, type_sig);
    }
}

@@ -125,16 +127,16 @@ STATIC bool emit_inline_thumb_label(emit_inline_asm_t *emit, mp_uint_t label_num
        }
    }
    emit->label_lookup[label_num] = label_id;
    asm_thumb_label_assign(emit->as, label_num);
    mp_asm_base_label_assign(&emit->as->base, label_num);
    return true;
}

STATIC void emit_inline_thumb_align(emit_inline_asm_t *emit, mp_uint_t align) {
    asm_thumb_align(emit->as, align);
    mp_asm_base_align(&emit->as->base, align);
}

STATIC void emit_inline_thumb_data(emit_inline_asm_t *emit, mp_uint_t bytesize, mp_uint_t val) {
    asm_thumb_data(emit->as, bytesize, val);
    mp_asm_base_data(&emit->as->base, bytesize, val);
}

typedef struct _reg_name_t { byte reg; byte name[3]; } reg_name_t;
py/emitnative.c

@@ -91,24 +91,11 @@
#define REG_LOCAL_3 ASM_X64_REG_R13
#define REG_LOCAL_NUM (3)

#define ASM_PASS_COMPUTE ASM_X64_PASS_COMPUTE
#define ASM_PASS_EMIT ASM_X64_PASS_EMIT

#define ASM_T asm_x64_t
#define ASM_NEW asm_x64_new
#define ASM_FREE asm_x64_free
#define ASM_GET_CODE asm_x64_get_code
#define ASM_GET_CODE_POS asm_x64_get_code_pos
#define ASM_GET_CODE_SIZE asm_x64_get_code_size
#define ASM_START_PASS asm_x64_start_pass
#define ASM_END_PASS asm_x64_end_pass
#define ASM_ENTRY asm_x64_entry
#define ASM_EXIT asm_x64_exit

#define ASM_ALIGN asm_x64_align
#define ASM_DATA asm_x64_data

#define ASM_LABEL_ASSIGN asm_x64_label_assign
#define ASM_JUMP asm_x64_jmp_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
@@ -236,24 +223,11 @@ STATIC byte mp_f_n_args[MP_F_NUMBER_OF] = {
#define REG_LOCAL_3 ASM_X86_REG_EDI
#define REG_LOCAL_NUM (3)

#define ASM_PASS_COMPUTE ASM_X86_PASS_COMPUTE
#define ASM_PASS_EMIT ASM_X86_PASS_EMIT

#define ASM_T asm_x86_t
#define ASM_NEW asm_x86_new
#define ASM_FREE asm_x86_free
#define ASM_GET_CODE asm_x86_get_code
#define ASM_GET_CODE_POS asm_x86_get_code_pos
#define ASM_GET_CODE_SIZE asm_x86_get_code_size
#define ASM_START_PASS asm_x86_start_pass
#define ASM_END_PASS asm_x86_end_pass
#define ASM_ENTRY asm_x86_entry
#define ASM_EXIT asm_x86_exit

#define ASM_ALIGN asm_x86_align
#define ASM_DATA asm_x86_data

#define ASM_LABEL_ASSIGN asm_x86_label_assign
#define ASM_JUMP asm_x86_jmp_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
@@ -331,24 +305,11 @@ STATIC byte mp_f_n_args[MP_F_NUMBER_OF] = {
#define REG_LOCAL_3 ASM_THUMB_REG_R6
#define REG_LOCAL_NUM (3)

#define ASM_PASS_COMPUTE ASM_THUMB_PASS_COMPUTE
#define ASM_PASS_EMIT ASM_THUMB_PASS_EMIT

#define ASM_T asm_thumb_t
#define ASM_NEW asm_thumb_new
#define ASM_FREE asm_thumb_free
#define ASM_GET_CODE asm_thumb_get_code
#define ASM_GET_CODE_POS asm_thumb_get_code_pos
#define ASM_GET_CODE_SIZE asm_thumb_get_code_size
#define ASM_START_PASS asm_thumb_start_pass
#define ASM_END_PASS asm_thumb_end_pass
#define ASM_ENTRY asm_thumb_entry
#define ASM_EXIT asm_thumb_exit

#define ASM_ALIGN asm_thumb_align
#define ASM_DATA asm_thumb_data

#define ASM_LABEL_ASSIGN asm_thumb_label_assign
#define ASM_JUMP asm_thumb_b_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
@@ -425,24 +386,11 @@ STATIC byte mp_f_n_args[MP_F_NUMBER_OF] = {
#define REG_LOCAL_3 ASM_ARM_REG_R6
#define REG_LOCAL_NUM (3)

#define ASM_PASS_COMPUTE ASM_ARM_PASS_COMPUTE
#define ASM_PASS_EMIT ASM_ARM_PASS_EMIT

#define ASM_T asm_arm_t
#define ASM_NEW asm_arm_new
#define ASM_FREE asm_arm_free
#define ASM_GET_CODE asm_arm_get_code
#define ASM_GET_CODE_POS asm_arm_get_code_pos
#define ASM_GET_CODE_SIZE asm_arm_get_code_size
#define ASM_START_PASS asm_arm_start_pass
#define ASM_END_PASS asm_arm_end_pass
#define ASM_ENTRY asm_arm_entry
#define ASM_EXIT asm_arm_exit

#define ASM_ALIGN asm_arm_align
#define ASM_DATA asm_arm_data

#define ASM_LABEL_ASSIGN asm_arm_label_assign
#define ASM_JUMP asm_arm_b_label
#define ASM_JUMP_IF_REG_ZERO(as, reg, label) \
    do { \
@@ -582,12 +530,14 @@ struct _emit_t {
emit_t *EXPORT_FUN(new)(mp_obj_t *error_slot, mp_uint_t max_num_labels) {
    emit_t *emit = m_new0(emit_t, 1);
    emit->error_slot = error_slot;
    emit->as = ASM_NEW(max_num_labels);
    emit->as = m_new0(ASM_T, 1);
    mp_asm_base_init(&emit->as->base, max_num_labels);
    return emit;
}

void EXPORT_FUN(free)(emit_t *emit) {
    ASM_FREE(emit->as, false);
    mp_asm_base_deinit(&emit->as->base, false);
    m_del_obj(ASM_T, emit->as);
    m_del(vtype_kind_t, emit->local_vtype, emit->local_vtype_alloc);
    m_del(stack_info_t, emit->stack_info, emit->stack_info_alloc);
    m_del_obj(emit_t, emit);
@@ -679,7 +629,7 @@ STATIC void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
        emit->stack_info[i].vtype = VTYPE_UNBOUND;
    }

    ASM_START_PASS(emit->as, pass == MP_PASS_EMIT ? ASM_PASS_EMIT : ASM_PASS_COMPUTE);
    mp_asm_base_start_pass(&emit->as->base, pass == MP_PASS_EMIT ? MP_ASM_PASS_EMIT : MP_ASM_PASS_COMPUTE);

    // generate code for entry to function

@@ -824,21 +774,21 @@ STATIC void emit_native_end_pass(emit_t *emit) {
    }

    if (!emit->do_viper_types) {
        emit->prelude_offset = ASM_GET_CODE_POS(emit->as);
        ASM_DATA(emit->as, 1, emit->scope->scope_flags);
        ASM_DATA(emit->as, 1, emit->scope->num_pos_args);
        ASM_DATA(emit->as, 1, emit->scope->num_kwonly_args);
        ASM_DATA(emit->as, 1, emit->scope->num_def_pos_args);
        emit->prelude_offset = mp_asm_base_get_code_pos(&emit->as->base);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->scope_flags);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->num_pos_args);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->num_kwonly_args);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->num_def_pos_args);

        // write code info
        #if MICROPY_PERSISTENT_CODE
        ASM_DATA(emit->as, 1, 5);
        ASM_DATA(emit->as, 1, emit->scope->simple_name);
        ASM_DATA(emit->as, 1, emit->scope->simple_name >> 8);
        ASM_DATA(emit->as, 1, emit->scope->source_file);
        ASM_DATA(emit->as, 1, emit->scope->source_file >> 8);
        mp_asm_base_data(&emit->as->base, 1, 5);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->simple_name);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->simple_name >> 8);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->source_file);
        mp_asm_base_data(&emit->as->base, 1, emit->scope->source_file >> 8);
        #else
        ASM_DATA(emit->as, 1, 1);
        mp_asm_base_data(&emit->as->base, 1, 1);
        #endif

        // bytecode prelude: initialise closed over variables
@@ -846,13 +796,13 @@ STATIC void emit_native_end_pass(emit_t *emit) {
            id_info_t *id = &emit->scope->id_info[i];
            if (id->kind == ID_INFO_KIND_CELL) {
                assert(id->local_num < 255);
                ASM_DATA(emit->as, 1, id->local_num); // write the local which should be converted to a cell
                mp_asm_base_data(&emit->as->base, 1, id->local_num); // write the local which should be converted to a cell
            }
        }
        ASM_DATA(emit->as, 1, 255); // end of list sentinel
        mp_asm_base_data(&emit->as->base, 1, 255); // end of list sentinel

        ASM_ALIGN(emit->as, ASM_WORD_SIZE);
        emit->const_table_offset = ASM_GET_CODE_POS(emit->as);
        mp_asm_base_align(&emit->as->base, ASM_WORD_SIZE);
        emit->const_table_offset = mp_asm_base_get_code_pos(&emit->as->base);

        // write argument names as qstr objects
        // see comment in corresponding part of emitbc.c about the logic here
@@ -865,7 +815,7 @@ STATIC void emit_native_end_pass(emit_t *emit) {
                    break;
                }
            }
            ASM_DATA(emit->as, ASM_WORD_SIZE, (mp_uint_t)MP_OBJ_NEW_QSTR(qst));
            mp_asm_base_data(&emit->as->base, ASM_WORD_SIZE, (mp_uint_t)MP_OBJ_NEW_QSTR(qst));
        }

    }
@@ -878,8 +828,8 @@ STATIC void emit_native_end_pass(emit_t *emit) {
    }

    if (emit->pass == MP_PASS_EMIT) {
        void *f = ASM_GET_CODE(emit->as);
        mp_uint_t f_len = ASM_GET_CODE_SIZE(emit->as);
        void *f = mp_asm_base_get_code(&emit->as->base);
        mp_uint_t f_len = mp_asm_base_get_code_size(&emit->as->base);

        // compute type signature
        // note that the lower 4 bits of a vtype are tho correct MP_NATIVE_TYPE_xxx
@@ -1255,7 +1205,7 @@ STATIC void emit_native_label_assign(emit_t *emit, mp_uint_t l) {
    emit_native_pre(emit);
    // need to commit stack because we can jump here from elsewhere
    need_stack_settled(emit);
    ASM_LABEL_ASSIGN(emit->as, l);
    mp_asm_base_label_assign(&emit->as->base, l);
    emit_post(emit);
}