diff --git a/py/compile.c b/py/compile.c
index 83c2cfd671..c1d49102be 100644
--- a/py/compile.c
+++ b/py/compile.c
@@ -88,7 +88,7 @@ py_parse_node_t fold_constants(py_parse_node_t pn) {
                 int arg0 = PY_PARSE_NODE_LEAF_ARG(pns->nodes[0]);
                 int arg1 = PY_PARSE_NODE_LEAF_ARG(pns->nodes[2]);
                 if (PY_PARSE_NODE_IS_TOKEN_KIND(pns->nodes[1], PY_TOKEN_OP_DBL_LESS)) {
-#if defined(MICROPY_EMIT_ENABLE_CPYTHON)
+#if MICROPY_EMIT_CPYTHON
                     // can overflow; enabled only to compare with CPython
                     pn = py_parse_node_new_leaf(PY_PARSE_NODE_SMALL_INT, arg0 << arg1);
 #endif
@@ -127,7 +127,7 @@ py_parse_node_t fold_constants(py_parse_node_t pn) {
                 int arg0 = PY_PARSE_NODE_LEAF_ARG(pns->nodes[0]);
                 int arg1 = PY_PARSE_NODE_LEAF_ARG(pns->nodes[2]);
                 if (PY_PARSE_NODE_IS_TOKEN_KIND(pns->nodes[1], PY_TOKEN_OP_STAR)) {
-#if defined(MICROPY_EMIT_ENABLE_CPYTHON)
+#if MICROPY_EMIT_CPYTHON
                     // can overflow; enabled only to compare with CPython
                     pn = py_parse_node_new_leaf(PY_PARSE_NODE_SMALL_INT, arg0 * arg1);
 #endif
@@ -162,7 +162,7 @@ py_parse_node_t fold_constants(py_parse_node_t pn) {
             }
             break;
 
-#if defined(MICROPY_EMIT_ENABLE_CPYTHON)
+#if MICROPY_EMIT_CPYTHON
         case PN_power:
             // can overflow; enabled only to compare with CPython
             if (PY_PARSE_NODE_IS_SMALL_INT(pns->nodes[0]) && PY_PARSE_NODE_IS_NULL(pns->nodes[1]) && !PY_PARSE_NODE_IS_NULL(pns->nodes[2])) {
@@ -265,7 +265,7 @@ void compile_generic_all_nodes(compiler_t *comp, py_parse_node_struct_t *pns) {
     }
 }
 
-#if defined(MICROPY_EMIT_ENABLE_CPYTHON)
+#if MICROPY_EMIT_CPYTHON
 static bool cpython_c_tuple_is_const(py_parse_node_t pn) {
     if (!PY_PARSE_NODE_IS_LEAF(pn)) {
         return false;
     }
@@ -352,7 +352,7 @@ static void cpython_c_tuple(compiler_t *comp, py_parse_node_t pn, py_parse_node_
 
 // funnelling all tuple creations through this function is purely so we can optionally agree with CPython
 void c_tuple(compiler_t *comp, py_parse_node_t pn, py_parse_node_struct_t *pns_list) {
-#if defined(MICROPY_EMIT_ENABLE_CPYTHON)
+#if MICROPY_EMIT_CPYTHON
     cpython_c_tuple(comp, pn, pns_list);
 #else
     int total = 0;
@@ -385,7 +385,7 @@ static bool node_is_const_true(py_parse_node_t pn) {
     return PY_PARSE_NODE_IS_TOKEN_KIND(pn, PY_TOKEN_KW_TRUE) || (PY_PARSE_NODE_IS_SMALL_INT(pn) && PY_PARSE_NODE_LEAF_ARG(pn) == 1);
 }
 
-#if defined(MICROPY_EMIT_ENABLE_CPYTHON)
+#if MICROPY_EMIT_CPYTHON
 // the is_nested variable is purely to match with CPython, which doesn't fully optimise not's
 static void cpython_c_if_cond(compiler_t *comp, py_parse_node_t pn, bool jump_if, int label, bool is_nested) {
     if (node_is_const_false(pn)) {
@@ -446,7 +446,7 @@ static void cpython_c_if_cond(compiler_t *comp, py_parse_node_t pn, bool jump_if
 #endif
 
 static void c_if_cond(compiler_t *comp, py_parse_node_t pn, bool jump_if, int label) {
-#if defined(MICROPY_EMIT_ENABLE_CPYTHON)
+#if MICROPY_EMIT_CPYTHON
     cpython_c_if_cond(comp, pn, jump_if, label, false);
 #else
     if (node_is_const_false(pn)) {
@@ -857,7 +857,7 @@ static bool compile_built_in_decorator(compiler_t *comp, int name_len, py_parse_
         *emit_options = EMIT_OPT_NATIVE_PYTHON;
     } else if (attr == comp->qstr_viper) {
         *emit_options = EMIT_OPT_VIPER;
-#if defined(MICROPY_EMIT_ENABLE_INLINE_THUMB)
+#if MICROPY_EMIT_INLINE_THUMB
     } else if (attr == comp->qstr_asm_thumb) {
         *emit_options = EMIT_OPT_ASM_THUMB;
 #endif
@@ -2748,7 +2748,7 @@ void py_compile(py_parse_node_t pn) {
     uint max_num_labels = 0;
     for (scope_t *s = comp->scope_head; s != NULL; s = s->next) {
         if (false) {
-#ifdef MICROPY_EMIT_ENABLE_INLINE_THUMB
+#if MICROPY_EMIT_INLINE_THUMB
         } else if (s->emit_options == EMIT_OPT_ASM_THUMB) {
             compile_scope_inline_asm(comp, s, PASS_1);
 #endif
@@ -2771,18 +2771,18 @@ void py_compile(py_parse_node_t pn) {
     emit_pass1_free(comp->emit);
 
     // compile pass 2 and 3
-#if !defined(MICROPY_EMIT_ENABLE_CPYTHON)
+#if !MICROPY_EMIT_CPYTHON
     emit_t *emit_bc = NULL;
     emit_t *emit_native = NULL;
 #endif
-#if defined(MICROPY_EMIT_ENABLE_INLINE_THUMB)
+#if MICROPY_EMIT_INLINE_THUMB
     emit_inline_asm_t *emit_inline_thumb = NULL;
 #endif
     for (scope_t *s = comp->scope_head; s != NULL; s = s->next) {
         if (false) {
             // dummy
 
-#if defined(MICROPY_EMIT_ENABLE_INLINE_THUMB)
+#if MICROPY_EMIT_INLINE_THUMB
         } else if (s->emit_options == EMIT_OPT_ASM_THUMB) {
             // inline assembly for thumb
             if (emit_inline_thumb == NULL) {
@@ -2800,19 +2800,19 @@ void py_compile(py_parse_node_t pn) {
 
             // choose the emit type
-#if defined(MICROPY_EMIT_ENABLE_CPYTHON)
+#if MICROPY_EMIT_CPYTHON
             comp->emit = emit_cpython_new(max_num_labels);
             comp->emit_method_table = &emit_cpython_method_table;
 #else
             switch (s->emit_options) {
                 case EMIT_OPT_NATIVE_PYTHON:
                 case EMIT_OPT_VIPER:
-#if defined(MICROPY_EMIT_ENABLE_X64)
+#if MICROPY_EMIT_X64
                     if (emit_native == NULL) {
                         emit_native = emit_native_x64_new(max_num_labels);
                     }
                     comp->emit_method_table = &emit_native_x64_method_table;
-#elif defined(MICROPY_EMIT_ENABLE_THUMB)
+#elif MICROPY_EMIT_THUMB
                     if (emit_native == NULL) {
                         emit_native = emit_native_thumb_new(max_num_labels);
                     }
diff --git a/py/emitcpy.c b/py/emitcpy.c
index 596e04eb82..6e3543da31 100644
--- a/py/emitcpy.c
+++ b/py/emitcpy.c
@@ -14,7 +14,7 @@
 #include "runtime.h"
 #include "emit.h"
 
-#ifdef MICROPY_EMIT_ENABLE_CPYTHON
+#if MICROPY_EMIT_CPYTHON
 
 struct _emit_t {
     int pass;
@@ -925,4 +925,4 @@ const emit_method_table_t emit_cpython_method_table = {
     emit_cpy_yield_from,
 };
 
-#endif // MICROPY_EMIT_ENABLE_CPYTHON
+#endif // MICROPY_EMIT_CPYTHON
diff --git a/py/emitinlinethumb.c b/py/emitinlinethumb.c
index 74bc6d129f..b545428c81 100644
--- a/py/emitinlinethumb.c
+++ b/py/emitinlinethumb.c
@@ -14,7 +14,7 @@
 #include "emit.h"
 #include "asmthumb.h"
 
-#ifdef MICROPY_EMIT_ENABLE_INLINE_THUMB
+#if MICROPY_EMIT_INLINE_THUMB
 
 struct _emit_inline_asm_t {
     int pass;
@@ -204,4 +204,4 @@ const emit_inline_asm_method_table_t emit_inline_thumb_method_table = {
     emit_inline_thumb_op,
 };
 
-#endif // MICROPY_EMIT_ENABLE_INLINE_THUMB
+#endif // MICROPY_EMIT_INLINE_THUMB
diff --git a/py/emitnative.c b/py/emitnative.c
index 0f09c079a0..1e20f5eadb 100644
--- a/py/emitnative.c
+++ b/py/emitnative.c
@@ -32,9 +32,9 @@
 #include "emit.h"
 
 // wrapper around everything in this file
-#if defined(N_X64) || defined(N_THUMB)
+#if N_X64 || N_THUMB
 
-#if defined(N_X64)
+#if N_X64
 
 // x64 specific stuff
 
@@ -55,7 +55,7 @@
 #define ASM_MOV_REG_TO_REG(reg_src, reg_dest) asm_x64_mov_r64_to_r64(emit->as, (reg_src), (reg_dest))
 #define ASM_MOV_LOCAL_ADDR_TO_REG(local_num, reg) asm_x64_mov_local_addr_to_r64(emit->as, (local_num), (reg))
 
-#elif defined(N_THUMB)
+#elif N_THUMB
 
 // thumb specific stuff
 
@@ -123,9 +123,9 @@ struct _emit_t {
     scope_t *scope;
 
-#if defined(N_X64)
+#if N_X64
     asm_x64_t *as;
-#elif defined(N_THUMB)
+#elif N_THUMB
     asm_thumb_t *as;
 #endif
 };
@@ -135,9 +135,9 @@ emit_t *EXPORT_FUN(new)(uint max_num_labels) {
     emit->do_viper_types = false;
     emit->local_vtype = NULL;
     emit->stack_info = NULL;
-#if defined(N_X64)
+#if N_X64
     emit->as = asm_x64_new(max_num_labels);
-#elif defined(N_THUMB)
+#elif N_THUMB
     emit->as = asm_thumb_new(max_num_labels);
 #endif
     return emit;
@@ -182,9 +182,9 @@ static void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
         }
     }
 
-#if defined(N_X64)
+#if N_X64
     asm_x64_start_pass(emit->as, pass);
-#elif defined(N_THUMB)
+#elif N_THUMB
     asm_thumb_start_pass(emit->as, pass);
 #endif
 
@@ -198,14 +198,14 @@ static void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
         emit->stack_start = num_locals;
         num_locals += scope->stack_size;
     }
-#if defined(N_X64)
+#if N_X64
     asm_x64_entry(emit->as, num_locals);
-#elif defined(N_THUMB)
+#elif N_THUMB
     asm_thumb_entry(emit->as, num_locals);
 #endif
 
     // initialise locals from parameters
-#if defined(N_X64)
+#if N_X64
     for (int i = 0; i < scope->num_params; i++) {
         if (i == 0) {
             asm_x64_mov_r64_to_r64(emit->as, REG_ARG_1, REG_LOCAL_1);
@@ -218,7 +218,7 @@ static void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
             assert(0);
         }
     }
-#elif defined(N_THUMB)
+#elif N_THUMB
     for (int i = 0; i < scope->num_params; i++) {
         if (i == 0) {
             asm_thumb_mov_reg_reg(emit->as, REG_LOCAL_1, REG_ARG_1);
@@ -239,12 +239,12 @@ static void emit_native_start_pass(emit_t *emit, pass_kind_t pass, scope_t *scop
 }
 
 static void emit_native_end_pass(emit_t *emit) {
-#if defined(N_X64)
+#if N_X64
     if (!emit->last_emit_was_return_value) {
         asm_x64_exit(emit->as);
     }
     asm_x64_end_pass(emit->as);
-#elif defined(N_THUMB)
+#elif N_THUMB
     if (!emit->last_emit_was_return_value) {
         asm_thumb_exit(emit->as);
     }
@@ -257,10 +257,10 @@ static void emit_native_end_pass(emit_t *emit) {
     }
 
     if (emit->pass == PASS_3) {
-#if defined(N_X64)
+#if N_X64
         py_fun_t f = asm_x64_get_code(emit->as);
         rt_assign_native_code(emit->scope->unique_code_id, f, asm_x64_get_code_size(emit->as), emit->scope->num_params);
-#elif defined(N_THUMB)
+#elif N_THUMB
         py_fun_t f = asm_thumb_get_code(emit->as);
         rt_assign_native_code(emit->scope->unique_code_id, f, asm_thumb_get_code_size(emit->as), emit->scope->num_params);
 #endif
@@ -446,9 +446,9 @@ static void emit_get_stack_pointer_to_reg_for_push(emit_t *emit, int reg_dest, i
 }
 
 static void emit_call(emit_t *emit, rt_fun_kind_t fun_kind, void *fun) {
-#if defined(N_X64)
+#if N_X64
     asm_x64_call_ind(emit->as, fun, REG_RAX);
-#elif defined(N_THUMB)
+#elif N_THUMB
     asm_thumb_bl_ind(emit->as, rt_fun_table[fun_kind], fun_kind, REG_R3);
 #endif
 }
@@ -483,9 +483,9 @@ static void emit_native_delete_id(emit_t *emit, qstr qstr) {
 }
 
 static void emit_native_label_assign(emit_t *emit, int l) {
-#if defined(N_X64)
+#if N_X64
     asm_x64_label_assign(emit->as, l);
-#elif defined(N_THUMB)
+#elif N_THUMB
     asm_thumb_label_assign(emit->as, l);
 #endif
 }
@@ -607,7 +607,7 @@ static void emit_native_load_fast(emit_t *emit, qstr qstr, int local_num) {
         printf("ViperTypeError: local %s used before type known\n", qstr_str(qstr));
     }
     emit_pre(emit);
-#if defined(N_X64)
+#if N_X64
     if (local_num == 0) {
         emit_post_push_reg(emit, vtype, REG_LOCAL_1);
     } else {
@@ -615,7 +615,7 @@ static void emit_native_load_fast(emit_t *emit, qstr qstr, int local_num) {
         asm_x64_mov_local_to_r64(emit->as, local_num - 1, REG_RAX);
         emit_post_push_reg(emit, vtype, REG_RAX);
     }
-#elif defined(N_THUMB)
+#elif N_THUMB
     if (local_num == 0) {
         emit_post_push_reg(emit, vtype, REG_LOCAL_1);
     } else if (local_num == 1) {
@@ -681,14 +681,14 @@ static void emit_native_load_build_class(emit_t *emit) {
 
 static void emit_native_store_fast(emit_t *emit, qstr qstr, int local_num) {
     vtype_kind_t vtype;
-#if defined(N_X64)
+#if N_X64
     if (local_num == 0) {
         emit_pre_pop_reg(emit, &vtype, REG_LOCAL_1);
     } else {
         emit_pre_pop_reg(emit, &vtype, REG_RAX);
         asm_x64_mov_r64_to_local(emit->as, REG_RAX, local_num - 1);
     }
-#elif defined(N_THUMB)
+#elif N_THUMB
     if (local_num == 0) {
         emit_pre_pop_reg(emit, &vtype, REG_LOCAL_1);
     } else if (local_num == 1) {
@@ -826,9 +826,9 @@ static void emit_native_rot_three(emit_t *emit) {
 
 static void emit_native_jump(emit_t *emit, int label) {
     emit_pre(emit);
-#if defined(N_X64)
+#if N_X64
     asm_x64_jmp_label(emit->as, label);
-#elif defined(N_THUMB)
+#elif N_THUMB
     asm_thumb_b_label(emit->as, label);
 #endif
     emit_post(emit);
@@ -845,10 +845,10 @@ static void emit_native_pop_jump_if_false(emit_t *emit, int label) {
         printf("ViperTypeError: expecting a bool or pyobj, got %d\n", vtype);
         assert(0);
     }
-#if defined(N_X64)
+#if N_X64
     asm_x64_test_r8_with_r8(emit->as, REG_RET, REG_RET);
     asm_x64_jcc_label(emit->as, JCC_JZ, label);
-#elif defined(N_THUMB)
+#elif N_THUMB
     asm_thumb_cmp_reg_bz_label(emit->as, REG_RET, label);
 #endif
     emit_post(emit);
@@ -925,9 +925,9 @@ static void emit_native_binary_op(emit_t *emit, rt_binary_op_t op) {
     emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_3, &vtype_lhs, REG_ARG_2);
     if (vtype_lhs == VTYPE_INT && vtype_rhs == VTYPE_INT) {
         assert(op == RT_BINARY_OP_ADD);
-#if defined(N_X64)
+#if N_X64
         asm_x64_add_r64_to_r64(emit->as, REG_ARG_3, REG_ARG_2);
-#elif defined(N_THUMB)
+#elif N_THUMB
         asm_thumb_add_reg_reg_reg(emit->as, REG_ARG_2, REG_ARG_2, REG_ARG_3);
 #endif
         emit_post_push_reg(emit, VTYPE_INT, REG_ARG_2);
@@ -945,11 +945,11 @@ static void emit_native_compare_op(emit_t *emit, rt_compare_op_t op) {
     emit_pre_pop_reg_reg(emit, &vtype_rhs, REG_ARG_3, &vtype_lhs, REG_ARG_2);
     if (vtype_lhs == VTYPE_INT && vtype_rhs == VTYPE_INT) {
         assert(op == RT_COMPARE_OP_LESS);
-#if defined(N_X64)
+#if N_X64
         asm_x64_xor_r64_to_r64(emit->as, REG_RET, REG_RET);
         asm_x64_cmp_r64_with_r64(emit->as, REG_ARG_3, REG_ARG_2);
         asm_x64_setcc_r8(emit->as, JCC_JL, REG_RET);
-#elif defined(N_THUMB)
+#elif N_THUMB
         asm_thumb_cmp_reg_reg(emit->as, REG_ARG_2, REG_ARG_3);
         asm_thumb_ite_ge(emit->as);
         asm_thumb_movs_rlo_i8(emit->as, REG_RET, 0); // if r0 >= r1
@@ -1108,10 +1108,10 @@ static void emit_native_return_value(emit_t *emit) {
         assert(vtype == VTYPE_PYOBJ);
     }
    emit->last_emit_was_return_value = true;
-#if defined(N_X64)
+#if N_X64
     //asm_x64_call_ind(emit->as, 0, REG_RAX); to seg fault for debugging with gdb
     asm_x64_exit(emit->as);
-#elif defined(N_THUMB)
+#elif N_THUMB
     //asm_thumb_call_ind(emit->as, 0, REG_R0); to seg fault for debugging with gdb
     asm_thumb_exit(emit->as);
 #endif
@@ -1226,4 +1226,4 @@ const emit_method_table_t EXPORT_FUN(method_table) = {
     emit_native_yield_from,
 };
 
-#endif // defined(N_X64) || defined(N_THUMB)
+#endif // N_X64 || N_THUMB
diff --git a/py/emitthumb.c b/py/emitthumb.c
index 1866e00b9a..ef79854eba 100644
--- a/py/emitthumb.c
+++ b/py/emitthumb.c
@@ -14,7 +14,7 @@
 #include "emit.h"
 #include "asmthumb.h"
 
-#ifdef MICROPY_EMIT_ENABLE_THUMB
+#if MICROPY_EMIT_THUMB
 
 #define REG_LOCAL_1 (REG_R4)
 #define REG_LOCAL_2 (REG_R5)
@@ -775,4 +775,4 @@ const emit_method_table_t emit_thumb_method_table = {
     emit_thumb_yield_from,
 };
 
-#endif // MICROPY_EMIT_ENABLE_THUMB
+#endif // MICROPY_EMIT_THUMB
diff --git a/py/runtime.c b/py/runtime.c
index ae24646295..f06c9203a2 100644
--- a/py/runtime.c
+++ b/py/runtime.c
@@ -26,14 +26,14 @@ typedef machine_int_t py_small_int_t;
 #define FROM_SMALL_INT(o) (((py_small_int_t)(o)) >> 1)
 #define TO_SMALL_INT(o) ((py_obj_t)(((o) << 1) | 1))
 
-#ifdef MICROPY_ENABLE_FLOAT
+#if MICROPY_ENABLE_FLOAT
 typedef machine_float_t float_t;
 #endif
 
 typedef enum {
     O_CONST,
     O_STR,
-#ifdef MICROPY_ENABLE_FLOAT
+#if MICROPY_ENABLE_FLOAT
     O_FLOAT,
 #endif
     O_FUN_0,
@@ -74,7 +74,7 @@ struct _py_obj_base_t {
     union {
         const char *id;
         qstr u_str;
-#ifdef MICROPY_ENABLE_FLOAT
+#if MICROPY_ENABLE_FLOAT
         float_t u_flt;
 #endif
         struct { // for O_FUN_[012N]
@@ -257,7 +257,7 @@ py_obj_t py_obj_new_str(qstr qstr) {
     return (py_obj_t)o;
 }
 
-#ifdef MICROPY_ENABLE_FLOAT
+#if MICROPY_ENABLE_FLOAT
 py_obj_t py_obj_new_float(float_t val) {
     py_obj_base_t *o = m_new(py_obj_base_t, 1);
     o->kind = O_FLOAT;
@@ -511,7 +511,7 @@ const char *py_obj_get_type_str(py_obj_t o_in) {
         }
         case O_STR:
             return "str";
-#ifdef MICROPY_ENABLE_FLOAT
+#if MICROPY_ENABLE_FLOAT
         case O_FLOAT:
             return "float";
 #endif
@@ -554,7 +554,7 @@ void py_obj_print(py_obj_t o_in) {
             // TODO need to escape chars etc
            printf("'%s'", qstr_str(o->u_str));
            break;
-#ifdef MICROPY_ENABLE_FLOAT
+#if MICROPY_ENABLE_FLOAT
         case O_FLOAT:
            printf("%f", o->u_flt);
            break;
@@ -716,7 +716,7 @@ py_obj_t rt_binary_op(int op, py_obj_t lhs, py_obj_t rhs) {
             case RT_BINARY_OP_SUBTRACT: val = FROM_SMALL_INT(lhs) - FROM_SMALL_INT(rhs); break;
             case RT_BINARY_OP_MULTIPLY: val = FROM_SMALL_INT(lhs) * FROM_SMALL_INT(rhs); break;
             case RT_BINARY_OP_FLOOR_DIVIDE: val = FROM_SMALL_INT(lhs) / FROM_SMALL_INT(rhs); break;
-#ifdef MICROPY_ENABLE_FLOAT
+#if MICROPY_ENABLE_FLOAT
             case RT_BINARY_OP_TRUE_DIVIDE: return py_obj_new_float((float_t)FROM_SMALL_INT(lhs) / (float_t)FROM_SMALL_INT(rhs));
 #endif
             default: printf("%d\n", op); assert(0); val = 0;
@@ -861,7 +861,7 @@ machine_uint_t rt_convert_obj_for_inline_asm(py_obj_t obj) {
                 // pointer to the string (it's probably constant though!)
                 return (machine_uint_t)qstr_str(o->u_str);
 
-#ifdef MICROPY_ENABLE_FLOAT
+#if MICROPY_ENABLE_FLOAT
             case O_FLOAT:
                 // convert float to int (could also pass in float registers)
                 return (machine_int_t)o->u_flt;
diff --git a/unix/main.c b/unix/main.c
index 2a1103c45f..eb120da019 100644
--- a/unix/main.c
+++ b/unix/main.c
@@ -42,7 +42,7 @@ int main(int argc, char **argv) {
 
         py_lexer_free(lex);
 
-#if !defined(MICROPY_EMIT_ENABLE_CPYTHON)
+#if !MICROPY_EMIT_CPYTHON
         if (1) {
             // execute it
             py_obj_t module_fun = rt_make_function_from_id(1);
diff --git a/unix/mpyconfig.h b/unix/mpyconfig.h
index 117892b3d6..d9e03f7a85 100644
--- a/unix/mpyconfig.h
+++ b/unix/mpyconfig.h
@@ -1,10 +1,10 @@
 // options to control how Micro Python is built
 
-//#define MICROPY_ENABLE_FLOAT
-#define MICROPY_EMIT_ENABLE_CPYTHON
-#define MICROPY_EMIT_ENABLE_X64
-//#define MICROPY_EMIT_ENABLE_THUMB
-#define MICROPY_EMIT_ENABLE_INLINE_THUMB
+#define MICROPY_ENABLE_FLOAT (1)
+#define MICROPY_EMIT_CPYTHON (1)
+#define MICROPY_EMIT_X64 (0)
+#define MICROPY_EMIT_THUMB (0)
+#define MICROPY_EMIT_INLINE_THUMB (0)
 
 // type definitions for the specific machine
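
Note on the unix/mpyconfig.h hunk: every build option is now defined to an explicit (0) or (1) value, so option tests use #if, which also allows boolean expressions such as #if N_X64 || N_THUMB and #if !MICROPY_EMIT_CPYTHON. A minimal sketch of the resulting pattern, purely illustrative and not part of the patch (building with -Wundef to catch misspelled flags is an assumption, not something the makefiles are shown to do):

    #include "mpyconfig.h"

    // flags are always defined to 0 or 1, so compound tests work directly
    // and an undefined (e.g. misspelled) flag can be reported by the
    // compiler under -Wundef instead of silently testing false
    #if MICROPY_EMIT_CPYTHON
    // emit byte code compatible with CPython, for comparison testing
    #elif MICROPY_EMIT_X64 || MICROPY_EMIT_THUMB
    // emit native machine code
    #else
    // default Micro Python byte code emitter
    #endif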