Merge MicroPython v1.15 into CircuitPython

Scott Shawcroft 2021-05-11 17:31:27 -07:00
commit 966d25c6a5
GPG Key ID: 0DFD512649C052DA
70 changed files with 1809 additions and 911 deletions

.git-blame-ignore-revs (new file, 32 lines)
View File

@ -0,0 +1,32 @@
# tests/run-tests.py: Reformat with Black.
2a38d7103672580882fb621a5b76e8d26805d593
# all: Update Python code to conform to latest black formatting.
06659077a81b85882254cf0953c33b27614e018e
# tools/uncrustify: Enable more opts to remove space between func and '('.
77ed6f69ac35c1663a5633a8ee1d8a2446542204
# tools/codeformat.py: Include extmod/{btstack,nimble} in code formatting.
026fda605e03113d6e753290d65fed774418bc53
# all: Format code to add space after C++-style comment start.
84fa3312cfa7d2237d4b56952f2cd6e3591210c4
# tests: Format all Python code with black, except tests in basics subdir.
3dc324d3f1312e40d3a8ed87e7244966bb756f26
# all: Remove spaces inside and around parenthesis.
1a3e386c67e03a79eb768cb6e9f6777e002d6660
# all: Remove spaces between nested paren and inside function arg paren.
feb25775851ba0c04b8d1013716f442258879d9c
# all: Reformat C and Python source code with tools/codeformat.py.
69661f3343bedf86e514337cff63d96cc42f8859
# stm32/usbdev: Convert files to unix line endings.
abde0fa2267f9062b28c3c015d7662a550125cc6
# all: Remove trailing spaces, per coding conventions.
761e4c7ff62896c7d8f8c3dfc3cc98a4cc4f2f6f

View File

@ -73,16 +73,16 @@ jobs:
run: |
make -C ports/unix VARIANT=coverage -j2
- name: Test all
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests -j1
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py -j1
working-directory: tests
- name: Native Tests
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests -j1 --emit native
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py -j1 --emit native
working-directory: tests
- name: mpy Tests
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests -j1 --mpy-cross-flags='-mcache-lookup-bc' --via-mpy -d basics float micropython
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py -j1 --mpy-cross-flags='-mcache-lookup-bc' --via-mpy -d basics float micropython
working-directory: tests
- name: Native mpy Tests
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests -j1 --mpy-cross-flags='-mcache-lookup-bc' --via-mpy --emit native -d basics float micropython
run: MICROPY_CPYTHON3=python3.8 MICROPY_MICROPYTHON=../ports/unix/micropython-coverage ./run-tests.py -j1 --mpy-cross-flags='-mcache-lookup-bc' --via-mpy --emit native -d basics float micropython
working-directory: tests
- name: Build mpy-cross.static-aarch64
run: make -C mpy-cross -j2 -f Makefile.static-aarch64

View File

@ -40,6 +40,10 @@ Core functions
Returns the corresponding `Task` object.
.. function:: current_task()
Return the `Task` object associated with the currently running task.
.. function:: run(coro)
Create a new task from the given coroutine and run it until it completes.

extmod/extmod.cmake (new file, 93 lines)
View File

@ -0,0 +1,93 @@
# CMake fragment for MicroPython extmod component
set(MICROPY_EXTMOD_DIR "${MICROPY_DIR}/extmod")
set(MICROPY_OOFATFS_DIR "${MICROPY_DIR}/lib/oofatfs")
set(MICROPY_SOURCE_EXTMOD
${MICROPY_DIR}/lib/embed/abort_.c
${MICROPY_DIR}/lib/utils/printf.c
${MICROPY_EXTMOD_DIR}/machine_i2c.c
${MICROPY_EXTMOD_DIR}/machine_mem.c
${MICROPY_EXTMOD_DIR}/machine_pulse.c
${MICROPY_EXTMOD_DIR}/machine_signal.c
${MICROPY_EXTMOD_DIR}/machine_spi.c
${MICROPY_EXTMOD_DIR}/modbluetooth.c
${MICROPY_EXTMOD_DIR}/modbtree.c
${MICROPY_EXTMOD_DIR}/modframebuf.c
${MICROPY_EXTMOD_DIR}/modonewire.c
${MICROPY_EXTMOD_DIR}/moduasyncio.c
${MICROPY_EXTMOD_DIR}/modubinascii.c
${MICROPY_EXTMOD_DIR}/moducryptolib.c
${MICROPY_EXTMOD_DIR}/moductypes.c
${MICROPY_EXTMOD_DIR}/moduhashlib.c
${MICROPY_EXTMOD_DIR}/moduheapq.c
${MICROPY_EXTMOD_DIR}/modujson.c
${MICROPY_EXTMOD_DIR}/modurandom.c
${MICROPY_EXTMOD_DIR}/modure.c
${MICROPY_EXTMOD_DIR}/moduselect.c
${MICROPY_EXTMOD_DIR}/modussl_axtls.c
${MICROPY_EXTMOD_DIR}/modussl_mbedtls.c
${MICROPY_EXTMOD_DIR}/modutimeq.c
${MICROPY_EXTMOD_DIR}/moduwebsocket.c
${MICROPY_EXTMOD_DIR}/moduzlib.c
${MICROPY_EXTMOD_DIR}/modwebrepl.c
${MICROPY_EXTMOD_DIR}/uos_dupterm.c
${MICROPY_EXTMOD_DIR}/utime_mphal.c
${MICROPY_EXTMOD_DIR}/vfs.c
${MICROPY_EXTMOD_DIR}/vfs_blockdev.c
${MICROPY_EXTMOD_DIR}/vfs_fat.c
${MICROPY_EXTMOD_DIR}/vfs_fat_diskio.c
${MICROPY_EXTMOD_DIR}/vfs_fat_file.c
${MICROPY_EXTMOD_DIR}/vfs_lfs.c
${MICROPY_EXTMOD_DIR}/vfs_posix.c
${MICROPY_EXTMOD_DIR}/vfs_posix_file.c
${MICROPY_EXTMOD_DIR}/vfs_reader.c
${MICROPY_EXTMOD_DIR}/virtpin.c
${MICROPY_EXTMOD_DIR}/nimble/modbluetooth_nimble.c
)
# Library for btree module and associated code
set(MICROPY_LIB_BERKELEY_DIR "${MICROPY_DIR}/lib/berkeley-db-1.xx")
if(EXISTS "${MICROPY_LIB_BERKELEY_DIR}/btree/bt_close.c")
add_library(micropy_extmod_btree OBJECT
${MICROPY_LIB_BERKELEY_DIR}/btree/bt_close.c
${MICROPY_LIB_BERKELEY_DIR}/btree/bt_conv.c
${MICROPY_LIB_BERKELEY_DIR}/btree/bt_debug.c
${MICROPY_LIB_BERKELEY_DIR}/btree/bt_delete.c
${MICROPY_LIB_BERKELEY_DIR}/btree/bt_get.c
${MICROPY_LIB_BERKELEY_DIR}/btree/bt_open.c
${MICROPY_LIB_BERKELEY_DIR}/btree/bt_overflow.c
${MICROPY_LIB_BERKELEY_DIR}/btree/bt_page.c
${MICROPY_LIB_BERKELEY_DIR}/btree/bt_put.c
${MICROPY_LIB_BERKELEY_DIR}/btree/bt_search.c
${MICROPY_LIB_BERKELEY_DIR}/btree/bt_seq.c
${MICROPY_LIB_BERKELEY_DIR}/btree/bt_split.c
${MICROPY_LIB_BERKELEY_DIR}/btree/bt_utils.c
${MICROPY_LIB_BERKELEY_DIR}/mpool/mpool.c
)
target_include_directories(micropy_extmod_btree PRIVATE
${MICROPY_LIB_BERKELEY_DIR}/PORT/include
)
target_compile_definitions(micropy_extmod_btree PRIVATE
__DBINTERFACE_PRIVATE=1
mpool_error=printf
abort=abort_
"virt_fd_t=void*"
)
# The include directories and compile definitions below are needed to build
# modbtree.c and should be added to the main MicroPython target.
list(APPEND MICROPY_INC_CORE
"${MICROPY_LIB_BERKELEY_DIR}/PORT/include"
)
list(APPEND MICROPY_DEF_CORE
__DBINTERFACE_PRIVATE=1
"virt_fd_t=void*"
)
endif()

View File

@ -128,7 +128,7 @@ STATIC mp_obj_t select_select(size_t n_args, const mp_obj_t *args) {
// poll the objects
mp_uint_t n_ready = poll_map_poll(&poll_map, rwx_len);
if (n_ready > 0 || (timeout != -1 && mp_hal_ticks_ms() - start_tick >= timeout)) {
if (n_ready > 0 || (timeout != (mp_uint_t)-1 && mp_hal_ticks_ms() - start_tick >= timeout)) {
// one or more objects are ready, or we had a timeout
mp_obj_t list_array[3];
list_array[0] = mp_obj_new_list(rwx_len[0], NULL);
@ -230,7 +230,7 @@ STATIC mp_uint_t poll_poll_internal(uint n_args, const mp_obj_t *args) {
for (;;) {
// poll the objects
n_ready = poll_map_poll(&self->poll_map, NULL);
if (n_ready > 0 || (timeout != -1 && mp_hal_ticks_ms() - start_tick >= timeout)) {
if (n_ready > 0 || (timeout != (mp_uint_t)-1 && mp_hal_ticks_ms() - start_tick >= timeout)) {
break;
}
MICROPY_EVENT_POLL_HOOK
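A minimal standalone sketch of the timeout comparison used above, with unsigned int standing in for mp_uint_t: the "no timeout" case is encoded as the all-ones value, and the explicit (mp_uint_t)-1 cast spells out that sentinel instead of relying on the implicit conversion of a bare -1 (which also trips -Wsign-compare). Names below are illustrative only.

#include <stdio.h>

typedef unsigned int fake_uint_t; // stand-in for mp_uint_t

static int deadline_applies(fake_uint_t timeout) {
    // all-ones means "wait forever", so no deadline check is done
    return timeout != (fake_uint_t)-1;
}

int main(void) {
    printf("%d\n", deadline_applies((fake_uint_t)-1)); // 0: wait forever
    printf("%d\n", deadline_applies(100));             // 1: 100 ms deadline
    return 0;
}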

View File

@ -10,6 +10,7 @@ _attrs = {
"wait_for_ms": "funcs",
"gather": "funcs",
"Event": "event",
"ThreadSafeFlag": "event",
"Lock": "lock",
"open_connection": "stream",
"start_server": "stream",

View File

@ -264,6 +264,10 @@ def get_event_loop(runq_len=0, waitq_len=0):
return Loop
def current_task():
return cur_task
def new_event_loop():
global _task_queue, _io_queue
# TaskQueue of Task instances

View File

@ -14,6 +14,8 @@ class Event:
def set(self):
# Event becomes set, schedule any tasks waiting on it
# Note: This must not be called from anything except the thread running
# the asyncio loop (i.e. neither hard or soft IRQ, or a different thread).
while self.waiting.peek():
core._task_queue.push_head(self.waiting.pop_head())
self.state = True

View File

@ -137,7 +137,11 @@ STATIC mp_uint_t vfs_posix_file_write(mp_obj_t o_in, const void *buf, mp_uint_t
STATIC mp_uint_t vfs_posix_file_ioctl(mp_obj_t o_in, mp_uint_t request, uintptr_t arg, int *errcode) {
mp_obj_vfs_posix_file_t *o = MP_OBJ_TO_PTR(o_in);
check_fd_is_open(o);
if (request != MP_STREAM_CLOSE) {
check_fd_is_open(o);
}
switch (request) {
case MP_STREAM_FLUSH: {
int ret;

View File

@ -39,6 +39,8 @@ typedef uintptr_t gc_helper_regs_t[6];
typedef uintptr_t gc_helper_regs_t[4];
#elif defined(__thumb2__) || defined(__thumb__) || defined(__arm__)
typedef uintptr_t gc_helper_regs_t[10];
#elif defined(__aarch64__)
typedef uintptr_t gc_helper_regs_t[11]; // x19-x29
#endif
#endif

View File

@ -123,6 +123,33 @@ STATIC void gc_helper_get_regs(gc_helper_regs_t arr) {
arr[9] = r13;
}
#elif defined(__aarch64__)
STATIC void gc_helper_get_regs(gc_helper_regs_t arr) {
const register long x19 asm ("x19");
const register long x20 asm ("x20");
const register long x21 asm ("x21");
const register long x22 asm ("x22");
const register long x23 asm ("x23");
const register long x24 asm ("x24");
const register long x25 asm ("x25");
const register long x26 asm ("x26");
const register long x27 asm ("x27");
const register long x28 asm ("x28");
const register long x29 asm ("x29");
arr[0] = x19;
arr[1] = x20;
arr[2] = x21;
arr[3] = x22;
arr[4] = x23;
arr[5] = x24;
arr[6] = x25;
arr[7] = x26;
arr[8] = x27;
arr[9] = x28;
arr[10] = x29;
}
#else
#error "Architecture not supported for gc_helper_get_regs. Set MICROPY_GCREGS_SETJMP to use the fallback implementation."

View File

@ -149,7 +149,7 @@ STATIC void pre_process_options(int argc, char **argv) {
heap_size *= 1024 * 1024;
}
if (word_adjust) {
heap_size = heap_size * BYTES_PER_WORD / 4;
heap_size = heap_size * MP_BYTES_PER_OBJ_WORD / 4;
}
} else {
exit(usage(argv));
@ -161,7 +161,7 @@ STATIC void pre_process_options(int argc, char **argv) {
}
MP_NOINLINE int main_(int argc, char **argv) {
mp_stack_set_limit(40000 * (BYTES_PER_WORD / 4));
mp_stack_set_limit(40000 * (sizeof(void *) / 4));
pre_process_options(argc, argv);

View File

@ -226,26 +226,23 @@ include $(TOP)/py/mkrules.mk
.PHONY: test test_full
test: $(PROG) $(TOP)/tests/run-tests
test: $(PROG) $(TOP)/tests/run-tests.py
$(eval DIRNAME=ports/$(notdir $(CURDIR)))
cd $(TOP)/tests && MICROPY_MICROPYTHON=../$(DIRNAME)/$(PROG) ./run-tests --auto-jobs
cd $(TOP)/tests && MICROPY_MICROPYTHON=../$(DIRNAME)/$(PROG) ./run-tests.py --auto-jobs
test_full: $(PROG) $(TOP)/tests/run-tests
test_full: $(PROG) $(TOP)/tests/run-tests.py
$(eval DIRNAME=ports/$(notdir $(CURDIR)))
cd $(TOP)/tests && MICROPY_MICROPYTHON=../$(DIRNAME)/$(PROG) ./run-tests
cd $(TOP)/tests && MICROPY_MICROPYTHON=../$(DIRNAME)/$(PROG) ./run-tests -d thread
cd $(TOP)/tests && MICROPY_MICROPYTHON=../$(DIRNAME)/$(PROG) ./run-tests --emit native
cd $(TOP)/tests && MICROPY_MICROPYTHON=../$(DIRNAME)/$(PROG) ./run-tests --via-mpy $(RUN_TESTS_MPY_CROSS_FLAGS) -d basics float micropython
cd $(TOP)/tests && MICROPY_MICROPYTHON=../$(DIRNAME)/$(PROG) ./run-tests --via-mpy $(RUN_TESTS_MPY_CROSS_FLAGS) --emit native -d basics float micropython
cd $(TOP)/tests && MICROPY_MICROPYTHON=../$(DIRNAME)/$(PROG) ./run-tests.py
cd $(TOP)/tests && MICROPY_MICROPYTHON=../$(DIRNAME)/$(PROG) ./run-tests.py -d thread
cd $(TOP)/tests && MICROPY_MICROPYTHON=../$(DIRNAME)/$(PROG) ./run-tests.py --emit native
cd $(TOP)/tests && MICROPY_MICROPYTHON=../$(DIRNAME)/$(PROG) ./run-tests.py --via-mpy $(RUN_TESTS_MPY_CROSS_FLAGS) -d basics float micropython
cd $(TOP)/tests && MICROPY_MICROPYTHON=../$(DIRNAME)/$(PROG) ./run-tests.py --via-mpy $(RUN_TESTS_MPY_CROSS_FLAGS) --emit native -d basics float micropython
cat $(TOP)/tests/basics/0prelim.py | ./$(PROG) | grep -q 'abc'
test_gcov: test_full
gcov -o $(BUILD)/py $(TOP)/py/*.c
gcov -o $(BUILD)/extmod $(TOP)/extmod/*.c
coverage_clean:
$(MAKE) V=2 BUILD=build-coverage PROG=micropython_coverage clean
# build an interpreter for fuzzing
fuzz:
$(MAKE) \

View File

@ -340,6 +340,9 @@ STATIC int invalid_args(void) {
STATIC void pre_process_options(int argc, char **argv) {
for (int a = 1; a < argc; a++) {
if (argv[a][0] == '-') {
if (strcmp(argv[a], "-c") == 0 || strcmp(argv[a], "-m") == 0) {
break; // Everything after this is a command/module and arguments for it
}
if (strcmp(argv[a], "-h") == 0) {
print_help(argv);
exit(0);
@ -386,7 +389,7 @@ STATIC void pre_process_options(int argc, char **argv) {
goto invalid_arg;
}
if (word_adjust) {
heap_size = heap_size * BYTES_PER_WORD / 4;
heap_size = heap_size * MP_BYTES_PER_OBJ_WORD / 4;
}
// If requested size too small, we'll crash anyway
if (heap_size < 700) {
@ -399,6 +402,8 @@ STATIC void pre_process_options(int argc, char **argv) {
}
a++;
}
} else {
break; // Not an option but a file
}
}
}
@ -445,7 +450,7 @@ MP_NOINLINE int main_(int argc, char **argv) {
signal(SIGPIPE, SIG_IGN);
#endif
mp_stack_set_limit(40000 * (BYTES_PER_WORD / 4));
mp_stack_set_limit(40000 * (sizeof(void *) / 4));
pre_process_options(argc, argv);
@ -571,11 +576,10 @@ MP_NOINLINE int main_(int argc, char **argv) {
if (a + 1 >= argc) {
return invalid_args();
}
set_sys_argv(argv, a + 1, a); // The -c becomes first item of sys.argv, as in CPython
set_sys_argv(argv, argc, a + 2); // Then what comes after the command
ret = do_str(argv[a + 1]);
if (ret & FORCED_EXIT) {
break;
}
a += 1;
break;
} else if (strcmp(argv[a], "-m") == 0) {
if (a + 1 >= argc) {
return invalid_args();
@ -595,7 +599,12 @@ MP_NOINLINE int main_(int argc, char **argv) {
mp_obj_t mod;
nlr_buf_t nlr;
bool subpkg_tried = false;
// Allocating subpkg_tried on the stack can lead to compiler warnings about this
// variable being clobbered when nlr is implemented using setjmp/longjmp. Its
// value must be preserved across calls to setjmp/longjmp.
static bool subpkg_tried;
subpkg_tried = false;
reimport:
if (nlr_push(&nlr) == 0) {
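A self-contained illustration of the clobbering issue the comment above describes: a non-volatile automatic variable modified between setjmp() and the matching longjmp() has an indeterminate value afterwards (C11 7.13.2.1), which is why subpkg_tried becomes static instead of a plain local when NLR is setjmp-based.

#include <setjmp.h>
#include <stdio.h>

static jmp_buf env;

int main(void) {
    static int tried;    // safe: not cached in a register across longjmp
    volatile int ok = 0; // also safe: volatile forces a memory access
    tried = 0;
    if (setjmp(env) == 0) {
        tried = 1;
        ok = 1;
        longjmp(env, 1); // models nlr_jump() when NLR is setjmp-based
    }
    printf("tried=%d ok=%d\n", tried, ok); // reliably prints tried=1 ok=1
    return 0;
}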

View File

@ -29,6 +29,10 @@
#if MICROPY_PY_USELECT_POSIX
#if MICROPY_PY_USELECT
#error "Can't have both MICROPY_PY_USELECT and MICROPY_PY_USELECT_POSIX."
#endif
#include <stdio.h>
#include <errno.h>
#include <poll.h>
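A hypothetical port configuration fragment showing what the new #error guard enforces: the generic and the POSIX select implementations are mutually exclusive, so a port enables exactly one of them.

// mpconfigport.h (hypothetical excerpt)
#define MICROPY_PY_USELECT       (0) // generic uselect disabled ...
#define MICROPY_PY_USELECT_POSIX (1) // ... because the poll(2)-based one is used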

View File

@ -57,6 +57,11 @@ bool mp_bluetooth_hci_poll(void) {
return false;
}
bool mp_bluetooth_hci_active(void) {
return mp_bluetooth_btstack_state != MP_BLUETOOTH_BTSTACK_STATE_OFF
&& mp_bluetooth_btstack_state != MP_BLUETOOTH_BTSTACK_STATE_TIMEOUT;
}
// The IRQ functionality in btstack_run_loop_embedded.c is not used, so the
// following three functions are empty.

View File

@ -206,7 +206,7 @@ void mp_thread_start(void) {
void mp_thread_create(void *(*entry)(void *), void *arg, size_t *stack_size) {
// default stack size is 8k machine-words
if (*stack_size == 0) {
*stack_size = 8192 * BYTES_PER_WORD;
*stack_size = 8192 * sizeof(void *);
}
// minimum stack size is set by pthreads

View File

@ -335,7 +335,7 @@ void mp_binary_set_val(char struct_type, char val_type, mp_obj_t val_in, byte *p
double f;
} fp_dp;
fp_dp.f = mp_obj_get_float_to_d(val_in);
if (BYTES_PER_WORD == 8) {
if (MP_BYTES_PER_OBJ_WORD == 8) {
val = fp_dp.i64;
} else {
int be = struct_type == '>';
@ -361,7 +361,7 @@ void mp_binary_set_val(char struct_type, char val_type, mp_obj_t val_in, byte *p
// Small int checking is separate, to be fast.
mp_small_int_buffer_overflow_check(val, size, signed_type);
// zero/sign extend if needed
if (BYTES_PER_WORD < 8 && size > sizeof(val)) {
if (MP_BYTES_PER_OBJ_WORD < 8 && size > sizeof(val)) {
int c = (is_signed(val_type) && (mp_int_t)val < 0) ? 0xff : 0x00;
memset(p, c, size);
if (struct_type == '>') {

View File

@ -36,7 +36,7 @@
#if MICROPY_ENABLE_COMPILER
#define BYTES_FOR_INT ((BYTES_PER_WORD * 8 + 6) / 7)
#define BYTES_FOR_INT ((MP_BYTES_PER_OBJ_WORD * 8 + 6) / 7)
#define DUMMY_DATA_SIZE (BYTES_FOR_INT)
struct _emit_t {

View File

@ -126,9 +126,9 @@ void gc_init(void *start, void *end) {
// => T = A * (1 + BLOCKS_PER_ATB / BLOCKS_PER_FTB + BLOCKS_PER_ATB * BYTES_PER_BLOCK)
size_t total_byte_len = (byte *)end - (byte *)start;
#if MICROPY_ENABLE_FINALISER
MP_STATE_MEM(gc_alloc_table_byte_len) = total_byte_len * BITS_PER_BYTE / (BITS_PER_BYTE + BITS_PER_BYTE * BLOCKS_PER_ATB / BLOCKS_PER_FTB + BITS_PER_BYTE * BLOCKS_PER_ATB * BYTES_PER_BLOCK);
MP_STATE_MEM(gc_alloc_table_byte_len) = total_byte_len * MP_BITS_PER_BYTE / (MP_BITS_PER_BYTE + MP_BITS_PER_BYTE * BLOCKS_PER_ATB / BLOCKS_PER_FTB + MP_BITS_PER_BYTE * BLOCKS_PER_ATB * BYTES_PER_BLOCK);
#else
MP_STATE_MEM(gc_alloc_table_byte_len) = total_byte_len / (1 + BITS_PER_BYTE / 2 * BYTES_PER_BLOCK);
MP_STATE_MEM(gc_alloc_table_byte_len) = total_byte_len / (1 + MP_BITS_PER_BYTE / 2 * BYTES_PER_BLOCK);
#endif
MP_STATE_MEM(gc_alloc_table_start) = (byte *)start;
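A worked example of the sizing formula above (the MICROPY_ENABLE_FINALISER branch), using the usual 64-bit defaults of 32-byte GC blocks, 4 blocks per allocation-table byte and 8 blocks per finaliser-table byte; the constants are copied here purely for illustration.

#include <stdio.h>

#define MP_BITS_PER_BYTE 8
#define BYTES_PER_BLOCK 32  // 4 * MP_BYTES_PER_OBJ_WORD on a 64-bit build
#define BLOCKS_PER_ATB 4    // 2 bits of alloc table per block
#define BLOCKS_PER_FTB 8    // 1 bit of finaliser table per block

int main(void) {
    size_t total_byte_len = 1024 * 1024; // a 1 MiB heap region
    size_t atb_len = total_byte_len * MP_BITS_PER_BYTE /
        (MP_BITS_PER_BYTE
         + MP_BITS_PER_BYTE * BLOCKS_PER_ATB / BLOCKS_PER_FTB
         + MP_BITS_PER_BYTE * BLOCKS_PER_ATB * BYTES_PER_BLOCK);
    printf("alloc table: %zu bytes, usable pool: %zu bytes\n",
        atb_len, atb_len * BLOCKS_PER_ATB * BYTES_PER_BLOCK);
    // prints: alloc table: 8097 bytes, usable pool: 1036416 bytes
    return 0;
}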

View File

@ -32,7 +32,7 @@
#include "py/mpstate.h"
#include "py/misc.h"
#define WORDS_PER_BLOCK ((MICROPY_BYTES_PER_GC_BLOCK) / BYTES_PER_WORD)
#define WORDS_PER_BLOCK ((MICROPY_BYTES_PER_GC_BLOCK) / MP_BYTES_PER_OBJ_WORD)
#define BYTES_PER_BLOCK (MICROPY_BYTES_PER_GC_BLOCK)
// ptr should be of type void*

py/mkrules.cmake (new file, 135 lines)
View File

@ -0,0 +1,135 @@
# CMake fragment for MicroPython rules
set(MICROPY_GENHDR_DIR "${CMAKE_BINARY_DIR}/genhdr")
set(MICROPY_MPVERSION "${MICROPY_GENHDR_DIR}/mpversion.h")
set(MICROPY_MODULEDEFS "${MICROPY_GENHDR_DIR}/moduledefs.h")
set(MICROPY_QSTRDEFS_PY "${MICROPY_PY_DIR}/qstrdefs.h")
set(MICROPY_QSTRDEFS_LAST "${MICROPY_GENHDR_DIR}/qstr.i.last")
set(MICROPY_QSTRDEFS_SPLIT "${MICROPY_GENHDR_DIR}/qstr.split")
set(MICROPY_QSTRDEFS_COLLECTED "${MICROPY_GENHDR_DIR}/qstrdefs.collected.h")
set(MICROPY_QSTRDEFS_PREPROCESSED "${MICROPY_GENHDR_DIR}/qstrdefs.preprocessed.h")
set(MICROPY_QSTRDEFS_GENERATED "${MICROPY_GENHDR_DIR}/qstrdefs.generated.h")
# Provide defaults for preprocessor flags if not already defined
if(NOT MICROPY_CPP_FLAGS)
get_target_property(MICROPY_CPP_INC ${MICROPY_TARGET} INCLUDE_DIRECTORIES)
get_target_property(MICROPY_CPP_DEF ${MICROPY_TARGET} COMPILE_DEFINITIONS)
endif()
# Compute MICROPY_CPP_FLAGS for preprocessor
list(APPEND MICROPY_CPP_INC ${MICROPY_CPP_INC_EXTRA})
list(APPEND MICROPY_CPP_DEF ${MICROPY_CPP_DEF_EXTRA})
set(_prefix "-I")
foreach(_arg ${MICROPY_CPP_INC})
list(APPEND MICROPY_CPP_FLAGS ${_prefix}${_arg})
endforeach()
set(_prefix "-D")
foreach(_arg ${MICROPY_CPP_DEF})
list(APPEND MICROPY_CPP_FLAGS ${_prefix}${_arg})
endforeach()
list(APPEND MICROPY_CPP_FLAGS ${MICROPY_CPP_FLAGS_EXTRA})
find_package(Python3 REQUIRED COMPONENTS Interpreter)
target_sources(${MICROPY_TARGET} PRIVATE
${MICROPY_MPVERSION}
${MICROPY_QSTRDEFS_GENERATED}
)
# Command to force the build of another command
add_custom_command(
OUTPUT MICROPY_FORCE_BUILD
COMMENT ""
COMMAND echo -n
)
# Generate mpversion.h
add_custom_command(
OUTPUT ${MICROPY_MPVERSION}
COMMAND ${CMAKE_COMMAND} -E make_directory ${MICROPY_GENHDR_DIR}
COMMAND ${Python3_EXECUTABLE} ${MICROPY_DIR}/py/makeversionhdr.py ${MICROPY_MPVERSION}
DEPENDS MICROPY_FORCE_BUILD
)
# Generate moduledefs.h
add_custom_command(
OUTPUT ${MICROPY_MODULEDEFS}
COMMAND ${Python3_EXECUTABLE} ${MICROPY_PY_DIR}/makemoduledefs.py --vpath="/" ${MICROPY_SOURCE_QSTR} > ${MICROPY_MODULEDEFS}
DEPENDS ${MICROPY_MPVERSION}
${MICROPY_SOURCE_QSTR}
)
# Generate qstrs
# If any of the dependencies in this rule change then the C-preprocessor step must be run.
# It only needs to be passed the list of MICROPY_SOURCE_QSTR files that have changed since
# it was last run, but it looks like it's not possible to specify that with cmake.
add_custom_command(
OUTPUT ${MICROPY_QSTRDEFS_LAST}
COMMAND ${Python3_EXECUTABLE} ${MICROPY_PY_DIR}/makeqstrdefs.py pp ${CMAKE_C_COMPILER} -E output ${MICROPY_GENHDR_DIR}/qstr.i.last cflags ${MICROPY_CPP_FLAGS} -DNO_QSTR cxxflags ${MICROPY_CPP_FLAGS} -DNO_QSTR sources ${MICROPY_SOURCE_QSTR}
DEPENDS ${MICROPY_MODULEDEFS}
${MICROPY_SOURCE_QSTR}
VERBATIM
COMMAND_EXPAND_LISTS
)
add_custom_command(
OUTPUT ${MICROPY_QSTRDEFS_SPLIT}
COMMAND ${Python3_EXECUTABLE} ${MICROPY_PY_DIR}/makeqstrdefs.py split qstr ${MICROPY_GENHDR_DIR}/qstr.i.last ${MICROPY_GENHDR_DIR}/qstr _
COMMAND touch ${MICROPY_QSTRDEFS_SPLIT}
DEPENDS ${MICROPY_QSTRDEFS_LAST}
VERBATIM
COMMAND_EXPAND_LISTS
)
add_custom_command(
OUTPUT ${MICROPY_QSTRDEFS_COLLECTED}
COMMAND ${Python3_EXECUTABLE} ${MICROPY_PY_DIR}/makeqstrdefs.py cat qstr _ ${MICROPY_GENHDR_DIR}/qstr ${MICROPY_QSTRDEFS_COLLECTED}
DEPENDS ${MICROPY_QSTRDEFS_SPLIT}
VERBATIM
COMMAND_EXPAND_LISTS
)
add_custom_command(
OUTPUT ${MICROPY_QSTRDEFS_PREPROCESSED}
COMMAND cat ${MICROPY_QSTRDEFS_PY} ${MICROPY_QSTRDEFS_PORT} ${MICROPY_QSTRDEFS_COLLECTED} | sed "s/^Q(.*)/\"&\"/" | ${CMAKE_C_COMPILER} -E ${MICROPY_CPP_FLAGS} - | sed "s/^\\\"\\(Q(.*)\\)\\\"/\\1/" > ${MICROPY_QSTRDEFS_PREPROCESSED}
DEPENDS ${MICROPY_QSTRDEFS_PY}
${MICROPY_QSTRDEFS_PORT}
${MICROPY_QSTRDEFS_COLLECTED}
VERBATIM
COMMAND_EXPAND_LISTS
)
add_custom_command(
OUTPUT ${MICROPY_QSTRDEFS_GENERATED}
COMMAND ${Python3_EXECUTABLE} ${MICROPY_PY_DIR}/makeqstrdata.py ${MICROPY_QSTRDEFS_PREPROCESSED} > ${MICROPY_QSTRDEFS_GENERATED}
DEPENDS ${MICROPY_QSTRDEFS_PREPROCESSED}
VERBATIM
COMMAND_EXPAND_LISTS
)
# Build frozen code if enabled
if(MICROPY_FROZEN_MANIFEST)
set(MICROPY_FROZEN_CONTENT "${CMAKE_BINARY_DIR}/frozen_content.c")
target_sources(${MICROPY_TARGET} PRIVATE
${MICROPY_FROZEN_CONTENT}
)
target_compile_definitions(${MICROPY_TARGET} PUBLIC
MICROPY_QSTR_EXTRA_POOL=mp_qstr_frozen_const_pool
MICROPY_MODULE_FROZEN_MPY=\(1\)
)
add_custom_command(
OUTPUT ${MICROPY_FROZEN_CONTENT}
COMMAND ${Python3_EXECUTABLE} ${MICROPY_DIR}/tools/makemanifest.py -o ${MICROPY_FROZEN_CONTENT} -v "MPY_DIR=${MICROPY_DIR}" -v "PORT_DIR=${MICROPY_PORT_DIR}" -b "${CMAKE_BINARY_DIR}" -f${MICROPY_CROSS_FLAGS} ${MICROPY_FROZEN_MANIFEST}
DEPENDS MICROPY_FORCE_BUILD
${MICROPY_QSTRDEFS_GENERATED}
VERBATIM
)
endif()
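For context, a minimal sketch of what this qstr pipeline produces and consumes: C sources name interned strings via MP_QSTR_* identifiers, the preprocess/split/cat steps above collect every such use, and qstrdefs.generated.h supplies the table entries that turn them into valid constants. The function and the "hello" qstr below are made up for illustration.

#include "py/obj.h"

mp_obj_t make_hello_qstr(void) {
    // MP_QSTR_hello compiles only because the build steps above scanned this
    // file and emitted a matching entry into qstrdefs.generated.h.
    return MP_OBJ_NEW_QSTR(MP_QSTR_hello);
}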

View File

@ -197,7 +197,7 @@ STATIC const mp_stream_p_t bufwriter_stream_p = {
.write = bufwriter_write,
};
STATIC const mp_obj_type_t bufwriter_type = {
STATIC const mp_obj_type_t mp_type_bufwriter = {
{ &mp_type_type },
.name = MP_QSTR_BufferedWriter,
.make_new = bufwriter_make_new,
@ -272,7 +272,7 @@ STATIC const mp_rom_map_elem_t mp_module_io_globals_table[] = {
{ MP_ROM_QSTR(MP_QSTR_BytesIO), MP_ROM_PTR(&mp_type_bytesio) },
#endif
#if MICROPY_PY_IO_BUFFEREDWRITER
{ MP_ROM_QSTR(MP_QSTR_BufferedWriter), MP_ROM_PTR(&bufwriter_type) },
{ MP_ROM_QSTR(MP_QSTR_BufferedWriter), MP_ROM_PTR(&mp_type_bufwriter) },
#endif
};

View File

@ -114,7 +114,7 @@
// Number of bytes in memory allocation/GC block. Any size allocated will be
// rounded up to be multiples of this.
#ifndef MICROPY_BYTES_PER_GC_BLOCK
#define MICROPY_BYTES_PER_GC_BLOCK (4 * BYTES_PER_WORD)
#define MICROPY_BYTES_PER_GC_BLOCK (4 * MP_BYTES_PER_OBJ_WORD)
#endif
// Number of words allocated (in BSS) to the GC stack (minimum is 1)
@ -223,6 +223,11 @@
#define MICROPY_MODULE_DICT_SIZE (1)
#endif
// Initial size of sys.modules dict
#ifndef MICROPY_LOADED_MODULES_DICT_SIZE
#define MICROPY_LOADED_MODULES_DICT_SIZE (3)
#endif
// Whether realloc/free should be passed allocated memory region size
// You must enable this if MICROPY_MEM_STATS is enabled
#ifndef MICROPY_MALLOC_USES_ALLOCATED_SIZE
@ -1546,17 +1551,17 @@ typedef double mp_float_t;
#define STATIC static
#endif
// Number of bytes in a word
#ifndef BYTES_PER_WORD
#define BYTES_PER_WORD (sizeof(mp_uint_t))
// Number of bytes in an object word: mp_obj_t, mp_uint_t, mp_uint_t
#ifndef MP_BYTES_PER_OBJ_WORD
#define MP_BYTES_PER_OBJ_WORD (sizeof(mp_uint_t))
#endif
#ifndef BITS_PER_BYTE
#define BITS_PER_BYTE (8)
// Number of bits in a byte
#ifndef MP_BITS_PER_BYTE
#define MP_BITS_PER_BYTE (8)
#endif
#define BITS_PER_WORD (BITS_PER_BYTE * BYTES_PER_WORD)
// mp_int_t value with most significant bit set
#define WORD_MSBIT_HIGH (((mp_uint_t)1) << (BYTES_PER_WORD * 8 - 1))
#define MP_OBJ_WORD_MSBIT_HIGH (((mp_uint_t)1) << (MP_BYTES_PER_OBJ_WORD * MP_BITS_PER_BYTE - 1))
// Make sure both MP_ENDIANNESS_LITTLE and MP_ENDIANNESS_BIG are
// defined and that they are the opposite of each other.
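A hypothetical mpconfigport.h fragment (values illustrative, not recommendations) showing the knobs touched above from a port's side: the new sys.modules pre-size, and port-level uses of the old BYTES_PER_WORD/BITS_PER_BYTE names moving to the MP_-prefixed spellings.

// mpconfigport.h (hypothetical excerpt)
#define MICROPY_LOADED_MODULES_DICT_SIZE (8) // pre-size sys.modules, e.g. for many frozen modules
#define MICROPY_BYTES_PER_GC_BLOCK (4 * MP_BYTES_PER_OBJ_WORD) // was 4 * BYTES_PER_WORD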

View File

@ -535,60 +535,37 @@ STATIC void mpn_div(mpz_dig_t *num_dig, size_t *num_len, const mpz_dig_t *den_di
quo /= lead_den_digit;
// Multiply quo by den and subtract from num to get remainder.
// We have different code here to handle different compile-time
// configurations of mpz:
//
// 1. DIG_SIZE is stricly less than half the number of bits
// available in mpz_dbl_dig_t. In this case we can use a
// slightly more optimal (in time and space) routine that
// uses the extra bits in mpz_dbl_dig_signed_t to store a
// sign bit.
//
// 2. DIG_SIZE is exactly half the number of bits available in
// mpz_dbl_dig_t. In this (common) case we need to be careful
// not to overflow the borrow variable. And the shifting of
// borrow needs some special logic (it's a shift right with
// round up).
//
// Must be careful with overflow of the borrow variable. Both
// borrow and low_digs are signed values and need signed right-shift,
// but x is unsigned and may take a full-range value.
const mpz_dig_t *d = den_dig;
mpz_dbl_dig_t d_norm = 0;
mpz_dbl_dig_t borrow = 0;
mpz_dbl_dig_signed_t borrow = 0;
for (mpz_dig_t *n = num_dig - den_len; n < num_dig; ++n, ++d) {
// Get the next digit in (den).
d_norm = ((mpz_dbl_dig_t)*d << norm_shift) | (d_norm >> DIG_SIZE);
// Multiply the next digit in (quo * den).
mpz_dbl_dig_t x = (mpz_dbl_dig_t)quo * (d_norm & DIG_MASK);
#if DIG_SIZE < MPZ_DBL_DIG_SIZE / 2
borrow += (mpz_dbl_dig_t)*n - x; // will overflow if DIG_SIZE >= MPZ_DBL_DIG_SIZE/2
*n = borrow & DIG_MASK;
borrow = (mpz_dbl_dig_signed_t)borrow >> DIG_SIZE;
#else // DIG_SIZE == MPZ_DBL_DIG_SIZE / 2
if (x >= *n || *n - x <= borrow) {
borrow += x - (mpz_dbl_dig_t)*n;
*n = (-borrow) & DIG_MASK;
borrow = (borrow >> DIG_SIZE) + ((borrow & DIG_MASK) == 0 ? 0 : 1); // shift-right with round-up
} else {
*n = ((mpz_dbl_dig_t)*n - x - borrow) & DIG_MASK;
borrow = 0;
}
#endif
// Compute the low DIG_MASK bits of the next digit in (num - quo * den)
mpz_dbl_dig_signed_t low_digs = (borrow & DIG_MASK) + *n - (x & DIG_MASK);
// Store the digit result for (num).
*n = low_digs & DIG_MASK;
// Compute the borrow, shifted right before summing to avoid overflow.
borrow = (borrow >> DIG_SIZE) - (x >> DIG_SIZE) + (low_digs >> DIG_SIZE);
}
#if DIG_SIZE < MPZ_DBL_DIG_SIZE / 2
// Borrow was negative in the above for-loop, make it positive for next if-block.
borrow = -borrow;
#endif
// At this point we have either:
//
// 1. quo was the correct value and the most-sig-digit of num is exactly
// cancelled by borrow (borrow == *num_dig). In this case there is
// cancelled by borrow (borrow + *num_dig == 0). In this case there is
// nothing more to do.
//
// 2. quo was too large, we subtracted too many den from num, and the
// most-sig-digit of num is 1 less than borrow (borrow == *num_dig + 1).
// most-sig-digit of num is less than needed (borrow + *num_dig < 0).
// In this case we must reduce quo and add back den to num until the
// carry from this operation cancels out the borrow.
//
borrow -= *num_dig;
borrow += *num_dig;
for (; borrow != 0; --quo) {
d = den_dig;
d_norm = 0;
@ -599,7 +576,7 @@ STATIC void mpn_div(mpz_dig_t *num_dig, size_t *num_len, const mpz_dig_t *den_di
*n = carry & DIG_MASK;
carry >>= DIG_SIZE;
}
borrow -= carry;
borrow += carry;
}
// store this digit of the quotient
@ -1577,7 +1554,7 @@ bool mpz_as_int_checked(const mpz_t *i, mp_int_t *value) {
mpz_dig_t *d = i->dig + i->len;
while (d-- > i->dig) {
if (val > (~(WORD_MSBIT_HIGH) >> DIG_SIZE)) {
if (val > (~(MP_OBJ_WORD_MSBIT_HIGH) >> DIG_SIZE)) {
// will overflow
return false;
}
@ -1602,7 +1579,7 @@ bool mpz_as_uint_checked(const mpz_t *i, mp_uint_t *value) {
mpz_dig_t *d = i->dig + i->len;
while (d-- > i->dig) {
if (val > (~(WORD_MSBIT_HIGH) >> (DIG_SIZE - 1))) {
if (val > (~(MP_OBJ_WORD_MSBIT_HIGH) >> (DIG_SIZE - 1))) {
// will overflow
return false;
}
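A toy model of the signed-borrow subtraction in the rewritten mpn_div loop above, shrunk to 8-bit digits with made-up type names; like the real code it relies on arithmetic right shift of negative values, and it leaves out the normalisation shift, which is orthogonal to the borrow handling. The big-integer division tests added later in this commit exercise the same borrow-overflow edge cases at full size.

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef uint8_t dig_t;            // stand-in for mpz_dig_t
typedef uint16_t dbl_dig_t;       // stand-in for mpz_dbl_dig_t
typedef int16_t dbl_dig_signed_t; // stand-in for mpz_dbl_dig_signed_t
#define DIG_SIZE 8
#define DIG_MASK 0xff

// Subtract quo*den from num in place (little-endian digits, equal length) and
// return the final signed borrow, mirroring the inner loop above.
static dbl_dig_signed_t sub_quo_times_den(dig_t *num, const dig_t *den, size_t len, dig_t quo) {
    dbl_dig_signed_t borrow = 0;
    for (size_t i = 0; i < len; ++i) {
        dbl_dig_t x = (dbl_dig_t)quo * den[i];
        dbl_dig_signed_t low_digs = (borrow & DIG_MASK) + num[i] - (x & DIG_MASK);
        num[i] = low_digs & DIG_MASK;
        borrow = (borrow >> DIG_SIZE) - (dbl_dig_signed_t)(x >> DIG_SIZE) + (low_digs >> DIG_SIZE);
    }
    return borrow;
}

int main(void) {
    dig_t num[3] = {0x00, 0x00, 0x40}; // 0x400000
    dig_t den[3] = {0x01, 0x00, 0x10}; // 0x100001
    dbl_dig_signed_t b = sub_quo_times_den(num, den, 3, 4); // quo guess of 4 is one too big
    // 0x400000 - 4*0x100001 = -4: digits wrap to 0xfffffc and the borrow goes
    // negative, which is the "add back den and reduce quo" case handled above.
    printf("borrow=%d digits=%02x%02x%02x\n", b, (unsigned)num[2], (unsigned)num[1], (unsigned)num[0]);
    return 0;
}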

View File

@ -39,6 +39,7 @@
#define MICROPY_NLR_NUM_REGS_X64_WIN (10)
#define MICROPY_NLR_NUM_REGS_ARM_THUMB (10)
#define MICROPY_NLR_NUM_REGS_ARM_THUMB_FP (10 + 6)
#define MICROPY_NLR_NUM_REGS_AARCH64 (13)
#define MICROPY_NLR_NUM_REGS_XTENSA (10)
#define MICROPY_NLR_NUM_REGS_XTENSAWIN (17)
@ -75,6 +76,9 @@
// so only save/restore those as an optimisation.
#define MICROPY_NLR_NUM_REGS (MICROPY_NLR_NUM_REGS_ARM_THUMB_FP)
#endif
#elif defined(__aarch64__)
#define MICROPY_NLR_AARCH64 (1)
#define MICROPY_NLR_NUM_REGS (MICROPY_NLR_NUM_REGS_AARCH64)
#elif defined(__xtensa__)
#define MICROPY_NLR_SETJMP (0)
#define MICROPY_NLR_XTENSA (1)

py/nlraarch64.c (new file, 74 lines)
View File

@ -0,0 +1,74 @@
/*
* This file is part of the MicroPython project, http://micropython.org/
*
* The MIT License (MIT)
*
* Copyright (c) 2021 Yonatan Goldschmidt
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include "py/mpstate.h" // needed for NLR defs
#if defined(MICROPY_NLR_AARCH64) && MICROPY_NLR_AARCH64
// AArch64 callee-saved registers are x19-x29.
// https://en.wikipedia.org/wiki/Calling_convention#ARM_(A64)
// Implemented purely as inline assembly; inside a function, we have to deal with undoing the prologue, restoring
// SP and LR. This way, we don't.
__asm(
"nlr_push: \n"
".global nlr_push \n"
"mov x9, sp \n"
"stp lr, x9, [x0, #16]\n" // 16 == offsetof(nlr_buf_t, regs)
"stp x19, x20, [x0, #32]\n"
"stp x21, x22, [x0, #48]\n"
"stp x23, x24, [x0, #64]\n"
"stp x25, x26, [x0, #80]\n"
"stp x27, x28, [x0, #96]\n"
"str x29, [x0, #112]\n"
"b nlr_push_tail \n" // do the rest in C
);
NORETURN void nlr_jump(void *val) {
MP_NLR_JUMP_HEAD(val, top)
MP_STATIC_ASSERT(offsetof(nlr_buf_t, regs) == 16); // asm assumes it
__asm volatile (
"ldr x29, [%0, #112]\n"
"ldp x27, x28, [%0, #96]\n"
"ldp x25, x26, [%0, #80]\n"
"ldp x23, x24, [%0, #64]\n"
"ldp x21, x22, [%0, #48]\n"
"ldp x19, x20, [%0, #32]\n"
"ldp lr, x9, [%0, #16]\n" // 16 == offsetof(nlr_buf_t, regs)
"mov sp, x9 \n"
"mov x0, #1 \n" // non-local return
"ret \n"
:
: "r" (top)
:
);
MP_UNREACHABLE
}
#endif // MICROPY_NLR_AARCH64
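For context, a sketch of the calling convention this assembly implements, i.e. how core code uses the NLR API (the -m handling in ports/unix/main.c above is a real instance): nlr_push() returns 0 on the initial call, and returns control here with a non-zero value when nlr_jump() fires, carrying the raised object in nlr.ret_val. The wrapper function below is illustrative only.

#include "py/runtime.h"

mp_obj_t call_and_catch(mp_obj_t fun) {
    nlr_buf_t nlr;
    if (nlr_push(&nlr) == 0) {
        mp_obj_t ret = mp_call_function_0(fun); // may raise via nlr_jump()
        nlr_pop(); // balance the successful nlr_push()
        return ret;
    } else {
        // nlr_jump() landed here; the exception object was passed as its argument
        return MP_OBJ_FROM_PTR(nlr.ret_val);
    }
}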

View File

@ -89,7 +89,7 @@ NORETURN void nlr_jump(void *val) {
"movq %0, %%rcx \n" // %rcx points to nlr_buf
#if MICROPY_NLR_OS_WINDOWS
"movq 88(%%rcx), %%rsi \n" // load saved %rsi
"movq 80(%%rcx), %%rdi \n" // load saved %rdr
"movq 80(%%rcx), %%rdi \n" // load saved %rdi
#endif
"movq 72(%%rcx), %%r15 \n" // load saved %r15
"movq 64(%%rcx), %%r14 \n" // load saved %r14

View File

@ -814,7 +814,7 @@ STATIC mp_obj_t array_it_iternext(mp_obj_t self_in) {
}
}
STATIC const mp_obj_type_t array_it_type = {
STATIC const mp_obj_type_t mp_type_array_it = {
{ &mp_type_type },
.name = MP_QSTR_iterator,
.getiter = mp_identity_getiter,
@ -825,7 +825,7 @@ STATIC mp_obj_t array_iterator_new(mp_obj_t array_in, mp_obj_iter_buf_t *iter_bu
assert(sizeof(mp_obj_array_t) <= sizeof(mp_obj_iter_buf_t));
mp_obj_array_t *array = MP_OBJ_TO_PTR(array_in);
mp_obj_array_it_t *o = (mp_obj_array_it_t *)iter_buf;
o->base.type = &array_it_type;
o->base.type = &mp_type_array_it;
o->array = array;
o->offset = 0;
o->cur = 0;

View File

@ -78,7 +78,7 @@ STATIC void closure_print(const mp_print_t *print, mp_obj_t o_in, mp_print_kind_
}
#endif
const mp_obj_type_t closure_type = {
const mp_obj_type_t mp_type_closure = {
{ &mp_type_type },
.flags = MP_TYPE_FLAG_BINDS_SELF,
.name = MP_QSTR_closure,
@ -90,7 +90,7 @@ const mp_obj_type_t closure_type = {
mp_obj_t mp_obj_new_closure(mp_obj_t fun, size_t n_closed_over, const mp_obj_t *closed) {
mp_obj_closure_t *o = m_new_obj_var(mp_obj_closure_t, mp_obj_t, n_closed_over);
o->base.type = &closure_type;
o->base.type = &mp_type_closure;
o->fun = fun;
o->n_closed = n_closed_over;
memcpy(o->closed, closed, n_closed_over * sizeof(mp_obj_t));

View File

@ -411,8 +411,8 @@ STATIC MP_DEFINE_CONST_FUN_OBJ_KW(dict_update_obj, 1, dict_update);
/******************************************************************************/
/* dict views */
STATIC const mp_obj_type_t dict_view_type;
STATIC const mp_obj_type_t dict_view_it_type;
STATIC const mp_obj_type_t mp_type_dict_view;
STATIC const mp_obj_type_t mp_type_dict_view_it;
typedef enum _mp_dict_view_kind_t {
MP_DICT_VIEW_ITEMS,
@ -436,7 +436,7 @@ typedef struct _mp_obj_dict_view_t {
} mp_obj_dict_view_t;
STATIC mp_obj_t dict_view_it_iternext(mp_obj_t self_in) {
mp_check_self(mp_obj_is_type(self_in, &dict_view_it_type));
mp_check_self(mp_obj_is_type(self_in, &mp_type_dict_view_it));
mp_obj_dict_view_it_t *self = MP_OBJ_TO_PTR(self_in);
mp_map_elem_t *next = dict_iter_next(MP_OBJ_TO_PTR(self->dict), &self->cur);
@ -457,7 +457,7 @@ STATIC mp_obj_t dict_view_it_iternext(mp_obj_t self_in) {
}
}
STATIC const mp_obj_type_t dict_view_it_type = {
STATIC const mp_obj_type_t mp_type_dict_view_it = {
{ &mp_type_type },
.name = MP_QSTR_iterator,
.getiter = mp_identity_getiter,
@ -466,10 +466,10 @@ STATIC const mp_obj_type_t dict_view_it_type = {
STATIC mp_obj_t dict_view_getiter(mp_obj_t view_in, mp_obj_iter_buf_t *iter_buf) {
assert(sizeof(mp_obj_dict_view_it_t) <= sizeof(mp_obj_iter_buf_t));
mp_check_self(mp_obj_is_type(view_in, &dict_view_type));
mp_check_self(mp_obj_is_type(view_in, &mp_type_dict_view));
mp_obj_dict_view_t *view = MP_OBJ_TO_PTR(view_in);
mp_obj_dict_view_it_t *o = (mp_obj_dict_view_it_t *)iter_buf;
o->base.type = &dict_view_it_type;
o->base.type = &mp_type_dict_view_it;
o->kind = view->kind;
o->dict = view->dict;
o->cur = 0;
@ -478,7 +478,7 @@ STATIC mp_obj_t dict_view_getiter(mp_obj_t view_in, mp_obj_iter_buf_t *iter_buf)
STATIC void dict_view_print(const mp_print_t *print, mp_obj_t self_in, mp_print_kind_t kind) {
(void)kind;
mp_check_self(mp_obj_is_type(self_in, &dict_view_type));
mp_check_self(mp_obj_is_type(self_in, &mp_type_dict_view));
mp_obj_dict_view_t *self = MP_OBJ_TO_PTR(self_in);
bool first = true;
mp_print_str(print, mp_dict_view_names[self->kind]);
@ -508,7 +508,7 @@ STATIC mp_obj_t dict_view_binary_op(mp_binary_op_t op, mp_obj_t lhs_in, mp_obj_t
return dict_binary_op(op, o->dict, rhs_in);
}
STATIC const mp_obj_type_t dict_view_type = {
STATIC const mp_obj_type_t mp_type_dict_view = {
{ &mp_type_type },
.name = MP_QSTR_dict_view,
.print = dict_view_print,
@ -518,7 +518,7 @@ STATIC const mp_obj_type_t dict_view_type = {
STATIC mp_obj_t mp_obj_new_dict_view(mp_obj_t dict, mp_dict_view_kind_t kind) {
mp_obj_dict_view_t *o = m_new_obj(mp_obj_dict_view_t);
o->base.type = &dict_view_type;
o->base.type = &mp_type_dict_view;
o->dict = dict;
o->kind = kind;
return MP_OBJ_FROM_PTR(o);
@ -551,7 +551,7 @@ STATIC mp_obj_t dict_getiter(mp_obj_t self_in, mp_obj_iter_buf_t *iter_buf) {
assert(sizeof(mp_obj_dict_view_it_t) <= sizeof(mp_obj_iter_buf_t));
mp_check_self(mp_obj_is_dict_or_ordereddict(self_in));
mp_obj_dict_view_it_t *o = (mp_obj_dict_view_it_t *)iter_buf;
o->base.type = &dict_view_it_type;
o->base.type = &mp_type_dict_view_it;
o->kind = MP_DICT_VIEW_KEYS;
o->dict = self_in;
o->cur = 0;

View File

@ -82,7 +82,7 @@ mp_int_t mp_float_hash(mp_float_t src) {
// number may have a fraction; xor the integer part with the fractional part
val = (frc >> (MP_FLOAT_FRAC_BITS - adj_exp))
^ (frc & (((mp_float_uint_t)1 << (MP_FLOAT_FRAC_BITS - adj_exp)) - 1));
} else if ((unsigned int)adj_exp < BITS_PER_BYTE * sizeof(mp_int_t) - 1) {
} else if ((unsigned int)adj_exp < MP_BITS_PER_BYTE * sizeof(mp_int_t) - 1) {
// the number is a (big) whole integer and will fit in val's signed-width
val = (mp_int_t)frc << (adj_exp - MP_FLOAT_FRAC_BITS);
} else {

View File

@ -57,7 +57,7 @@ STATIC mp_obj_t it_iternext(mp_obj_t self_in) {
}
}
STATIC const mp_obj_type_t it_type = {
STATIC const mp_obj_type_t mp_type_it = {
{ &mp_type_type },
.name = MP_QSTR_iterator,
.getiter = mp_identity_getiter,
@ -68,7 +68,7 @@ STATIC const mp_obj_type_t it_type = {
mp_obj_t mp_obj_new_getitem_iter(mp_obj_t *args, mp_obj_iter_buf_t *iter_buf) {
assert(sizeof(mp_obj_getitem_iter_t) <= sizeof(mp_obj_iter_buf_t));
mp_obj_getitem_iter_t *o = (mp_obj_getitem_iter_t *)iter_buf;
o->base.type = &it_type;
o->base.type = &mp_type_it;
o->args[0] = args[0];
o->args[1] = args[1];
o->args[2] = MP_OBJ_NEW_SMALL_INT(0);

View File

@ -123,7 +123,7 @@ STATIC mp_fp_as_int_class_t mp_classify_fp_as_int(mp_float_t val) {
return MP_FP_CLASS_FIT_SMALLINT;
}
#if MICROPY_LONGINT_IMPL == MICROPY_LONGINT_IMPL_LONGLONG
if (e <= (((sizeof(long long) * BITS_PER_BYTE) + MP_FLOAT_EXP_BIAS - 2) << MP_FLOAT_EXP_SHIFT_I32)) {
if (e <= (((sizeof(long long) * MP_BITS_PER_BYTE) + MP_FLOAT_EXP_BIAS - 2) << MP_FLOAT_EXP_SHIFT_I32)) {
return MP_FP_CLASS_FIT_LONGINT;
}
#endif

View File

@ -52,7 +52,7 @@ STATIC mp_obj_t range_it_iternext(mp_obj_t o_in) {
}
}
STATIC const mp_obj_type_t range_it_type = {
STATIC const mp_obj_type_t mp_type_range_it = {
{ &mp_type_type },
.name = MP_QSTR_iterator,
.getiter = mp_identity_getiter,
@ -62,7 +62,7 @@ STATIC const mp_obj_type_t range_it_type = {
STATIC mp_obj_t mp_obj_new_range_iterator(mp_int_t cur, mp_int_t stop, mp_int_t step, mp_obj_iter_buf_t *iter_buf) {
assert(sizeof(mp_obj_range_it_t) <= sizeof(mp_obj_iter_buf_t));
mp_obj_range_it_t *o = (mp_obj_range_it_t *)iter_buf;
o->base.type = &range_it_type;
o->base.type = &mp_type_range_it;
o->cur = cur;
o->stop = stop;
o->step = step;

View File

@ -623,7 +623,7 @@ STATIC void mp_print_bytes(mp_print_t *print, const byte *data, size_t len) {
print->print_strn(print->data, (const char *)data, len);
}
#define BYTES_FOR_INT ((BYTES_PER_WORD * 8 + 6) / 7)
#define BYTES_FOR_INT ((MP_BYTES_PER_OBJ_WORD * 8 + 6) / 7)
STATIC void mp_print_uint(mp_print_t *print, size_t n) {
byte buf[BYTES_FOR_INT];
byte *p = buf + sizeof(buf);

View File

@ -172,7 +172,7 @@ STATIC void code_attr(mp_obj_t self_in, qstr attr, mp_obj_t *dest) {
}
}
const mp_obj_type_t mp_type_code = {
const mp_obj_type_t mp_type_settrace_codeobj = {
{ &mp_type_type },
.name = MP_QSTR_code,
.print = code_print,
@ -185,7 +185,7 @@ mp_obj_t mp_obj_new_code(const mp_raw_code_t *rc) {
if (o == NULL) {
return MP_OBJ_NULL;
}
o->base.type = &mp_type_code;
o->base.type = &mp_type_settrace_codeobj;
o->rc = rc;
o->dict_locals = mp_locals_get(); // this is a wrong! how to do this properly?
o->lnotab = MP_OBJ_NULL;

py/py.cmake (new file, 148 lines)
View File

@ -0,0 +1,148 @@
# CMake fragment for MicroPython core py component
set(MICROPY_PY_DIR "${MICROPY_DIR}/py")
list(APPEND MICROPY_INC_CORE "${MICROPY_DIR}")
# All py/ source files
set(MICROPY_SOURCE_PY
${MICROPY_PY_DIR}/argcheck.c
${MICROPY_PY_DIR}/asmarm.c
${MICROPY_PY_DIR}/asmbase.c
${MICROPY_PY_DIR}/asmthumb.c
${MICROPY_PY_DIR}/asmx64.c
${MICROPY_PY_DIR}/asmx86.c
${MICROPY_PY_DIR}/asmxtensa.c
${MICROPY_PY_DIR}/bc.c
${MICROPY_PY_DIR}/binary.c
${MICROPY_PY_DIR}/builtinevex.c
${MICROPY_PY_DIR}/builtinhelp.c
${MICROPY_PY_DIR}/builtinimport.c
${MICROPY_PY_DIR}/compile.c
${MICROPY_PY_DIR}/emitbc.c
${MICROPY_PY_DIR}/emitcommon.c
${MICROPY_PY_DIR}/emitglue.c
${MICROPY_PY_DIR}/emitinlinethumb.c
${MICROPY_PY_DIR}/emitinlinextensa.c
${MICROPY_PY_DIR}/emitnarm.c
${MICROPY_PY_DIR}/emitnthumb.c
${MICROPY_PY_DIR}/emitnx64.c
${MICROPY_PY_DIR}/emitnx86.c
${MICROPY_PY_DIR}/emitnxtensa.c
${MICROPY_PY_DIR}/emitnxtensawin.c
${MICROPY_PY_DIR}/formatfloat.c
${MICROPY_PY_DIR}/frozenmod.c
${MICROPY_PY_DIR}/gc.c
${MICROPY_PY_DIR}/lexer.c
${MICROPY_PY_DIR}/malloc.c
${MICROPY_PY_DIR}/map.c
${MICROPY_PY_DIR}/modarray.c
${MICROPY_PY_DIR}/modbuiltins.c
${MICROPY_PY_DIR}/modcmath.c
${MICROPY_PY_DIR}/modcollections.c
${MICROPY_PY_DIR}/modgc.c
${MICROPY_PY_DIR}/modio.c
${MICROPY_PY_DIR}/modmath.c
${MICROPY_PY_DIR}/modmicropython.c
${MICROPY_PY_DIR}/modstruct.c
${MICROPY_PY_DIR}/modsys.c
${MICROPY_PY_DIR}/modthread.c
${MICROPY_PY_DIR}/moduerrno.c
${MICROPY_PY_DIR}/mpprint.c
${MICROPY_PY_DIR}/mpstate.c
${MICROPY_PY_DIR}/mpz.c
${MICROPY_PY_DIR}/nativeglue.c
${MICROPY_PY_DIR}/nlr.c
${MICROPY_PY_DIR}/nlrpowerpc.c
${MICROPY_PY_DIR}/nlrsetjmp.c
${MICROPY_PY_DIR}/nlrthumb.c
${MICROPY_PY_DIR}/nlrx64.c
${MICROPY_PY_DIR}/nlrx86.c
${MICROPY_PY_DIR}/nlrxtensa.c
${MICROPY_PY_DIR}/obj.c
${MICROPY_PY_DIR}/objarray.c
${MICROPY_PY_DIR}/objattrtuple.c
${MICROPY_PY_DIR}/objbool.c
${MICROPY_PY_DIR}/objboundmeth.c
${MICROPY_PY_DIR}/objcell.c
${MICROPY_PY_DIR}/objclosure.c
${MICROPY_PY_DIR}/objcomplex.c
${MICROPY_PY_DIR}/objdeque.c
${MICROPY_PY_DIR}/objdict.c
${MICROPY_PY_DIR}/objenumerate.c
${MICROPY_PY_DIR}/objexcept.c
${MICROPY_PY_DIR}/objfilter.c
${MICROPY_PY_DIR}/objfloat.c
${MICROPY_PY_DIR}/objfun.c
${MICROPY_PY_DIR}/objgenerator.c
${MICROPY_PY_DIR}/objgetitemiter.c
${MICROPY_PY_DIR}/objint.c
${MICROPY_PY_DIR}/objint_longlong.c
${MICROPY_PY_DIR}/objint_mpz.c
${MICROPY_PY_DIR}/objlist.c
${MICROPY_PY_DIR}/objmap.c
${MICROPY_PY_DIR}/objmodule.c
${MICROPY_PY_DIR}/objnamedtuple.c
${MICROPY_PY_DIR}/objnone.c
${MICROPY_PY_DIR}/objobject.c
${MICROPY_PY_DIR}/objpolyiter.c
${MICROPY_PY_DIR}/objproperty.c
${MICROPY_PY_DIR}/objrange.c
${MICROPY_PY_DIR}/objreversed.c
${MICROPY_PY_DIR}/objset.c
${MICROPY_PY_DIR}/objsingleton.c
${MICROPY_PY_DIR}/objslice.c
${MICROPY_PY_DIR}/objstr.c
${MICROPY_PY_DIR}/objstringio.c
${MICROPY_PY_DIR}/objstrunicode.c
${MICROPY_PY_DIR}/objtuple.c
${MICROPY_PY_DIR}/objtype.c
${MICROPY_PY_DIR}/objzip.c
${MICROPY_PY_DIR}/opmethods.c
${MICROPY_PY_DIR}/pairheap.c
${MICROPY_PY_DIR}/parse.c
${MICROPY_PY_DIR}/parsenum.c
${MICROPY_PY_DIR}/parsenumbase.c
${MICROPY_PY_DIR}/persistentcode.c
${MICROPY_PY_DIR}/profile.c
${MICROPY_PY_DIR}/pystack.c
${MICROPY_PY_DIR}/qstr.c
${MICROPY_PY_DIR}/reader.c
${MICROPY_PY_DIR}/repl.c
${MICROPY_PY_DIR}/ringbuf.c
${MICROPY_PY_DIR}/runtime.c
${MICROPY_PY_DIR}/runtime_utils.c
${MICROPY_PY_DIR}/scheduler.c
${MICROPY_PY_DIR}/scope.c
${MICROPY_PY_DIR}/sequence.c
${MICROPY_PY_DIR}/showbc.c
${MICROPY_PY_DIR}/smallint.c
${MICROPY_PY_DIR}/stackctrl.c
${MICROPY_PY_DIR}/stream.c
${MICROPY_PY_DIR}/unicode.c
${MICROPY_PY_DIR}/vm.c
${MICROPY_PY_DIR}/vstr.c
${MICROPY_PY_DIR}/warning.c
)
# Helper macro to collect include directories and compile definitions for qstr processing.
macro(micropy_gather_target_properties targ)
if(TARGET ${targ})
get_target_property(type ${targ} TYPE)
set(_inc OFF)
set(_def OFF)
if(${type} STREQUAL STATIC_LIBRARY)
get_target_property(_inc ${targ} INCLUDE_DIRECTORIES)
get_target_property(_def ${targ} COMPILE_DEFINITIONS)
elseif(${type} STREQUAL INTERFACE_LIBRARY)
get_target_property(_inc ${targ} INTERFACE_INCLUDE_DIRECTORIES)
get_target_property(_def ${targ} INTERFACE_COMPILE_DEFINITIONS)
endif()
if(_inc)
list(APPEND MICROPY_CPP_INC_EXTRA ${_inc})
endif()
if(_def)
list(APPEND MICROPY_CPP_DEF_EXTRA ${_def})
endif()
endif()
endmacro()

View File

@ -66,6 +66,7 @@ PY_CORE_O_BASENAME = $(addprefix py/,\
nlrx86.o \
nlrx64.o \
nlrthumb.o \
nlraarch64.o \
nlrpowerpc.o \
nlrxtensa.o \
nlrsetjmp.o \

View File

@ -105,7 +105,7 @@ void mp_init(void) {
#endif
// init global module dict
mp_obj_dict_init(&MP_STATE_VM(mp_loaded_modules_dict), 3);
mp_obj_dict_init(&MP_STATE_VM(mp_loaded_modules_dict), MICROPY_LOADED_MODULES_DICT_SIZE);
// initialise the __main__ module
mp_obj_dict_init(&MP_STATE_VM(dict_main), 1);
@ -389,7 +389,9 @@ mp_obj_t PLACE_IN_ITCM(mp_binary_op)(mp_binary_op_t op, mp_obj_t lhs, mp_obj_t r
if (rhs_val < 0) {
// negative shift not allowed
mp_raise_ValueError(MP_ERROR_TEXT("negative shift count"));
} else if (rhs_val >= (mp_int_t)BITS_PER_WORD || lhs_val > (MP_SMALL_INT_MAX >> rhs_val) || lhs_val < (MP_SMALL_INT_MIN >> rhs_val)) {
} else if (rhs_val >= (mp_int_t)(sizeof(lhs_val) * MP_BITS_PER_BYTE)
|| lhs_val > (MP_SMALL_INT_MAX >> rhs_val)
|| lhs_val < (MP_SMALL_INT_MIN >> rhs_val)) {
// left-shift will overflow, so use higher precision integer
lhs = mp_obj_new_int_from_ll(lhs_val);
goto generic_binary_op;
@ -406,10 +408,10 @@ mp_obj_t PLACE_IN_ITCM(mp_binary_op)(mp_binary_op_t op, mp_obj_t lhs, mp_obj_t r
mp_raise_ValueError(MP_ERROR_TEXT("negative shift count"));
} else {
// standard precision is enough for right-shift
if (rhs_val >= (mp_int_t)BITS_PER_WORD) {
if (rhs_val >= (mp_int_t)(sizeof(lhs_val) * MP_BITS_PER_BYTE)) {
// Shifting to big amounts is underfined behavior
// in C and is CPU-dependent; propagate sign bit.
rhs_val = BITS_PER_WORD - 1;
rhs_val = sizeof(lhs_val) * MP_BITS_PER_BYTE - 1;
}
lhs_val >>= rhs_val;
}
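A standalone sketch of the overflow guard above, with plain C types in place of mp_int_t and made-up limits in place of MP_SMALL_INT_MAX/MIN: a left shift stays in native precision only when the count is below the operand width and the result provably fits; otherwise the code above promotes to a big integer. Like the real code, it assumes arithmetic right shift of negative values.

#include <stdint.h>
#include <stdio.h>

#define FAKE_SMALL_INT_MAX (INTPTR_MAX >> 1) // illustrative stand-ins for the
#define FAKE_SMALL_INT_MIN (INTPTR_MIN >> 1) // real small-int range limits

static int shl_fits_small_int(intptr_t lhs, intptr_t rhs) {
    // Checking the shift count first avoids the undefined full-width shift.
    return rhs < (intptr_t)(sizeof(lhs) * 8)
           && lhs <= (FAKE_SMALL_INT_MAX >> rhs)
           && lhs >= (FAKE_SMALL_INT_MIN >> rhs);
}

int main(void) {
    printf("%d\n", shl_fits_small_int(1, 10));                  // 1: fits
    printf("%d\n", shl_fits_small_int(1, 200));                 // 0: count too large
    printf("%d\n", shl_fits_small_int(FAKE_SMALL_INT_MAX, 1));  // 0: would overflow
    return 0;
}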

View File

@ -36,17 +36,17 @@
// In SMALL_INT, next-to-highest bits is used as sign, so both must match for value in range
#if MICROPY_OBJ_REPR == MICROPY_OBJ_REPR_A || MICROPY_OBJ_REPR == MICROPY_OBJ_REPR_C
#define MP_SMALL_INT_MIN ((mp_int_t)(((mp_int_t)WORD_MSBIT_HIGH) >> 1))
#define MP_SMALL_INT_FITS(n) ((((n) ^ ((n) << 1)) & WORD_MSBIT_HIGH) == 0)
#define MP_SMALL_INT_MIN ((mp_int_t)(((mp_int_t)MP_OBJ_WORD_MSBIT_HIGH) >> 1))
#define MP_SMALL_INT_FITS(n) ((((n) ^ ((n) << 1)) & MP_OBJ_WORD_MSBIT_HIGH) == 0)
// Mask to truncate mp_int_t to positive value
#define MP_SMALL_INT_POSITIVE_MASK ~(WORD_MSBIT_HIGH | (WORD_MSBIT_HIGH >> 1))
#define MP_SMALL_INT_POSITIVE_MASK ~(MP_OBJ_WORD_MSBIT_HIGH | (MP_OBJ_WORD_MSBIT_HIGH >> 1))
#elif MICROPY_OBJ_REPR == MICROPY_OBJ_REPR_B
#define MP_SMALL_INT_MIN ((mp_int_t)(((mp_int_t)WORD_MSBIT_HIGH) >> 2))
#define MP_SMALL_INT_MIN ((mp_int_t)(((mp_int_t)MP_OBJ_WORD_MSBIT_HIGH) >> 2))
#define MP_SMALL_INT_FITS(n) ((((n) & MP_SMALL_INT_MIN) == 0) || (((n) & MP_SMALL_INT_MIN) == MP_SMALL_INT_MIN))
// Mask to truncate mp_int_t to positive value
#define MP_SMALL_INT_POSITIVE_MASK ~(WORD_MSBIT_HIGH | (WORD_MSBIT_HIGH >> 1) | (WORD_MSBIT_HIGH >> 2))
#define MP_SMALL_INT_POSITIVE_MASK ~(MP_OBJ_WORD_MSBIT_HIGH | (MP_OBJ_WORD_MSBIT_HIGH >> 1) | (MP_OBJ_WORD_MSBIT_HIGH >> 2))
#elif MICROPY_OBJ_REPR == MICROPY_OBJ_REPR_D

py/usermod.cmake (new file, 52 lines)
View File

@ -0,0 +1,52 @@
# Create a target for all user modules to link against.
add_library(usermod INTERFACE)
function(usermod_gather_sources SOURCES_VARNAME INCLUDE_DIRECTORIES_VARNAME INCLUDED_VARNAME LIB)
if (NOT ${LIB} IN_LIST ${INCLUDED_VARNAME})
list(APPEND ${INCLUDED_VARNAME} ${LIB})
# Gather library sources
get_target_property(lib_sources ${LIB} INTERFACE_SOURCES)
if (lib_sources)
list(APPEND ${SOURCES_VARNAME} ${lib_sources})
endif()
# Gather library includes
get_target_property(lib_include_directories ${LIB} INTERFACE_INCLUDE_DIRECTORIES)
if (lib_include_directories)
list(APPEND ${INCLUDE_DIRECTORIES_VARNAME} ${lib_include_directories})
endif()
# Recurse linked libraries
get_target_property(trans_depend ${LIB} INTERFACE_LINK_LIBRARIES)
if (trans_depend)
foreach(SUB_LIB ${trans_depend})
usermod_gather_sources(
${SOURCES_VARNAME}
${INCLUDE_DIRECTORIES_VARNAME}
${INCLUDED_VARNAME}
${SUB_LIB})
endforeach()
endif()
set(${SOURCES_VARNAME} ${${SOURCES_VARNAME}} PARENT_SCOPE)
set(${INCLUDE_DIRECTORIES_VARNAME} ${${INCLUDE_DIRECTORIES_VARNAME}} PARENT_SCOPE)
set(${INCLUDED_VARNAME} ${${INCLUDED_VARNAME}} PARENT_SCOPE)
endif()
endfunction()
# Include CMake files for user modules.
if (USER_C_MODULES)
foreach(USER_C_MODULE_PATH ${USER_C_MODULES})
message("Including User C Module(s) from ${USER_C_MODULE_PATH}")
include(${USER_C_MODULE_PATH})
endforeach()
endif()
# Recursively gather sources for QSTR scanning - doesn't support generator expressions.
usermod_gather_sources(MICROPY_SOURCE_USERMOD MICROPY_INC_USERMOD found_modules usermod)
# Report found modules.
list(REMOVE_ITEM found_modules "usermod")
list(JOIN found_modules ", " found_modules)
message("Found User C Module(s): ${found_modules}")

View File

@ -41,7 +41,12 @@
// *FORMAT-OFF*
#if 0
#define TRACE(ip) printf("sp=%d ", (int)(sp - &code_state->state[0] + 1)); mp_bytecode_print2(&mp_plat_print, ip, 1, code_state->fun_bc->const_table);
#if MICROPY_PY_THREAD
#define TRACE_PREFIX mp_printf(&mp_plat_print, "ts=%p sp=%d ", mp_thread_get_state(), (int)(sp - &code_state->state[0] + 1))
#else
#define TRACE_PREFIX mp_printf(&mp_plat_print, "sp=%d ", (int)(sp - &code_state->state[0] + 1))
#endif
#define TRACE(ip) TRACE_PREFIX; mp_bytecode_print2(&mp_plat_print, ip, 1, code_state->fun_bc->const_table);
#else
#define TRACE(ip)
#endif

View File

@ -1,15 +1,15 @@
This directory contains tests for various functionality areas of MicroPython.
To run all stable tests, run "run-tests" script in this directory.
To run all stable tests, run "run-tests.py" script in this directory.
Tests of capabilities not supported on all platforms should be written
to check for the capability being present. If it is not, the test
should merely output 'SKIP' followed by the line terminator, and call
sys.exit() to raise SystemExit, instead of attempting to test the
missing capability. The testing framework (run-tests in this
missing capability. The testing framework (run-tests.py in this
directory, test_main.c in qemu_arm) recognizes this as a skipped test.
There are a few features for which this mechanism cannot be used to
condition a test. The run-tests script uses small scripts in the
condition a test. The run-tests.py script uses small scripts in the
feature_check directory to check whether each such feature is present,
and skips the relevant tests if not.

View File

@ -8,3 +8,7 @@ x = 0x8000000000000000
print((x + 1) // x)
x = 0x86c60128feff5330
print((x + 1) // x)
# these check edge cases where borrow overflows
print((2 ** 48 - 1) ** 2 // (2 ** 48 - 1))
print((2 ** 256 - 2 ** 32) ** 2 // (2 ** 256 - 2 ** 32))

View File

@ -1,2 +1,2 @@
# cmdline: -c print("test") -i
# cmdline: -i -c print("test")
# -c option combined with -i option results in REPL

View File

@ -1,6 +1,6 @@
test
MicroPython \.\+ version
Use \.\+
>>> # cmdline: -c print("test") -i
>>> # cmdline: -i -c print("test")
>>> # -c option combined with -i option results in REPL
>>>

View File

@ -0,0 +1,25 @@
# Test current_task() function
try:
import uasyncio as asyncio
except ImportError:
try:
import asyncio
except ImportError:
print("SKIP")
raise SystemExit
async def task(result):
result[0] = asyncio.current_task()
async def main():
result = [None]
t = asyncio.create_task(task(result))
await asyncio.sleep(0)
await asyncio.sleep(0)
print(t is result[0])
asyncio.run(main())

View File

@ -0,0 +1 @@
True

View File

@ -114,3 +114,4 @@ try:
f.write(bytearray(bsize * free))
except OSError as e:
print("ENOSPC:", e.args[0] == 28) # uerrno.ENOSPC
f.close()

View File

@ -8,6 +8,15 @@ except (ImportError, AttributeError):
print("SKIP")
raise SystemExit
# We need a directory for testing that doesn't already exist.
# Skip the test if it does exist.
temp_dir = "micropy_test_dir"
try:
uos.stat(temp_dir)
print("SKIP")
raise SystemExit
except OSError:
pass
# getcwd and chdir
curdir = uos.getcwd()
@ -21,3 +30,59 @@ print(type(uos.stat("/")))
# listdir and ilistdir
print(type(uos.listdir("/")))
# mkdir
uos.mkdir(temp_dir)
# file create
f = open(temp_dir + "/test", "w")
f.write("hello")
f.close()
# close on a closed file should succeed
f.close()
# construct a file object using the type constructor, with a raw fileno
f = type(f)(2)
print(f)
# file read
f = open(temp_dir + "/test", "r")
print(f.read())
f.close()
# rename
uos.rename(temp_dir + "/test", temp_dir + "/test2")
print(uos.listdir(temp_dir))
# construct new VfsPosix with path argument
vfs = uos.VfsPosix(temp_dir)
print(list(i[0] for i in vfs.ilistdir(".")))
# stat, statvfs
print(type(vfs.stat(".")))
print(type(vfs.statvfs(".")))
# check types of ilistdir with str/bytes arguments
print(type(list(vfs.ilistdir("."))[0][0]))
print(type(list(vfs.ilistdir(b"."))[0][0]))
# remove
uos.remove(temp_dir + "/test2")
print(uos.listdir(temp_dir))
# remove with error
try:
uos.remove(temp_dir + "/test2")
except OSError:
print("remove OSError")
# rmdir
uos.rmdir(temp_dir)
print(temp_dir in uos.listdir())
# rmdir with error
try:
uos.rmdir(temp_dir)
except OSError:
print("rmdir OSError")

View File

@ -2,3 +2,15 @@
True
<class 'tuple'>
<class 'list'>
<io.TextIOWrapper 2>
hello
['test2']
['test2']
<class 'tuple'>
<class 'tuple'>
<class 'str'>
<class 'bytes'>
[]
remove OSError
False
rmdir OSError

View File

@ -1,4 +1,4 @@
This directory doesn't contain real tests, but code snippets to detect
various interpreter features, which can't be/inconvenient to detecte by
other means. Scripts here are executed by run-tests at the beginning of
other means. Scripts here are executed by run-tests.py at the beginning of
testsuite to decide what other test groups to run/exclude.

View File

@ -1,3 +1,6 @@
# check if async/await keywords are supported
async def foo():
await 1
print("async")

View File

@ -1 +1,2 @@
x = const(1)
print(x)

View File

@ -2,3 +2,7 @@
@micropython.native
def f():
pass
f()
print("native")

View File

@ -1,2 +1,2 @@
# check if set literal syntax is supported
{1}
print({1})

View File

@ -44,3 +44,6 @@ try:
except OSError:
print("OSError")
f.close()
# close() on a closed file
f.close()

View File

@ -8,4 +8,4 @@ of JVM.
For example, for OpenJDK 7 on x86_64, following may work:
LD_LIBRARY_PATH=/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/amd64/server ./run-tests jni/*.py
LD_LIBRARY_PATH=/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/amd64/server ./run-tests.py jni/*.py

View File

@ -67,7 +67,7 @@ def main():
cmd_parser.add_argument("files", nargs="*", help="input test files")
args = cmd_parser.parse_args()
# Note pyboard support is copied over from run-tests, not testes, and likely needs revamping
# Note pyboard support is copied over from run-tests.py, not tests, and likely needs revamping
if args.pyboard:
import pyboard

View File

@ -1,731 +0,0 @@
#! /usr/bin/env python3
import os
import subprocess
import sys
import platform
import argparse
import inspect
import re
import threading
import multiprocessing
from multiprocessing.pool import ThreadPool
from glob import glob
# See stackoverflow.com/questions/2632199: __file__ nor sys.argv[0]
# are guaranteed to always work, this one should though.
BASEPATH = os.path.dirname(os.path.abspath(inspect.getsourcefile(lambda: None)))
def base_path(*p):
return os.path.abspath(os.path.join(BASEPATH, *p)).replace('\\', '/')
# Tests require at least CPython 3.3. If your default python3 executable
# is of lower version, you can point MICROPY_CPYTHON3 environment var
# to the correct executable.
if os.name == 'nt':
CPYTHON3 = os.getenv('MICROPY_CPYTHON3', 'python')
MICROPYTHON = os.getenv('MICROPY_MICROPYTHON', base_path('../ports/windows/micropython.exe'))
else:
CPYTHON3 = os.getenv('MICROPY_CPYTHON3', 'python3')
MICROPYTHON = os.getenv('MICROPY_MICROPYTHON', base_path('../ports/unix/micropython'))
# Use CPython options to not save .pyc files, to only access the core standard library
# (not site packages which may clash with u-module names), and improve start up time.
CPYTHON3_CMD = [CPYTHON3, "-Wignore", "-BS"]
# mpy-cross is only needed if --via-mpy command-line arg is passed
MPYCROSS = os.getenv('MICROPY_MPYCROSS', base_path('../mpy-cross/mpy-cross'))
# For diff'ing test output
DIFF = os.getenv('MICROPY_DIFF', 'diff -u')
# Set PYTHONIOENCODING so that CPython will use utf-8 on systems which set another encoding in the locale
os.environ['PYTHONIOENCODING'] = 'utf-8'
def rm_f(fname):
if os.path.exists(fname):
os.remove(fname)
# unescape wanted regex chars and escape unwanted ones
def convert_regex_escapes(line):
cs = []
escape = False
for c in str(line, 'utf8'):
if escape:
escape = False
cs.append(c)
elif c == '\\':
escape = True
elif c in ('(', ')', '[', ']', '{', '}', '.', '*', '+', '^', '$'):
cs.append('\\' + c)
else:
cs.append(c)
# accept carriage-return(s) before final newline
if cs[-1] == '\n':
cs[-1] = '\r*\n'
return bytes(''.join(cs), 'utf8')
def run_micropython(pyb, args, test_file, is_special=False):
special_tests = (
'micropython/meminfo.py', 'basics/bytes_compare3.py',
'basics/builtin_help.py', 'thread/thread_exc2.py',
'esp32/partition_ota.py',
)
had_crash = False
if pyb is None:
# run on PC
if test_file.startswith(('cmdline/', base_path('feature_check/'))) or test_file in special_tests:
# special handling for tests of the unix cmdline program
is_special = True
if is_special:
# check for any cmdline options needed for this test
args = [MICROPYTHON]
with open(test_file, 'rb') as f:
line = f.readline()
if line.startswith(b'# cmdline:'):
# subprocess.check_output on Windows only accepts strings, not bytes
args += [str(c, 'utf-8') for c in line[10:].strip().split()]
# run the test, possibly with redirected input
try:
if 'repl_' in test_file:
# Need to use a PTY to test command line editing
try:
import pty
except ImportError:
# in case pty module is not available, like on Windows
return b'SKIP\n'
import select
def get(required=False):
rv = b''
while True:
ready = select.select([emulator], [], [], 0.02)
if ready[0] == [emulator]:
rv += os.read(emulator, 1024)
else:
if not required or rv:
return rv
def send_get(what):
os.write(emulator, what)
return get()
with open(test_file, 'rb') as f:
# instead of: output_mupy = subprocess.check_output(args, stdin=f)
# openpty returns two read/write file descriptors. The first one is
# used by the program which provides the virtual
# terminal service, and the second one is used by the
# subprogram which requires a tty to work.
emulator, subterminal = pty.openpty()
p = subprocess.Popen(args, stdin=subterminal, stdout=subterminal,
stderr=subprocess.STDOUT, bufsize=0)
banner = get(True)
output_mupy = banner + b''.join(send_get(line) for line in f)
send_get(b'\x04') # exit the REPL, so coverage info is saved
# At this point the process might have exited already, but trying to
# kill it 'again' normally doesn't result in exceptions as Python and/or
# the OS seem to try to handle this nicely. When running Linux on WSL
# though, the situation differs and calling Popen.kill after the process
# terminated results in a ProcessLookupError. Just catch that one here
# since we just want the process to be gone and that's the case.
try:
p.kill()
except ProcessLookupError:
pass
os.close(emulator)
os.close(subterminal)
else:
output_mupy = subprocess.check_output(args + [test_file], env={}, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as error:
return error.output + b'CRASH'
else:
# a standard test run on PC
# create system command
cmdlist = [MICROPYTHON, '-X', 'emit=' + args.emit]
if args.heapsize is not None:
cmdlist.extend(['-X', 'heapsize=' + args.heapsize])
# if running via .mpy, first compile the .py file
if args.via_mpy:
subprocess.check_output([MPYCROSS] + args.mpy_cross_flags.split() + ['-o', 'mpytest.mpy', '-X', 'emit=' + args.emit, test_file])
cmdlist.extend(['-m', 'mpytest'])
else:
cmdlist.append(test_file)
# run the actual test
e = {"LANG": "en_US.UTF-8",
"MICROPYPATH": os.environ["MICROPYPATH"]}
try:
output_mupy = subprocess.check_output(cmdlist, env=e, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as error:
had_crash = True
output_mupy = error.output + b'CRASH'
# clean up if we had an intermediate .mpy file
if args.via_mpy:
rm_f('mpytest.mpy')
else:
# run on pyboard
pyb.enter_raw_repl()
try:
output_mupy = pyb.execfile(test_file)
except pyboard.PyboardError as e:
had_crash = True
if not is_special and e.args[0] == 'exception':
output_mupy = e.args[1] + e.args[2] + b'CRASH'
else:
output_mupy = b'CRASH'
# canonical form for all ports/platforms is to use \n for end-of-line
output_mupy = output_mupy.replace(b'\r\n', b'\n')
# don't try to convert the output if we should skip this test
if had_crash or output_mupy in (b'SKIP\n', b'CRASH'):
return output_mupy
if is_special or test_file in special_tests:
# convert parts of the output that are not stable across runs
with open(test_file + '.exp', 'rb') as f:
lines_exp = []
for line in f.readlines():
if line == b'########\n':
line = (line,)
else:
line = (line, re.compile(convert_regex_escapes(line)))
lines_exp.append(line)
lines_mupy = [line + b'\n' for line in output_mupy.split(b'\n')]
if output_mupy.endswith(b'\n'):
lines_mupy = lines_mupy[:-1] # remove erroneous last empty line
i_mupy = 0
for i in range(len(lines_exp)):
if lines_exp[i][0] == b'########\n':
# 8x #'s means match 0 or more whole lines
line_exp = lines_exp[i + 1]
skip = 0
while i_mupy + skip < len(lines_mupy) and not line_exp[1].match(lines_mupy[i_mupy + skip]):
skip += 1
if i_mupy + skip >= len(lines_mupy):
lines_mupy[i_mupy] = b'######## FAIL\n'
break
del lines_mupy[i_mupy:i_mupy + skip]
lines_mupy.insert(i_mupy, b'########\n')
i_mupy += 1
else:
# a regex
if lines_exp[i][1].match(lines_mupy[i_mupy]):
lines_mupy[i_mupy] = lines_exp[i][0]
else:
# print("don't match: %r %s" % (lines_exp[i][0], lines_mupy[i_mupy])) # DEBUG
pass
i_mupy += 1
if i_mupy >= len(lines_mupy):
break
output_mupy = b''.join(lines_mupy)
return output_mupy
def run_feature_check(pyb, args, base_path, test_file):
if pyb is not None and test_file.startswith("repl_"):
# REPL feature tests will not run via pyboard because they require prompt interactivity
return b""
return run_micropython(pyb, args, base_path("feature_check", test_file), is_special=True)
class ThreadSafeCounter:
def __init__(self, start=0):
self._value = start
self._lock = threading.Lock()
def add(self, to_add):
with self._lock: self._value += to_add
def append(self, arg):
self.add([arg])
@property
def value(self):
return self._value
def run_tests(pyb, tests, args, result_dir, num_threads=1):
test_count = ThreadSafeCounter()
testcase_count = ThreadSafeCounter()
passed_count = ThreadSafeCounter()
failed_tests = ThreadSafeCounter([])
skipped_tests = ThreadSafeCounter([])
skip_tests = set()
skip_native = False
skip_int_big = False
skip_bytearray = False
skip_set_type = False
skip_slice = False
skip_async = False
skip_const = False
skip_revops = False
skip_io_module = False
skip_endian = False
has_complex = True
has_coverage = False
upy_float_precision = 32
# If we're asked to --list-tests, we can't assume that there's a
# connection to target, so we can't run feature checks usefully.
if not (args.list_tests or args.write_exp):
# Even if we run completely different tests in a different directory,
# we need to access feature_checks from the same directory as the
# run-tests script itself so use base_path.
# Check if micropython.native is supported, and skip such tests if it's not
output = run_feature_check(pyb, args, base_path, 'native_check.py')
if output == b'CRASH':
skip_native = True
# Check if arbitrary-precision integers are supported, and skip such tests if it's not
output = run_feature_check(pyb, args, base_path, 'int_big.py')
if output != b'1000000000000000000000000000000000000000000000\n':
skip_int_big = True
# Check if bytearray is supported, and skip such tests if it's not
output = run_feature_check(pyb, args, base_path, 'bytearray.py')
if output != b'bytearray\n':
skip_bytearray = True
# Check if set type (and set literals) is supported, and skip such tests if it's not
output = run_feature_check(pyb, args, base_path, 'set_check.py')
if output == b'CRASH':
skip_set_type = True
# Check if slice is supported, and skip such tests if it's not
output = run_feature_check(pyb, args, base_path, 'slice.py')
if output != b'slice\n':
skip_slice = True
# Check if async/await keywords are supported, and skip such tests if it's not
output = run_feature_check(pyb, args, base_path, 'async_check.py')
if output == b'CRASH':
skip_async = True
# Check if const keyword (MicroPython extension) is supported, and skip such tests if it's not
output = run_feature_check(pyb, args, base_path, 'const.py')
if output == b'CRASH':
skip_const = True
# Check if __rOP__ special methods are supported, and skip such tests if it's not
output = run_feature_check(pyb, args, base_path, 'reverse_ops.py')
if output == b'TypeError\n':
skip_revops = True
# Check if uio module exists, and skip such tests if it doesn't
output = run_feature_check(pyb, args, base_path, 'uio_module.py')
if output != b'uio\n':
skip_io_module = True
# Check if emacs repl is supported, and skip such tests if it's not
t = run_feature_check(pyb, args, base_path, 'repl_emacs_check.py')
if 'True' not in str(t, 'ascii'):
skip_tests.add('cmdline/repl_emacs_keys.py')
# Check if words movement in repl is supported, and skip such tests if it's not
t = run_feature_check(pyb, args, base_path, 'repl_words_move_check.py')
if 'True' not in str(t, 'ascii'):
skip_tests.add('cmdline/repl_words_move.py')
upy_byteorder = run_feature_check(pyb, args, base_path, 'byteorder.py')
upy_float_precision = run_feature_check(pyb, args, base_path, 'float.py')
if upy_float_precision == b'CRASH':
upy_float_precision = 0
else:
upy_float_precision = int(upy_float_precision)
has_complex = run_feature_check(pyb, args, base_path, 'complex.py') == b'complex\n'
has_coverage = run_feature_check(pyb, args, base_path, 'coverage.py') == b'coverage\n'
cpy_byteorder = subprocess.check_output(CPYTHON3_CMD + [base_path('feature_check/byteorder.py')])
skip_endian = (upy_byteorder != cpy_byteorder)
# These tests don't test slice explicitly but rather use it to perform the test
misc_slice_tests = (
'builtin_range',
'class_super',
'containment',
'errno1',
'fun_str',
'generator1',
'globals_del',
'memoryview1',
'memoryview_gc',
'object1',
'python34',
'struct_endian',
)
# Some tests shouldn't be run on GitHub Actions
if os.getenv('GITHUB_ACTIONS') == 'true':
skip_tests.add('thread/stress_schedule.py') # has reliability issues
if upy_float_precision == 0:
skip_tests.add('extmod/uctypes_le_float.py')
skip_tests.add('extmod/uctypes_native_float.py')
skip_tests.add('extmod/uctypes_sizeof_float.py')
skip_tests.add('extmod/ujson_dumps_float.py')
skip_tests.add('extmod/ujson_loads_float.py')
skip_tests.add('extmod/urandom_extra_float.py')
skip_tests.add('misc/rge_sm.py')
if upy_float_precision < 32:
skip_tests.add('float/float2int_intbig.py') # requires fp32, there's float2int_fp30_intbig.py instead
skip_tests.add('float/string_format.py') # requires fp32, there's string_format_fp30.py instead
skip_tests.add('float/bytes_construct.py') # requires fp32
skip_tests.add('float/bytearray_construct.py') # requires fp32
if upy_float_precision < 64:
skip_tests.add('float/float_divmod.py') # tested by float/float_divmod_relaxed.py instead
skip_tests.add('float/float2int_doubleprec_intbig.py')
skip_tests.add('float/float_parse_doubleprec.py')
if not has_complex:
skip_tests.add('float/complex1.py')
skip_tests.add('float/complex1_intbig.py')
skip_tests.add('float/complex_special_methods.py')
skip_tests.add('float/int_big_float.py')
skip_tests.add('float/true_value.py')
skip_tests.add('float/types.py')
if not has_coverage:
skip_tests.add('cmdline/cmd_parsetree.py')
# Some tests shouldn't be run on a PC
if args.target == 'unix':
# unix build does not have the GIL so can't run thread mutation tests
for t in tests:
if t.startswith('thread/mutate_'):
skip_tests.add(t)
# Some tests shouldn't be run on pyboard
if args.target != 'unix':
skip_tests.add('basics/exception_chain.py') # warning is not printed
skip_tests.add('micropython/meminfo.py') # output is very different to PC output
skip_tests.add('extmod/machine_mem.py') # raw memory access not supported
if args.target == 'wipy':
skip_tests.add('misc/print_exception.py') # requires error reporting full
skip_tests.update({'extmod/uctypes_%s.py' % t for t in 'bytearray le native_le ptr_le ptr_native_le sizeof sizeof_native array_assign_le array_assign_native_le'.split()}) # requires uctypes
skip_tests.add('extmod/zlibd_decompress.py') # requires zlib
skip_tests.add('extmod/uheapq1.py') # uheapq not supported by WiPy
skip_tests.add('extmod/urandom_basic.py') # requires urandom
skip_tests.add('extmod/urandom_extra.py') # requires urandom
elif args.target == 'esp8266':
skip_tests.add('misc/rge_sm.py') # too large
elif args.target == 'minimal':
skip_tests.add('basics/class_inplace_op.py') # all special methods not supported
skip_tests.add('basics/subclass_native_init.py') # native subclassing corner cases not supported
skip_tests.add('misc/rge_sm.py') # too large
skip_tests.add('micropython/opt_level.py') # don't assume line numbers are stored
elif args.target == 'nrf':
skip_tests.add('basics/memoryview1.py') # no item assignment for memoryview
skip_tests.add('extmod/urandom_basic.py') # unimplemented: urandom.seed
skip_tests.add('micropython/opt_level.py') # no support for line numbers
skip_tests.add('misc/non_compliant.py') # no item assignment for bytearray
for t in tests:
if t.startswith('basics/io_'):
skip_tests.add(t)
elif args.target == 'qemu-arm':
skip_tests.add('misc/print_exception.py') # requires sys stdfiles
# Some tests are known to fail on 64-bit machines
if pyb is None and platform.architecture()[0] == '64bit':
pass
# Some tests use unsupported features on Windows
if os.name == 'nt':
skip_tests.add('import/import_file.py') # works but CPython prints forward slashes
# Some tests are known to fail with native emitter
# Remove them from the below when they work
if args.emit == 'native':
skip_tests.update({'basics/%s.py' % t for t in 'gen_yield_from_close generator_name'.split()}) # require raise_varargs, generator name
skip_tests.update({'basics/async_%s.py' % t for t in 'with with2 with_break with_return'.split()}) # require async_with
skip_tests.update({'basics/%s.py' % t for t in 'try_reraise try_reraise2'.split()}) # require raise_varargs
skip_tests.add('basics/annotate_var.py') # requires checking for unbound local
skip_tests.add('basics/del_deref.py') # requires checking for unbound local
skip_tests.add('basics/del_local.py') # requires checking for unbound local
skip_tests.add('basics/exception_chain.py') # raise from is not supported
skip_tests.add('basics/scope_implicit.py') # requires checking for unbound local
skip_tests.add('basics/try_finally_return2.py') # requires raise_varargs
skip_tests.add('basics/unboundlocal.py') # requires checking for unbound local
skip_tests.add('extmod/uasyncio_event.py') # unknown issue
skip_tests.add('extmod/uasyncio_lock.py') # requires async with
skip_tests.add('extmod/uasyncio_micropython.py') # unknown issue
skip_tests.add('extmod/uasyncio_wait_for.py') # unknown issue
skip_tests.add('misc/features.py') # requires raise_varargs
skip_tests.add('misc/print_exception.py') # because native doesn't have proper traceback info
skip_tests.add('misc/sys_exc_info.py') # sys.exc_info() is not supported for native
skip_tests.add('micropython/emg_exc.py') # because native doesn't have proper traceback info
skip_tests.add('micropython/heapalloc_traceback.py') # because native doesn't have proper traceback info
skip_tests.add('micropython/opt_level_lineno.py') # native doesn't have proper traceback info
skip_tests.add('micropython/schedule.py') # native code doesn't check pending events
def run_one_test(test_file):
test_file = test_file.replace('\\', '/')
if args.filters:
# Default verdict is the opposite of the first action
verdict = "include" if args.filters[0][0] == "exclude" else "exclude"
for action, pat in args.filters:
if pat.search(test_file):
verdict = action
if verdict == "exclude":
return
test_basename = test_file.replace('..', '_').replace('./', '').replace('/', '_')
test_name = os.path.splitext(os.path.basename(test_file))[0]
is_native = test_name.startswith("native_") or test_name.startswith("viper_") or args.emit == "native"
is_endian = test_name.endswith("_endian")
is_int_big = test_name.startswith("int_big") or test_name.endswith("_intbig")
is_bytearray = test_name.startswith("bytearray") or test_name.endswith("_bytearray")
is_set_type = test_name.startswith("set_") or test_name.startswith("frozenset")
is_slice = test_name.find("slice") != -1 or test_name in misc_slice_tests
is_async = test_name.startswith("async_")
is_const = test_name.startswith("const")
is_io_module = test_name.startswith("io_")
skip_it = test_file in skip_tests
skip_it |= skip_native and is_native
skip_it |= skip_endian and is_endian
skip_it |= skip_int_big and is_int_big
skip_it |= skip_bytearray and is_bytearray
skip_it |= skip_set_type and is_set_type
skip_it |= skip_slice and is_slice
skip_it |= skip_async and is_async
skip_it |= skip_const and is_const
skip_it |= skip_revops and "reverse_op" in test_name
skip_it |= skip_io_module and is_io_module
if args.list_tests:
if not skip_it:
print(test_file)
return
if skip_it:
print("skip ", test_file)
skipped_tests.append(test_name)
return
# get expected output
test_file_expected = test_file + '.exp'
if os.path.isfile(test_file_expected):
# expected output given by a file, so read that in
with open(test_file_expected, 'rb') as f:
output_expected = f.read()
else:
e = {"PYTHONPATH": os.getcwd(),
"PATH": os.environ["PATH"],
"LANG": "en_US.UTF-8"}
# run CPython to work out expected output
try:
output_expected = subprocess.check_output(CPYTHON3_CMD + [test_file], env=e, stderr=subprocess.STDOUT)
if args.write_exp:
with open(test_file_expected, 'wb') as f:
f.write(output_expected)
except subprocess.CalledProcessError as error:
output_expected = error.output + b'CPYTHON3 CRASH'
# canonical form for all host platforms is to use \n for end-of-line
output_expected = output_expected.replace(b'\r\n', b'\n')
if args.write_exp:
return
# run MicroPython
output_mupy = run_micropython(pyb, args, test_file)
if output_mupy == b'SKIP\n':
print("skip ", test_file)
skipped_tests.append(test_name)
return
testcase_count.add(len(output_expected.splitlines()))
filename_expected = os.path.join(result_dir, test_basename + ".exp")
filename_mupy = os.path.join(result_dir, test_basename + ".out")
if output_expected == output_mupy:
# print("pass ", test_file)
passed_count.add(1)
rm_f(filename_expected)
rm_f(filename_mupy)
else:
with open(filename_expected, "wb") as f:
f.write(output_expected)
with open(filename_mupy, "wb") as f:
f.write(output_mupy)
print("### Expected")
print(output_expected)
print("### Actual")
print(output_mupy)
print("FAIL ", test_file)
failed_tests.append(test_file)
test_count.add(1)
if args.list_tests:
return True
if num_threads > 1:
pool = ThreadPool(num_threads)
pool.map(run_one_test, tests)
else:
for test in tests:
run_one_test(test)
print("{} tests performed ({} individual testcases)".format(test_count.value, testcase_count.value))
print("{} tests passed".format(passed_count.value))
if len(skipped_tests.value) > 0:
print("{} tests skipped: {}".format(len(skipped_tests.value), ' '.join(sorted(skipped_tests.value))))
if len(failed_tests.value) > 0:
print("{} tests failed: {}".format(len(failed_tests.value), ' '.join(sorted(failed_tests.value))))
return False
# all tests succeeded
return True
class append_filter(argparse.Action):
def __init__(self, option_strings, dest, **kwargs):
super().__init__(option_strings, dest, default=[], **kwargs)
def __call__(self, parser, args, value, option):
if not hasattr(args, self.dest):
args.filters = []
if option.startswith(("-e", "--e")):
option = "exclude"
else:
option = "include"
args.filters.append((option, re.compile(value)))
def main():
cmd_parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description='''Run and manage tests for MicroPython.
Tests are discovered by scanning test directories for .py files or using the
specified test files. If neither test files nor directories are specified, the script
expects to be run in the tests directory (where this file is located) and the
builtin tests suitable for the target platform are run.
When running tests, run-tests compares the MicroPython output of the test with the output
produced by running the test through CPython unless a <test>.exp file is found, in which
case it is used as comparison.
If a test fails, run-tests produces a pair of <test>.out and <test>.exp files in the result
directory with the MicroPython output and the expectations, respectively.
''',
epilog='''\
Options -i and -e can be given multiple times and are processed in the order given.
Regex "search" (vs "match") operation is used. The action (include/exclude) of
the last matching regex is used:
run-tests -i async - exclude all, then include tests containing "async" anywhere
run-tests -e '/big.+int' - include all, then exclude by regex
run-tests -e async -i async_foo - include all, exclude async, yet still include async_foo
''')
cmd_parser.add_argument('--target', default='unix', help='the target platform')
cmd_parser.add_argument('--device', default='/dev/ttyACM0', help='the serial device or the IP address of the pyboard')
cmd_parser.add_argument('-b', '--baudrate', default=115200, help='the baud rate of the serial device')
cmd_parser.add_argument('-u', '--user', default='micro', help='the telnet login username')
cmd_parser.add_argument('-p', '--password', default='python', help='the telnet login password')
cmd_parser.add_argument('-d', '--test-dirs', nargs='*', help='input test directories (if no files given)')
cmd_parser.add_argument('-r', '--result-dir', default=base_path('results'), help='directory for test results')
cmd_parser.add_argument('-e', '--exclude', action=append_filter, metavar='REGEX', dest='filters', help='exclude test by regex on path/name.py')
cmd_parser.add_argument('-i', '--include', action=append_filter, metavar='REGEX', dest='filters', help='include test by regex on path/name.py')
cmd_parser.add_argument('--write-exp', action='store_true', help='use CPython to generate .exp files to run tests w/o CPython')
cmd_parser.add_argument('--list-tests', action='store_true', help='list tests instead of running them')
cmd_parser.add_argument('--emit', default='bytecode', help='MicroPython emitter to use (bytecode or native)')
cmd_parser.add_argument('--heapsize', help='heapsize to use (use default if not specified)')
cmd_parser.add_argument('--via-mpy', action='store_true', help='compile .py files to .mpy first')
cmd_parser.add_argument('--mpy-cross-flags', default='-mcache-lookup-bc', help='flags to pass to mpy-cross')
cmd_parser.add_argument('--keep-path', action='store_true', help='do not clear MICROPYPATH when running tests')
cmd_parser.add_argument('-j', '--jobs', default=1, metavar='N', type=int, help='Number of tests to run simultaneously')
cmd_parser.add_argument('--auto-jobs', action='store_const', dest='jobs', const=multiprocessing.cpu_count(), help='Set the -j value to the CPU (thread) count')
cmd_parser.add_argument('files', nargs='*', help='input test files')
cmd_parser.add_argument('--print-failures', action='store_true', help='print the diff of expected vs. actual output for failed tests and exit')
cmd_parser.add_argument('--clean-failures', action='store_true', help='delete the .exp and .out files from failed tests and exit')
args = cmd_parser.parse_args()
if args.print_failures:
for exp in glob(os.path.join(args.result_dir, "*.exp")):
testbase = exp[:-4]
print()
print("FAILURE {0}".format(testbase))
os.system("{0} {1}.exp {1}.out".format(DIFF, testbase))
sys.exit(0)
if args.clean_failures:
for f in glob(os.path.join(args.result_dir, "*.exp")) + glob(os.path.join(args.result_dir, "*.out")):
os.remove(f)
sys.exit(0)
LOCAL_TARGETS = ('unix', 'qemu-arm',)
EXTERNAL_TARGETS = ('pyboard', 'wipy', 'esp8266', 'esp32', 'minimal', 'nrf')
if args.target in LOCAL_TARGETS or args.list_tests:
pyb = None
elif args.target in EXTERNAL_TARGETS:
global pyboard
sys.path.append(base_path('../tools'))
import pyboard
pyb = pyboard.Pyboard(args.device, args.baudrate, args.user, args.password)
pyb.enter_raw_repl()
else:
raise ValueError('target must be one of %s' % ", ".join(LOCAL_TARGETS + EXTERNAL_TARGETS))
if len(args.files) == 0:
if args.test_dirs is None:
test_dirs = ('basics', 'micropython', 'misc', 'extmod',)
if args.target == 'pyboard':
# run pyboard tests
test_dirs += ('float', 'stress', 'pyb', 'pybnative', 'inlineasm')
elif args.target in ('esp8266', 'esp32', 'minimal', 'nrf'):
test_dirs += ('float',)
elif args.target == 'wipy':
# run WiPy tests
test_dirs += ('wipy',)
elif args.target == 'unix':
# run PC tests
test_dirs += ('float', 'import', 'io', 'stress', 'unicode', 'unix', 'cmdline', '../extmod/ulab/tests')
elif args.target == 'qemu-arm':
if not args.write_exp:
raise ValueError('--target=qemu-arm must be used with --write-exp')
# Generate expected output files for qemu run.
# This list should match the test_dirs tuple in tinytest-codegen.py.
test_dirs += ('float', 'inlineasm', 'qemu-arm',)
else:
# run tests from these directories
test_dirs = args.test_dirs
tests = sorted(test_file for test_files in (glob('{}/*.py'.format(dir)) for dir in test_dirs) for test_file in test_files)
else:
# tests explicitly given
tests = args.files
if not args.keep_path:
# clear search path to make sure tests use only builtin modules and those in extmod
os.environ['MICROPYPATH'] = os.pathsep + base_path('../extmod') + os.pathsep + base_path('.')
try:
os.makedirs(args.result_dir, exist_ok=True)
res = run_tests(pyb, tests, args, args.result_dir, args.jobs)
finally:
if pyb:
pyb.close()
if not res:
sys.exit(1)
if __name__ == "__main__":
main()

View File

@ -1,6 +1,6 @@
#
# This is minimal MicroPython variant of run-tests script, which uses
# .exp files as generated by run-tests --write-exp. It is useful to run
# This is minimal MicroPython variant of run-tests.py script, which uses
# .exp files as generated by run-tests.py --write-exp. It is useful to run
# testsuite on systems which have neither CPython3 nor unix shell.
# This script is intended to be run by the same interpreter executable
# which is to be tested, so should use minimal language functionality.

View File

@ -1,7 +1,7 @@
#!/bin/sh
#
# This is plain shell variant of run-tests script, which uses .exp files
# as generated by run-tests --write-exp. It is useful to run testsuite
# This is plain shell variant of run-tests.py script, which uses .exp files
# as generated by run-tests.py --write-exp. It is useful to run testsuite
# on embedded systems which don't have CPython3.
#

888
tests/run-tests.py Executable file
View File

@ -0,0 +1,888 @@
#! /usr/bin/env python3
import os
import subprocess
import sys
import platform
import argparse
import inspect
import re
import threading
import multiprocessing
from multiprocessing.pool import ThreadPool
from glob import glob
# See stackoverflow.com/questions/2632199: neither __file__ nor sys.argv[0]
# is guaranteed to always work; this one should, though.
BASEPATH = os.path.dirname(os.path.abspath(inspect.getsourcefile(lambda: None)))
def base_path(*p):
return os.path.abspath(os.path.join(BASEPATH, *p)).replace("\\", "/")
# Tests require at least CPython 3.3. If your default python3 executable
# is of lower version, you can point MICROPY_CPYTHON3 environment var
# to the correct executable.
if os.name == "nt":
CPYTHON3 = os.getenv("MICROPY_CPYTHON3", "python")
MICROPYTHON = os.getenv("MICROPY_MICROPYTHON", base_path("../ports/windows/micropython.exe"))
else:
CPYTHON3 = os.getenv("MICROPY_CPYTHON3", "python3")
MICROPYTHON = os.getenv("MICROPY_MICROPYTHON", base_path("../ports/unix/micropython"))
# Use CPython options to not save .pyc files, to only access the core standard library
# (not site packages which may clash with u-module names), and improve start up time.
CPYTHON3_CMD = [CPYTHON3, "-Wignore", "-BS"]
# mpy-cross is only needed if --via-mpy command-line arg is passed
MPYCROSS = os.getenv("MICROPY_MPYCROSS", base_path("../mpy-cross/mpy-cross"))
# For diff'ing test output
DIFF = os.getenv("MICROPY_DIFF", "diff -u")
# Set PYTHONIOENCODING so that CPython will use utf-8 on systems which set another encoding in the locale
os.environ["PYTHONIOENCODING"] = "utf-8"
def rm_f(fname):
if os.path.exists(fname):
os.remove(fname)
# unescape wanted regex chars and escape unwanted ones
def convert_regex_escapes(line):
cs = []
escape = False
for c in str(line, "utf8"):
if escape:
escape = False
cs.append(c)
elif c == "\\":
escape = True
elif c in ("(", ")", "[", "]", "{", "}", ".", "*", "+", "^", "$"):
cs.append("\\" + c)
else:
cs.append(c)
# accept carriage-return(s) before final newline
if cs[-1] == "\n":
cs[-1] = "\r*\n"
return bytes("".join(cs), "utf8")
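# Run a single test file under MicroPython (on the PC build, or on a board via pyboard) and return its output as bytes.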
def run_micropython(pyb, args, test_file, is_special=False):
special_tests = (
"micropython/meminfo.py",
"basics/bytes_compare3.py",
"basics/builtin_help.py",
"thread/thread_exc2.py",
"esp32/partition_ota.py",
)
had_crash = False
if pyb is None:
# run on PC
if (
test_file.startswith(("cmdline/", base_path("feature_check/")))
or test_file in special_tests
):
# special handling for tests of the unix cmdline program
is_special = True
if is_special:
# check for any cmdline options needed for this test
args = [MICROPYTHON]
with open(test_file, "rb") as f:
line = f.readline()
if line.startswith(b"# cmdline:"):
# subprocess.check_output on Windows only accepts strings, not bytes
args += [str(c, "utf-8") for c in line[10:].strip().split()]
# run the test, possibly with redirected input
try:
if "repl_" in test_file:
# Need to use a PTY to test command line editing
try:
import pty
except ImportError:
# in case pty module is not available, like on Windows
return b"SKIP\n"
import select
def get(required=False):
rv = b""
while True:
ready = select.select([emulator], [], [], 0.02)
if ready[0] == [emulator]:
rv += os.read(emulator, 1024)
else:
if not required or rv:
return rv
def send_get(what):
os.write(emulator, what)
return get()
with open(test_file, "rb") as f:
# instead of: output_mupy = subprocess.check_output(args, stdin=f)
# openpty returns two read/write file descriptors. The first one is
# used by the program which provides the virtual
# terminal service, and the second one is used by the
# subprogram which requires a tty to work.
emulator, subterminal = pty.openpty()
p = subprocess.Popen(
args,
stdin=subterminal,
stdout=subterminal,
stderr=subprocess.STDOUT,
bufsize=0,
)
banner = get(True)
output_mupy = banner + b"".join(send_get(line) for line in f)
send_get(b"\x04") # exit the REPL, so coverage info is saved
# At this point the process might have exited already, but trying to
# kill it 'again' normally doesn't result in exceptions as Python and/or
# the OS seem to try to handle this nicely. When running Linux on WSL
# though, the situation differs and calling Popen.kill after the process
# terminated results in a ProcessLookupError. Just catch that one here
# since we just want the process to be gone and that's the case.
try:
p.kill()
except ProcessLookupError:
pass
os.close(emulator)
os.close(subterminal)
else:
output_mupy = subprocess.check_output(
args + [test_file], stderr=subprocess.STDOUT
)
except subprocess.CalledProcessError as error:
return error.output + b"CRASH"
else:
# a standard test run on PC
# create system command
cmdlist = [MICROPYTHON, "-X", "emit=" + args.emit]
if args.heapsize is not None:
cmdlist.extend(["-X", "heapsize=" + args.heapsize])
# if running via .mpy, first compile the .py file
if args.via_mpy:
subprocess.check_output(
[MPYCROSS]
+ args.mpy_cross_flags.split()
+ ["-o", "mpytest.mpy", "-X", "emit=" + args.emit, test_file]
)
cmdlist.extend(["-m", "mpytest"])
else:
cmdlist.append(test_file)
# run the actual test
e = {"LANG": "en_US.UTF-8", "MICROPYPATH": os.environ["MICROPYPATH"]}
try:
output_mupy = subprocess.check_output(cmdlist, env=e, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as error:
had_crash = True
output_mupy = error.output + b"CRASH"
# clean up if we had an intermediate .mpy file
if args.via_mpy:
rm_f("mpytest.mpy")
else:
# run on pyboard
pyb.enter_raw_repl()
try:
output_mupy = pyb.execfile(test_file)
except pyboard.PyboardError as e:
had_crash = True
if not is_special and e.args[0] == "exception":
output_mupy = e.args[1] + e.args[2] + b"CRASH"
else:
output_mupy = b"CRASH"
# canonical form for all ports/platforms is to use \n for end-of-line
output_mupy = output_mupy.replace(b"\r\n", b"\n")
# don't try to convert the output if we should skip this test
if had_crash or output_mupy in (b"SKIP\n", b"CRASH"):
return output_mupy
if is_special or test_file in special_tests:
# convert parts of the output that are not stable across runs
with open(test_file + ".exp", "rb") as f:
lines_exp = []
for line in f.readlines():
if line == b"########\n":
line = (line,)
else:
line = (line, re.compile(convert_regex_escapes(line)))
lines_exp.append(line)
lines_mupy = [line + b"\n" for line in output_mupy.split(b"\n")]
if output_mupy.endswith(b"\n"):
lines_mupy = lines_mupy[:-1] # remove erroneous last empty line
i_mupy = 0
for i in range(len(lines_exp)):
if lines_exp[i][0] == b"########\n":
# 8x #'s means match 0 or more whole lines
line_exp = lines_exp[i + 1]
skip = 0
while i_mupy + skip < len(lines_mupy) and not line_exp[1].match(
lines_mupy[i_mupy + skip]
):
skip += 1
if i_mupy + skip >= len(lines_mupy):
lines_mupy[i_mupy] = b"######## FAIL\n"
break
del lines_mupy[i_mupy : i_mupy + skip]
lines_mupy.insert(i_mupy, b"########\n")
i_mupy += 1
else:
# a regex
if lines_exp[i][1].match(lines_mupy[i_mupy]):
# print("match", lines_exp[i][0], lines_mupy[i_mupy])
lines_mupy[i_mupy] = lines_exp[i][0]
else:
# print("don't match: %r %s" % (lines_exp[i][0], lines_mupy[i_mupy])) # DEBUG
pass
i_mupy += 1
if i_mupy >= len(lines_mupy):
break
output_mupy = b"".join(lines_mupy)
return output_mupy
def run_feature_check(pyb, args, base_path, test_file):
if pyb is not None and test_file.startswith("repl_"):
# REPL feature tests will not run via pyboard because they require prompt interactivity
return b""
return run_micropython(pyb, args, base_path("feature_check", test_file), is_special=True)
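# Counter/accumulator guarded by a lock so parallel test threads can safely update shared totals and lists.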
class ThreadSafeCounter:
def __init__(self, start=0):
self._value = start
self._lock = threading.Lock()
def add(self, to_add):
with self._lock:
self._value += to_add
def append(self, arg):
self.add([arg])
@property
def value(self):
return self._value
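# Run the given tests (optionally across num_threads worker threads), print a summary, and return False if any test failed.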
def run_tests(pyb, tests, args, result_dir, num_threads=1):
test_count = ThreadSafeCounter()
testcase_count = ThreadSafeCounter()
passed_count = ThreadSafeCounter()
failed_tests = ThreadSafeCounter([])
skipped_tests = ThreadSafeCounter([])
skip_tests = set()
skip_native = False
skip_int_big = False
skip_bytearray = False
skip_set_type = False
skip_slice = False
skip_async = False
skip_const = False
skip_revops = False
skip_io_module = False
skip_endian = False
has_complex = True
has_coverage = False
upy_float_precision = 32
# If we're asked to --list-tests, we can't assume that there's a
# connection to target, so we can't run feature checks usefully.
if not (args.list_tests or args.write_exp):
# Even if we run completely different tests in a different directory,
# we need to access feature_checks from the same directory as the
# run-tests script itself so use base_path.
# Check if micropython.native is supported, and skip such tests if it's not
output = run_feature_check(pyb, args, base_path, "native_check.py")
if output.endswith(b"CRASH"):
skip_native = True
# Check if arbitrary-precision integers are supported, and skip such tests if it's not
output = run_feature_check(pyb, args, base_path, "int_big.py")
if output != b"1000000000000000000000000000000000000000000000\n":
skip_int_big = True
# Check if bytearray is supported, and skip such tests if it's not
output = run_feature_check(pyb, args, base_path, "bytearray.py")
if output != b"bytearray\n":
skip_bytearray = True
# Check if set type (and set literals) is supported, and skip such tests if it's not
output = run_feature_check(pyb, args, base_path, "set_check.py")
if output.endswith(b"CRASH"):
skip_set_type = True
# Check if slice is supported, and skip such tests if it's not
output = run_feature_check(pyb, args, base_path, "slice.py")
if output != b"slice\n":
skip_slice = True
# Check if async/await keywords are supported, and skip such tests if it's not
output = run_feature_check(pyb, args, base_path, "async_check.py")
if output.endswith(b"CRASH"):
skip_async = True
# Check if const keyword (MicroPython extension) is supported, and skip such tests if it's not
output = run_feature_check(pyb, args, base_path, "const.py")
if output.endswith(b"CRASH"):
skip_const = True
# Check if __rOP__ special methods are supported, and skip such tests if it's not
output = run_feature_check(pyb, args, base_path, "reverse_ops.py")
if output == b"TypeError\n":
skip_revops = True
# Check if uio module exists, and skip such tests if it doesn't
output = run_feature_check(pyb, args, base_path, "uio_module.py")
if output != b"uio\n":
skip_io_module = True
# Check if emacs repl is supported, and skip such tests if it's not
t = run_feature_check(pyb, args, base_path, "repl_emacs_check.py")
if "True" not in str(t, "ascii"):
skip_tests.add("cmdline/repl_emacs_keys.py")
# Check if words movement in repl is supported, and skip such tests if it's not
t = run_feature_check(pyb, args, base_path, "repl_words_move_check.py")
if "True" not in str(t, "ascii"):
skip_tests.add("cmdline/repl_words_move.py")
upy_byteorder = run_feature_check(pyb, args, base_path, "byteorder.py")
upy_float_precision = run_feature_check(pyb, args, base_path, "float.py")
if upy_float_precision.endswith(b"CRASH"):
upy_float_precision = 0
else:
upy_float_precision = int(upy_float_precision)
has_complex = run_feature_check(pyb, args, base_path, "complex.py") == b"complex\n"
has_coverage = run_feature_check(pyb, args, base_path, "coverage.py") == b"coverage\n"
cpy_byteorder = subprocess.check_output(
CPYTHON3_CMD + [base_path("feature_check/byteorder.py")]
)
skip_endian = upy_byteorder != cpy_byteorder
# These tests don't test slice explicitly but rather use it to perform the test
misc_slice_tests = (
"builtin_range",
"class_super",
"containment",
"errno1",
"fun_str",
"generator1",
"globals_del",
"memoryview1",
"memoryview_gc",
"object1",
"python34",
"struct_endian",
)
# Some tests shouldn't be run on GitHub Actions
if os.getenv("GITHUB_ACTIONS") == "true":
skip_tests.add("thread/stress_schedule.py") # has reliability issues
if upy_float_precision == 0:
skip_tests.add("extmod/uctypes_le_float.py")
skip_tests.add("extmod/uctypes_native_float.py")
skip_tests.add("extmod/uctypes_sizeof_float.py")
skip_tests.add("extmod/ujson_dumps_float.py")
skip_tests.add("extmod/ujson_loads_float.py")
skip_tests.add("extmod/urandom_extra_float.py")
skip_tests.add("misc/rge_sm.py")
if upy_float_precision < 32:
skip_tests.add(
"float/float2int_intbig.py"
) # requires fp32, there's float2int_fp30_intbig.py instead
skip_tests.add(
"float/string_format.py"
) # requires fp32, there's string_format_fp30.py instead
skip_tests.add("float/bytes_construct.py") # requires fp32
skip_tests.add("float/bytearray_construct.py") # requires fp32
if upy_float_precision < 64:
skip_tests.add("float/float_divmod.py") # tested by float/float_divmod_relaxed.py instead
skip_tests.add("float/float2int_doubleprec_intbig.py")
skip_tests.add("float/float_parse_doubleprec.py")
if not has_complex:
skip_tests.add("float/complex1.py")
skip_tests.add("float/complex1_intbig.py")
skip_tests.add("float/complex_special_methods.py")
skip_tests.add("float/int_big_float.py")
skip_tests.add("float/true_value.py")
skip_tests.add("float/types.py")
if not has_coverage:
skip_tests.add("cmdline/cmd_parsetree.py")
# Some tests shouldn't be run on a PC
if args.target == "unix":
# unix build does not have the GIL so can't run thread mutation tests
for t in tests:
if t.startswith("thread/mutate_"):
skip_tests.add(t)
# Some tests shouldn't be run on pyboard
if args.target != "unix":
skip_tests.add("basics/exception_chain.py") # warning is not printed
skip_tests.add("micropython/meminfo.py") # output is very different to PC output
skip_tests.add("extmod/machine_mem.py") # raw memory access not supported
if args.target == "wipy":
skip_tests.add("misc/print_exception.py") # requires error reporting full
skip_tests.update(
{
"extmod/uctypes_%s.py" % t
for t in "bytearray le native_le ptr_le ptr_native_le sizeof sizeof_native array_assign_le array_assign_native_le".split()
}
) # requires uctypes
skip_tests.add("extmod/zlibd_decompress.py") # requires zlib
skip_tests.add("extmod/uheapq1.py") # uheapq not supported by WiPy
skip_tests.add("extmod/urandom_basic.py") # requires urandom
skip_tests.add("extmod/urandom_extra.py") # requires urandom
elif args.target == "esp8266":
skip_tests.add("misc/rge_sm.py") # too large
elif args.target == "minimal":
skip_tests.add("basics/class_inplace_op.py") # all special methods not supported
skip_tests.add(
"basics/subclass_native_init.py"
) # native subclassing corner cases not supported
skip_tests.add("misc/rge_sm.py") # too large
skip_tests.add("micropython/opt_level.py") # don't assume line numbers are stored
elif args.target == "nrf":
skip_tests.add("basics/memoryview1.py") # no item assignment for memoryview
skip_tests.add("extmod/urandom_basic.py") # unimplemented: urandom.seed
skip_tests.add("micropython/opt_level.py") # no support for line numbers
skip_tests.add("misc/non_compliant.py") # no item assignment for bytearray
for t in tests:
if t.startswith("basics/io_"):
skip_tests.add(t)
elif args.target == "qemu-arm":
skip_tests.add("misc/print_exception.py") # requires sys stdfiles
# Some tests are known to fail on 64-bit machines
if pyb is None and platform.architecture()[0] == "64bit":
pass
# Some tests use unsupported features on Windows
if os.name == "nt":
skip_tests.add("import/import_file.py") # works but CPython prints forward slashes
# Some tests are known to fail with native emitter
# Remove them from the below when they work
if args.emit == "native":
skip_tests.update(
{"basics/%s.py" % t for t in "gen_yield_from_close generator_name".split()}
) # require raise_varargs, generator name
skip_tests.update(
{"basics/async_%s.py" % t for t in "with with2 with_break with_return".split()}
) # require async_with
skip_tests.update(
{"basics/%s.py" % t for t in "try_reraise try_reraise2".split()}
) # require raise_varargs
skip_tests.add("basics/annotate_var.py") # requires checking for unbound local
skip_tests.add("basics/del_deref.py") # requires checking for unbound local
skip_tests.add("basics/del_local.py") # requires checking for unbound local
skip_tests.add("basics/exception_chain.py") # raise from is not supported
skip_tests.add("basics/scope_implicit.py") # requires checking for unbound local
skip_tests.add("basics/try_finally_return2.py") # requires raise_varargs
skip_tests.add("basics/unboundlocal.py") # requires checking for unbound local
skip_tests.add("extmod/uasyncio_event.py") # unknown issue
skip_tests.add("extmod/uasyncio_lock.py") # requires async with
skip_tests.add("extmod/uasyncio_micropython.py") # unknown issue
skip_tests.add("extmod/uasyncio_wait_for.py") # unknown issue
skip_tests.add("misc/features.py") # requires raise_varargs
skip_tests.add(
"misc/print_exception.py"
) # because native doesn't have proper traceback info
skip_tests.add("misc/sys_exc_info.py") # sys.exc_info() is not supported for native
skip_tests.add(
"micropython/emg_exc.py"
) # because native doesn't have proper traceback info
skip_tests.add(
"micropython/heapalloc_traceback.py"
) # because native doesn't have proper traceback info
skip_tests.add(
"micropython/opt_level_lineno.py"
) # native doesn't have proper traceback info
skip_tests.add("micropython/schedule.py") # native code doesn't check pending events
def run_one_test(test_file):
test_file = test_file.replace("\\", "/")
if args.filters:
# Default verdict is the opposite of the first action
verdict = "include" if args.filters[0][0] == "exclude" else "exclude"
for action, pat in args.filters:
if pat.search(test_file):
verdict = action
if verdict == "exclude":
return
test_basename = test_file.replace("..", "_").replace("./", "").replace("/", "_")
test_name = os.path.splitext(os.path.basename(test_file))[0]
is_native = (
test_name.startswith("native_")
or test_name.startswith("viper_")
or args.emit == "native"
)
is_endian = test_name.endswith("_endian")
is_int_big = test_name.startswith("int_big") or test_name.endswith("_intbig")
is_bytearray = test_name.startswith("bytearray") or test_name.endswith("_bytearray")
is_set_type = test_name.startswith("set_") or test_name.startswith("frozenset")
is_slice = test_name.find("slice") != -1 or test_name in misc_slice_tests
is_async = test_name.startswith("async_")
is_const = test_name.startswith("const")
is_io_module = test_name.startswith("io_")
skip_it = test_file in skip_tests
skip_it |= skip_native and is_native
skip_it |= skip_endian and is_endian
skip_it |= skip_int_big and is_int_big
skip_it |= skip_bytearray and is_bytearray
skip_it |= skip_set_type and is_set_type
skip_it |= skip_slice and is_slice
skip_it |= skip_async and is_async
skip_it |= skip_const and is_const
skip_it |= skip_revops and "reverse_op" in test_name
skip_it |= skip_io_module and is_io_module
if args.list_tests:
if not skip_it:
print(test_file)
return
if skip_it:
print("skip ", test_file)
skipped_tests.append(test_name)
return
# get expected output
test_file_expected = test_file + ".exp"
if os.path.isfile(test_file_expected):
# expected output given by a file, so read that in
with open(test_file_expected, "rb") as f:
output_expected = f.read()
else:
e = {"PYTHONPATH": os.getcwd(), "PATH": os.environ["PATH"], "LANG": "en_US.UTF-8"}
# run CPython to work out expected output
try:
output_expected = subprocess.check_output(
CPYTHON3_CMD + [test_file], env=e, stderr=subprocess.STDOUT
)
if args.write_exp:
with open(test_file_expected, "wb") as f:
f.write(output_expected)
except subprocess.CalledProcessError as error:
output_expected = error.output + b"CPYTHON3 CRASH"
# canonical form for all host platforms is to use \n for end-of-line
output_expected = output_expected.replace(b"\r\n", b"\n")
if args.write_exp:
return
# run MicroPython
output_mupy = run_micropython(pyb, args, test_file)
if output_mupy == b"SKIP\n":
print("skip ", test_file)
skipped_tests.append(test_name)
return
testcase_count.add(len(output_expected.splitlines()))
filename_expected = os.path.join(result_dir, test_basename + ".exp")
filename_mupy = os.path.join(result_dir, test_basename + ".out")
if output_expected == output_mupy:
# print("pass ", test_file)
passed_count.add(1)
rm_f(filename_expected)
rm_f(filename_mupy)
else:
with open(filename_expected, "wb") as f:
f.write(output_expected)
with open(filename_mupy, "wb") as f:
f.write(output_mupy)
print("### Expected")
print(output_expected)
print("### Actual")
print(output_mupy)
print("FAIL ", test_file)
failed_tests.append(test_file)
test_count.add(1)
if args.list_tests:
return True
if num_threads > 1:
pool = ThreadPool(num_threads)
pool.map(run_one_test, tests)
else:
for test in tests:
run_one_test(test)
print(
"{} tests performed ({} individual testcases)".format(
test_count.value, testcase_count.value
)
)
print("{} tests passed".format(passed_count.value))
if len(skipped_tests.value) > 0:
print(
"{} tests skipped: {}".format(
len(skipped_tests.value), " ".join(sorted(skipped_tests.value))
)
)
if len(failed_tests.value) > 0:
print(
"{} tests failed: {}".format(
len(failed_tests.value), " ".join(sorted(failed_tests.value))
)
)
return False
# all tests succeeded
return True
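# argparse action that accumulates -i/--include and -e/--exclude patterns, in order, as (action, compiled regex) pairs on args.filters.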
class append_filter(argparse.Action):
def __init__(self, option_strings, dest, **kwargs):
super().__init__(option_strings, dest, default=[], **kwargs)
def __call__(self, parser, args, value, option):
if not hasattr(args, self.dest):
args.filters = []
if option.startswith(("-e", "--e")):
option = "exclude"
else:
option = "include"
args.filters.append((option, re.compile(value)))
def main():
cmd_parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description="""Run and manage tests for MicroPython.
Tests are discovered by scanning test directories for .py files or using the
specified test files. If neither test files nor directories are specified, the script
expects to be run in the tests directory (where this file is located) and the
builtin tests suitable for the target platform are run.
When running tests, run-tests.py compares the MicroPython output of the test with the output
produced by running the test through CPython unless a <test>.exp file is found, in which
case it is used as comparison.
If a test fails, run-tests.py produces a pair of <test>.out and <test>.exp files in the result
directory with the MicroPython output and the expectations, respectively.
""",
epilog="""\
Options -i and -e can be given multiple times and are processed in the order given.
Regex "search" (vs "match") operation is used. The action (include/exclude) of
the last matching regex is used:
run-tests.py -i async - exclude all, then include tests containing "async" anywhere
run-tests.py -e '/big.+int' - include all, then exclude by regex
run-tests.py -e async -i async_foo - include all, exclude async, yet still include async_foo
""",
)
cmd_parser.add_argument("--target", default="unix", help="the target platform")
cmd_parser.add_argument(
"--device",
default="/dev/ttyACM0",
help="the serial device or the IP address of the pyboard",
)
cmd_parser.add_argument(
"-b", "--baudrate", default=115200, help="the baud rate of the serial device"
)
cmd_parser.add_argument("-u", "--user", default="micro", help="the telnet login username")
cmd_parser.add_argument("-p", "--password", default="python", help="the telnet login password")
cmd_parser.add_argument(
"-d", "--test-dirs", nargs="*", help="input test directories (if no files given)"
)
cmd_parser.add_argument(
"-r", "--result-dir", default=base_path("results"), help="directory for test results"
)
cmd_parser.add_argument(
"-e",
"--exclude",
action=append_filter,
metavar="REGEX",
dest="filters",
help="exclude test by regex on path/name.py",
)
cmd_parser.add_argument(
"-i",
"--include",
action=append_filter,
metavar="REGEX",
dest="filters",
help="include test by regex on path/name.py",
)
cmd_parser.add_argument(
"--write-exp",
action="store_true",
help="use CPython to generate .exp files to run tests w/o CPython",
)
cmd_parser.add_argument(
"--list-tests", action="store_true", help="list tests instead of running them"
)
cmd_parser.add_argument(
"--emit", default="bytecode", help="MicroPython emitter to use (bytecode or native)"
)
cmd_parser.add_argument("--heapsize", help="heapsize to use (use default if not specified)")
cmd_parser.add_argument(
"--via-mpy", action="store_true", help="compile .py files to .mpy first"
)
cmd_parser.add_argument(
"--mpy-cross-flags", default="-mcache-lookup-bc", help="flags to pass to mpy-cross"
)
cmd_parser.add_argument(
"--keep-path", action="store_true", help="do not clear MICROPYPATH when running tests"
)
cmd_parser.add_argument(
"-j",
"--jobs",
default=1,
metavar="N",
type=int,
help="Number of tests to run simultaneously",
)
cmd_parser.add_argument(
"--auto-jobs",
action="store_const",
dest="jobs",
const=multiprocessing.cpu_count(),
help="Set the -j values to the CPU (thread) count",
)
cmd_parser.add_argument("files", nargs="*", help="input test files")
cmd_parser.add_argument(
"--print-failures",
action="store_true",
help="print the diff of expected vs. actual output for failed tests and exit",
)
cmd_parser.add_argument(
"--clean-failures",
action="store_true",
help="delete the .exp and .out files from failed tests and exit",
)
args = cmd_parser.parse_args()
if args.print_failures:
for exp in glob(os.path.join(args.result_dir, "*.exp")):
testbase = exp[:-4]
print()
print("FAILURE {0}".format(testbase))
os.system("{0} {1}.exp {1}.out".format(DIFF, testbase))
sys.exit(0)
if args.clean_failures:
for f in glob(os.path.join(args.result_dir, "*.exp")) + glob(
os.path.join(args.result_dir, "*.out")
):
os.remove(f)
sys.exit(0)
LOCAL_TARGETS = (
"unix",
"qemu-arm",
)
EXTERNAL_TARGETS = ("pyboard", "wipy", "esp8266", "esp32", "minimal", "nrf")
if args.target in LOCAL_TARGETS or args.list_tests:
pyb = None
elif args.target in EXTERNAL_TARGETS:
global pyboard
sys.path.append(base_path("../tools"))
import pyboard
pyb = pyboard.Pyboard(args.device, args.baudrate, args.user, args.password)
pyb.enter_raw_repl()
else:
raise ValueError("target must be one of %s" % ", ".join(LOCAL_TARGETS + EXTERNAL_TARGETS))
if len(args.files) == 0:
if args.test_dirs is None:
test_dirs = (
"basics",
"micropython",
"misc",
"extmod",
)
if args.target == "pyboard":
# run pyboard tests
test_dirs += ("float", "stress", "pyb", "pybnative", "inlineasm")
elif args.target in ("esp8266", "esp32", "minimal", "nrf"):
test_dirs += ("float",)
elif args.target == "wipy":
# run WiPy tests
test_dirs += ("wipy",)
elif args.target == "unix":
# run PC tests
test_dirs += (
"float",
"import",
"io",
"stress",
"unicode",
"unix",
"cmdline",
"../extmod/ulab/tests",
)
elif args.target == "qemu-arm":
if not args.write_exp:
raise ValueError("--target=qemu-arm must be used with --write-exp")
# Generate expected output files for qemu run.
# This list should match the test_dirs tuple in tinytest-codegen.py.
test_dirs += (
"float",
"inlineasm",
"qemu-arm",
)
else:
# run tests from these directories
test_dirs = args.test_dirs
tests = sorted(
test_file
for test_files in (glob("{}/*.py".format(dir)) for dir in test_dirs)
for test_file in test_files
)
else:
# tests explicitly given
tests = args.files
if not args.keep_path:
# clear search path to make sure tests use only builtin modules and those in extmod
os.environ["MICROPYPATH"] = (
os.pathsep + base_path("../extmod") + os.pathsep + base_path(".")
)
try:
os.makedirs(args.result_dir, exist_ok=True)
res = run_tests(pyb, tests, args, args.result_dir, args.jobs)
finally:
if pyb:
pyb.close()
if not res:
sys.exit(1)
if __name__ == "__main__":
main()
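For orientation, the options defined above combine along these lines; a minimal sketch assuming run-tests.py is invoked from the tests directory against the default unix port binary:
./run-tests.py -d basics float                     # run only the basics and float test directories
./run-tests.py --via-mpy --emit native -d basics   # precompile with mpy-cross, then run under the native emitter
./run-tests.py --print-failures                    # diff the saved .exp/.out pairs of failed tests
Failure artifacts are written to --result-dir (tests/results by default) and can be removed again with --clean-failures.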

View File

@ -82,40 +82,37 @@ function ci_cc3200_build {
########################################################################################
# ports/esp32
function ci_esp32_idf3_setup {
sudo pip3 install pyserial 'pyparsing<2.4'
curl -L https://dl.espressif.com/dl/xtensa-esp32-elf-linux64-1.22.0-80-g6c4433a-5.2.0.tar.gz | tar zxf -
function ci_esp32_setup_helper {
git clone https://github.com/espressif/esp-idf.git
git -C esp-idf checkout $1
git -C esp-idf submodule update --init \
components/bt/controller/lib \
components/bt/host/nimble/nimble \
components/esp_wifi \
components/esptool_py/esptool \
components/lwip/lwip \
components/mbedtls/mbedtls
./esp-idf/install.sh
}
function ci_esp32_idf3_path {
echo $(pwd)/xtensa-esp32-elf/bin
function ci_esp32_idf402_setup {
ci_esp32_setup_helper v4.0.2
}
function ci_esp32_idf3_build {
function ci_esp32_idf43_setup {
ci_esp32_setup_helper v4.3-beta2
}
function ci_esp32_build {
source esp-idf/export.sh
make ${MAKEOPTS} -C mpy-cross
git -C esp-idf checkout $(grep "ESPIDF_SUPHASH_V3 :=" ports/esp32/Makefile | cut -d " " -f 3)
git -C esp-idf submodule update --init components/json/cJSON components/esp32/lib components/esptool_py/esptool components/expat/expat components/lwip/lwip components/mbedtls/mbedtls components/micro-ecc/micro-ecc components/nghttp/nghttp2 components/nimble components/bt
make ${MAKEOPTS} -C ports/esp32 submodules
make ${MAKEOPTS} -C ports/esp32
}
function ci_esp32_idf4_setup {
sudo pip3 install pyserial 'pyparsing<2.4'
curl -L https://dl.espressif.com/dl/xtensa-esp32-elf-gcc8_2_0-esp-2019r2-linux-amd64.tar.gz | tar zxf -
git clone https://github.com/espressif/esp-idf.git
}
function ci_esp32_idf4_path {
echo $(pwd)/xtensa-esp32-elf/bin
}
function ci_esp32_idf4_build {
make ${MAKEOPTS} -C mpy-cross
git -C esp-idf checkout $(grep "ESPIDF_SUPHASH_V4 :=" ports/esp32/Makefile | cut -d " " -f 3)
git -C esp-idf submodule update --init components/bt/controller/lib components/bt/host/nimble/nimble components/esp_wifi/lib_esp32 components/esptool_py/esptool components/lwip/lwip components/mbedtls/mbedtls
make ${MAKEOPTS} -C ports/esp32 submodules
make ${MAKEOPTS} -C ports/esp32
make ${MAKEOPTS} -C ports/esp32 clean
make ${MAKEOPTS} -C ports/esp32 USER_C_MODULES=../../../examples/usercmodule/micropython.cmake
if [ -d $IDF_PATH/components/esp32s2 ]; then
make ${MAKEOPTS} -C ports/esp32 BOARD=GENERIC_S2
fi
}
########################################################################################
@ -195,9 +192,10 @@ function ci_rp2_setup {
function ci_rp2_build {
make ${MAKEOPTS} -C mpy-cross
git submodule update --init lib/pico-sdk
git -C lib/pico-sdk submodule update --init lib/tinyusb
git submodule update --init lib/pico-sdk lib/tinyusb
make ${MAKEOPTS} -C ports/rp2
make ${MAKEOPTS} -C ports/rp2 clean
make ${MAKEOPTS} -C ports/rp2 USER_C_MODULES=../../examples/usercmodule/micropython.cmake
}
########################################################################################
@ -224,7 +222,7 @@ function ci_stm32_pyb_build {
make ${MAKEOPTS} -C mpy-cross
make ${MAKEOPTS} -C ports/stm32 submodules
git submodule update --init lib/btstack
make ${MAKEOPTS} -C ports/stm32 BOARD=PYBV11 MICROPY_PY_WIZNET5K=5200 MICROPY_PY_CC3K=1 USER_C_MODULES=../../examples/usercmodule CFLAGS_EXTRA="-DMODULE_CEXAMPLE_ENABLED=1 -DMODULE_CPPEXAMPLE_ENABLED=1"
make ${MAKEOPTS} -C ports/stm32 BOARD=PYBV11 MICROPY_PY_WIZNET5K=5200 MICROPY_PY_CC3K=1 USER_C_MODULES=../../examples/usercmodule
make ${MAKEOPTS} -C ports/stm32 BOARD=PYBD_SF2
make ${MAKEOPTS} -C ports/stm32 BOARD=PYBD_SF6 NANBOX=1 MICROPY_BLUETOOTH_NIMBLE=0 MICROPY_BLUETOOTH_BTSTACK=1
make ${MAKEOPTS} -C ports/stm32/mboot BOARD=PYBV10 CFLAGS_EXTRA='-DMBOOT_FSLOAD=1 -DMBOOT_VFS_LFS2=1'
@ -240,6 +238,11 @@ function ci_stm32_nucleo_build {
make ${MAKEOPTS} -C ports/stm32 BOARD=NUCLEO_L476RG DEBUG=1
make ${MAKEOPTS} -C ports/stm32 BOARD=NUCLEO_WB55
make ${MAKEOPTS} -C ports/stm32/mboot BOARD=NUCLEO_WB55
# Test that mboot_pack_dfu.py created a valid file, and that its unpack-dfu command works.
BOARD_WB55=ports/stm32/boards/NUCLEO_WB55
BUILD_WB55=ports/stm32/build-NUCLEO_WB55
python3 ports/stm32/mboot/mboot_pack_dfu.py -k $BOARD_WB55/mboot_keys.h unpack-dfu $BUILD_WB55/firmware.pack.dfu $BUILD_WB55/firmware.unpack.dfu
diff $BUILD_WB55/firmware.unpack.dfu $BUILD_WB55/firmware.dfu
}
########################################################################################
@ -318,7 +321,7 @@ function ci_unix_minimal_build {
}
function ci_unix_minimal_run_tests {
(cd tests && MICROPY_CPYTHON3=python3 MICROPY_MICROPYTHON=../ports/unix/micropython-minimal ./run-tests -e exception_chain -e self_type_check -e subclass_native_init -d basics)
(cd tests && MICROPY_CPYTHON3=python3 MICROPY_MICROPYTHON=../ports/unix/micropython-minimal ./run-tests.py -e exception_chain -e self_type_check -e subclass_native_init -d basics)
}
function ci_unix_standard_build {
@ -453,7 +456,7 @@ function ci_unix_macos_run_tests {
# - OSX has poor time resolution, so these uasyncio tests do not produce correct output
# - import_pkg7 has a problem with relative imports
# - urandom_basic has a problem with getrandbits(0)
(cd tests && ./run-tests --exclude 'uasyncio_(basic|heaplock|lock|wait_task)' --exclude 'import_pkg7.py' --exclude 'urandom_basic.py')
(cd tests && ./run-tests.py --exclude 'uasyncio_(basic|heaplock|lock|wait_task)' --exclude 'import_pkg7.py' --exclude 'urandom_basic.py')
}
########################################################################################
@ -472,27 +475,28 @@ function ci_windows_build {
# ports/zephyr
function ci_zephyr_setup {
docker pull zephyrprojectrtos/ci:v0.11.8
docker pull zephyrprojectrtos/ci:v0.11.13
docker run --name zephyr-ci -d -it \
-v "$(pwd)":/micropython \
-e ZEPHYR_SDK_INSTALL_DIR=/opt/sdk/zephyr-sdk-0.11.3 \
-e ZEPHYR_SDK_INSTALL_DIR=/opt/sdk/zephyr-sdk-0.12.2 \
-e ZEPHYR_TOOLCHAIN_VARIANT=zephyr \
-e ZEPHYR_BASE=/zephyrproject/zephyr \
-w /micropython/ports/zephyr \
zephyrprojectrtos/ci:v0.11.8
zephyrprojectrtos/ci:v0.11.13
docker ps -a
}
function ci_zephyr_install {
docker exec zephyr-ci west init --mr v2.4.0 /zephyrproject
docker exec zephyr-ci west init --mr v2.5.0 /zephyrproject
docker exec -w /zephyrproject zephyr-ci west update
docker exec -w /zephyrproject zephyr-ci west zephyr-export
}
function ci_zephyr_build {
docker exec zephyr-ci bash -c "make clean; ./make-minimal ${MAKEOPTS}"
docker exec zephyr-ci bash -c "make clean; ./make-minimal ${MAKEOPTS} BOARD=frdm_k64f"
docker exec zephyr-ci bash -c "make clean; make ${MAKEOPTS}"
docker exec zephyr-ci bash -c "make clean; make ${MAKEOPTS} BOARD=frdm_k64f"
docker exec zephyr-ci bash -c "make clean; make ${MAKEOPTS} BOARD=mimxrt1050_evk"
docker exec zephyr-ci bash -c "make clean; make ${MAKEOPTS} BOARD=reel_board"
docker exec zephyr-ci west build -p auto -b qemu_x86 -- -DCONF_FILE=prj_minimal.conf
docker exec zephyr-ci west build -p auto -b frdm_k64f -- -DCONF_FILE=prj_minimal.conf
docker exec zephyr-ci west build -p auto -b qemu_x86
docker exec zephyr-ci west build -p auto -b frdm_k64f
docker exec zephyr-ci west build -p auto -b mimxrt1050_evk
docker exec zephyr-ci west build -p auto -b reel_board
}

View File

@ -34,13 +34,27 @@ import subprocess
# Public functions to be used in the manifest
def include(manifest):
def include(manifest, **kwargs):
"""Include another manifest.
The manifest argument can be a string (filename) or an iterable of
strings.
Relative paths are resolved with respect to the current manifest file.
Optional kwargs can be provided which will be available to the
included script via the `options` variable.
e.g. include("path.py", extra_features=True)
in path.py:
options.defaults(standard_features=True)
# freeze minimal modules.
if options.standard_features:
# freeze standard modules.
if options.extra_features:
# freeze extra modules.
"""
if not isinstance(manifest, str):
@ -53,7 +67,7 @@ def include(manifest):
# Applies to includes and input files.
prev_cwd = os.getcwd()
os.chdir(os.path.dirname(manifest))
exec(f.read())
exec(f.read(), globals(), {"options": IncludeOptions(**kwargs)})
os.chdir(prev_cwd)
@ -125,6 +139,18 @@ VARS = {}
manifest_list = []
class IncludeOptions:
def __init__(self, **kwargs):
self._kwargs = kwargs
self._defaults = {}
def defaults(self, **kwargs):
self._defaults = kwargs
def __getattr__(self, name):
return self._kwargs.get(name, self._defaults.get(name, None))
class FreezeError(Exception):
pass
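The IncludeOptions class above backs the `options` object that include() hands to an included manifest: explicit keyword arguments win, then values set via defaults(), and anything else resolves to None. A minimal sketch of that lookup order, assuming the IncludeOptions class defined above (the option names are invented for illustration):

opts = IncludeOptions(extra_features=True)   # as if include("path.py", extra_features=True)
opts.defaults(standard_features=True)        # as called from inside path.py
print(opts.extra_features)     # True  - explicit kwarg
print(opts.standard_features)  # True  - filled in by defaults()
print(opts.unset_option)       # None  - neither passed nor defaulted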

View File

@ -65,7 +65,7 @@ port_data = {
"s": PortData("stm32", "stm32", "build-PYBV10/firmware.elf", "BOARD=PYBV10"),
"c": PortData("cc3200", "cc3200", "build/WIPY/release/application.axf", "BTARGET=application"),
"8": PortData("esp8266", "esp8266", "build-GENERIC/firmware.elf"),
"3": PortData("esp32", "esp32", "build-GENERIC/application.elf"),
"3": PortData("esp32", "esp32", "build-GENERIC/micropython.elf"),
"r": PortData("nrf", "nrf", "build-pca10040/firmware.elf"),
"d": PortData("samd", "samd", "build-ADAFRUIT_ITSYBITSY_M4_EXPRESS/firmware.elf"),
}

View File

@ -632,7 +632,11 @@ def main():
help="Do not follow the output after running the scripts.",
)
cmd_parser.add_argument(
"-f", "--filesystem", action="store_true", help="perform a filesystem action"
"-f",
"--filesystem",
action="store_true",
help="perform a filesystem action: "
"cp local :device | cp :device local | cat path | ls [path] | rm path | mkdir path | rmdir path",
)
cmd_parser.add_argument("files", nargs="*", help="input files")
args = cmd_parser.parse_args()
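For illustration only, the filesystem action described in the help text above is driven from the command line roughly as follows; the device path and file names here are assumptions, not taken from the repository:

# python3 tools/pyboard.py --device /dev/ttyACM0 -f cp main.py :main.py
# python3 tools/pyboard.py --device /dev/ttyACM0 -f cat main.py
# python3 tools/pyboard.py --device /dev/ttyACM0 -f ls lib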

View File

@ -520,7 +520,7 @@ def write_elements(elements, mass_erase_used, progress=None):
data = elem["data"]
elem_size = size
elem_addr = addr
if progress:
if progress and elem_size:
progress(elem_addr, 0, elem_size)
while size > 0:
write_size = size

View File

@ -57,8 +57,9 @@ def verify(sha):
# Subject line.
subject_line = raw_body[0]
very_verbose("subject_line", subject_line)
if not re.match(r"^[^!]+: [A-Z]+.+ .+\.$", subject_line):
error("Subject line should contain ': ' and end in '.': " + subject_line)
subject_line_format = r"^[^!]+: [A-Z]+.+ .+\.$"
if not re.match(subject_line_format, subject_line):
error("Subject line should match " + repr(subject_line_format) + ": " + subject_line)
if len(subject_line) >= 73:
error("Subject line should be 72 or less characters: " + subject_line)