tests/run-perfbench.py: Allow running tests via mpy and native emitter.
The performance benchmark tests now support `--via-mpy` and `--emit native`
on remote targets. For example:

    $ ./run-perfbench.py -p --via-mpy --emit native 100 100

Signed-off-by: Damien George <damien@micropython.org>
parent 1786dacc83
commit 54ab9d23e9
@@ -13,6 +13,8 @@ from glob import glob
 sys.path.append("../tools")
 import pyboard
 
+prepare_script_for_target = __import__("run-tests").prepare_script_for_target
+
 # Paths for host executables
 if os.name == "nt":
     CPYTHON3 = os.getenv("MICROPY_CPYTHON3", "python3.exe")
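A note on the `prepare_script_for_target` import added above: the module name `run-tests` contains a hyphen, so it is not a valid Python identifier and cannot be imported with a plain `import` statement; importing by string name sidesteps that. A minimal sketch of the pattern (it assumes run-tests.py is importable, i.e. the script runs from the tests/ directory):

```python
import importlib

# "run-tests" is not a valid identifier, so `import run-tests` would be a
# syntax error.  Importing by string name works:
mod = __import__("run-tests")               # the spelling used in the diff
mod = importlib.import_module("run-tests")  # equivalent, more modern form
prepare_script_for_target = mod.prepare_script_for_target
```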
@@ -85,7 +87,7 @@ def run_benchmark_on_target(target, script):
         return -1, -1, "CRASH: %r" % err
 
 
-def run_benchmarks(target, param_n, param_m, n_average, test_list):
+def run_benchmarks(args, target, param_n, param_m, n_average, test_list):
     skip_complex = run_feature_test(target, "complex") != "complex"
     skip_native = run_feature_test(target, "native_check") != "native"
     target_had_error = False
@@ -116,13 +118,19 @@ def run_benchmarks(target, param_n, param_m, n_average, test_list):
         with open("%s.full" % test_file, "wb") as f:
             f.write(test_script)
 
+        # Process script through mpy-cross if needed
+        if isinstance(target, pyboard.Pyboard) or args.via_mpy:
+            test_script_target = prepare_script_for_target(args, script_text=test_script)
+        else:
+            test_script_target = test_script
+
         # Run MicroPython a given number of times
         times = []
         scores = []
         error = None
         result_out = None
         for _ in range(n_average):
-            time, norm, result = run_benchmark_on_target(target, test_script)
+            time, norm, result = run_benchmark_on_target(target, test_script_target)
            if time < 0 or norm < 0:
                 error = result
                 break
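This block is the core of the change: a remote (`pyboard.Pyboard`) target always gets a script prepared by `prepare_script_for_target`, while a local run only does so when `--via-mpy` is given. Restated as a standalone helper, purely as a sketch (`maybe_cross_compile` is a hypothetical name; `pyboard` and `prepare_script_for_target` are the imports from the top of the file):

```python
def maybe_cross_compile(args, target, test_script):
    # Remote targets cannot run the host-side script as-is, and --via-mpy
    # forces the same mpy-cross path for local runs.
    if isinstance(target, pyboard.Pyboard) or args.via_mpy:
        return prepare_script_for_target(args, script_text=test_script)
    return test_script
```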
@@ -247,6 +255,8 @@ def main():
     cmd_parser.add_argument(
         "--emit", default="bytecode", help="MicroPython emitter to use (bytecode or native)"
     )
+    cmd_parser.add_argument("--via-mpy", action="store_true", help="compile code to .mpy first")
+    cmd_parser.add_argument("--mpy-cross-flags", default="", help="flags to pass to mpy-cross")
     cmd_parser.add_argument("N", nargs=1, help="N parameter (approximate target CPU frequency)")
     cmd_parser.add_argument("M", nargs=1, help="M parameter (approximate target heap in kbytes)")
     cmd_parser.add_argument("files", nargs="*", help="input test files")
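For reference, a self-contained sketch of how the new options parse; the parser below is illustrative and omits the script's other arguments. Note that argparse maps `--via-mpy` to `args.via_mpy` and `--mpy-cross-flags` to `args.mpy_cross_flags`, which is how the code above reads them:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--emit", default="bytecode", help="MicroPython emitter to use (bytecode or native)")
parser.add_argument("--via-mpy", action="store_true", help="compile code to .mpy first")
parser.add_argument("--mpy-cross-flags", default="", help="flags to pass to mpy-cross")

args = parser.parse_args(["--via-mpy", "--emit", "native"])
assert args.via_mpy and args.emit == "native" and args.mpy_cross_flags == ""
```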
@@ -264,6 +274,8 @@ def main():
     n_average = int(args.average)
 
     if args.pyboard:
+        if not args.mpy_cross_flags:
+            args.mpy_cross_flags = "-march=armv7m"
         target = pyboard.Pyboard(args.device)
         target.enter_raw_repl()
     else:
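So a plain `-p` run cross-compiles for Cortex-M (armv7m) without any extra flags, while an explicit `--mpy-cross-flags` value still wins. A standalone restatement of the defaulting, with `SimpleNamespace` standing in for the parsed `argparse.Namespace`:

```python
from types import SimpleNamespace

# Sketch: pyboards are ARM Cortex-M class targets, so default mpy-cross to
# -march=armv7m unless the user supplied their own flags.
args = SimpleNamespace(pyboard=True, mpy_cross_flags="")
if args.pyboard and not args.mpy_cross_flags:
    args.mpy_cross_flags = "-march=armv7m"
assert args.mpy_cross_flags == "-march=armv7m"
```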
@@ -284,7 +296,7 @@ def main():
 
     print("N={} M={} n_average={}".format(N, M, n_average))
 
-    target_had_error = run_benchmarks(target, N, M, n_average, tests)
+    target_had_error = run_benchmarks(args, target, N, M, n_average, tests)
 
     if isinstance(target, pyboard.Pyboard):
         target.exit_raw_repl()
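For context, the `pyboard.Pyboard` calls bracketing the run follow the usual raw-REPL lifecycle from tools/pyboard.py. A minimal sketch (the device path is only an example; the real script takes it from `--device`):

```python
import pyboard  # tools/pyboard.py, reachable via the sys.path.append above

target = pyboard.Pyboard("/dev/ttyACM0")  # example device path
target.enter_raw_repl()
try:
    pass  # ... run_benchmarks(args, target, N, M, n_average, tests) ...
finally:
    target.exit_raw_repl()
```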