#! /usr/bin/env python3

# SPDX-FileCopyrightText: 2021 Scott Shawcroft
#
# SPDX-FileCopyrightText: 2021 microDev
#
# SPDX-License-Identifier: MIT

"""
This script is used in GitHub Actions to determine which docs/boards are
built based on what files were changed. The base commit varies depending
on the event that triggered the run. Pull request runs will compare to the
base branch while pushes will compare to the current ref. We override this
for the adafruit/circuitpython repo so we build all docs/boards for pushes.

When making changes to the script it is useful to test it manually.
You can, for instance, run
```shell
tools/ci_set_matrix ports/raspberrypi/common-hal/socket/SSLSocket.c
```
and (at the time this comment was written) get a series of messages indicating
that only the single board raspberry_pi_pico_w would be built.
"""

import re
import os
import sys
import json
import pathlib
import subprocess
from concurrent.futures import ThreadPoolExecutor

tools_dir = pathlib.Path(__file__).resolve().parent
top_dir = tools_dir.parent

sys.path.insert(0, str(tools_dir / "adabot"))
sys.path.insert(0, str(top_dir / "docs"))

import build_board_info
from shared_bindings_matrix import (
    get_settings_from_makefile,
    SUPPORTED_PORTS,
    all_ports_all_boards,
)
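
# Map each port to the architecture label used to split the CI build jobs.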
PORT_TO_ARCH = {
    "atmel-samd": "arm",
    "broadcom": "aarch",
    "cxd56": "arm",
    "espressif": "espressif",
    "litex": "riscv",
    "mimxrt10xx": "arm",
    "nrf": "arm",
    "raspberrypi": "arm",
    "stm": "arm",
}
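
# Changes to these specific files never require rebuilding any boards.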
IGNORE = [
    "tools/ci_set_matrix.py",
    "tools/ci_check_duplicate_usb_vid_pid.py",
]

# Files in these directories never influence board builds
IGNORE_DIRS = ["tests", "docs", ".devcontainer"]
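
# The changed-file list comes either from the command line (handy when testing this
# script by hand) or from the CHANGED_FILES environment variable set by the workflow.
# LAST_FAILED_JOBS lets a re-run also pick up boards that failed last time.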
if len(sys.argv) > 1:
    print("Using files list on commandline")
    changed_files = sys.argv[1:]
    last_failed_jobs = {}
else:
    c = os.environ["CHANGED_FILES"]
    if c == "":
        print("CHANGED_FILES is in environment, but value is empty")
        changed_files = []
    else:
        print("Using files list in CHANGED_FILES")
        changed_files = json.loads(c.replace("\\", ""))

    j = os.environ["LAST_FAILED_JOBS"]
    if j == "":
        print("LAST_FAILED_JOBS is in environment, but value is empty")
        last_failed_jobs = {}
    else:
        last_failed_jobs = json.loads(j)
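

# Step outputs are appended to the file named by GITHUB_OUTPUT when running under
# GitHub Actions; outside of CI the value is only printed, which helps local testing.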
def set_output(name: str, value):
    if "GITHUB_OUTPUT" in os.environ:
        with open(os.environ["GITHUB_OUTPUT"], "at") as f:
            print(f"{name}={value}", file=f)
    else:
        print(f"Would set GitHub actions output {name} to '{value}'")


def set_boards_to_build(build_all: bool):
    # Get boards in json format
    boards_info_json = build_board_info.get_board_mapping()
    all_board_ids = set()
    port_to_boards = {}
    board_to_port = {}
    board_settings = {}
    for board_id in boards_info_json:
        info = boards_info_json[board_id]
        if info.get("alias", False):
            continue
        all_board_ids.add(board_id)
        port = info["port"]
        if port not in port_to_boards:
            port_to_boards[port] = set()
        port_to_boards[port].add(board_id)
        board_to_port[board_id] = port
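
    # Settings are read from each board's Makefile, but only for boards that are not
    # already cached, and in parallel across a thread pool.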
    def compute_board_settings(boards):
        need = set(boards) - set(board_settings.keys())
        if not need:
            return

        def get_settings(board):
            return (
                board,
                get_settings_from_makefile(str(top_dir / "ports" / board_to_port[board]), board),
            )

        with ThreadPoolExecutor(max_workers=os.cpu_count()) as ex:
            board_settings.update(ex.map(get_settings, need))
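
    # Start from "build everything"; when build_all is False the loop below narrows
    # the set down to just the boards affected by the changed files.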
    boards_to_build = all_board_ids

    if not build_all:
        boards_to_build = set()
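        # Patterns that map a changed path to a specific board, to a port, or to a
        # module directory (common-hal, bindings, shared-bindings or shared-module).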
        board_pattern = re.compile(r"^ports/[^/]+/boards/([^/]+)/")
        port_pattern = re.compile(r"^ports/([^/]+)/")
        module_pattern = re.compile(
            r"^(ports/[^/]+/(?:common-hal|bindings)|shared-bindings|shared-module)/([^/]+)/"
        )
        for p in changed_files:
            # See if it is board specific
            board_matches = board_pattern.search(p)
            if board_matches:
                board = board_matches.group(1)
                boards_to_build.add(board)
                continue

            # See if it is port specific
            port_matches = port_pattern.search(p)
            port = port_matches.group(1) if port_matches else None
            module_matches = module_pattern.search(p)
            if port and not module_matches:
                if port != "unix":
                    boards_to_build.update(port_to_boards[port])
                continue

            # Check the ignore list to see if the file isn't used on board builds.
            if p in IGNORE:
                continue

            if any([p.startswith(d) for d in IGNORE_DIRS]):
                continue

            # As a (nearly) last resort, for certain files we compute the settings from the
            # makefile for each board and determine whether to build them that way.
            if p.startswith("frozen") or p.startswith("supervisor") or module_matches:
                if port:
                    board_ids = port_to_boards[port]
                else:
                    board_ids = all_board_ids
                compute_board_settings(board_ids)
                for board in board_ids:
                    settings = board_settings[board]

                    # Check frozen files to see if they are in each board.
                    frozen = settings.get("FROZEN_MPY_DIRS", "")
                    if frozen and p.startswith("frozen") and p in frozen:
                        boards_to_build.add(board)
                        continue

                    # Check supervisor files. This is useful for limiting workflow changes to the
                    # relevant boards.
                    supervisor = settings["SRC_SUPERVISOR"]
                    if p.startswith("supervisor"):
                        if p in supervisor:
                            boards_to_build.add(board)
                            continue
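
                        # CIRCUITPY_WEB_WORKFLOW may be set to another make variable
                        # (a "$(...)" value); follow such references until we reach a
                        # literal value before comparing it to "0".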
                        web_workflow = settings["CIRCUITPY_WEB_WORKFLOW"]
                        while web_workflow.startswith("$("):
                            web_workflow = settings[web_workflow[2:-1]]
                        if (
                            p.startswith("supervisor/shared/web_workflow/static/")
                            and web_workflow != "0"
                        ):
                            boards_to_build.add(board)
                            continue

                    # Check module matches
                    if module_matches:
                        module = module_matches.group(2) + "/"
                        if module in settings["SRC_PATTERNS"]:
                            boards_to_build.add(board)
                            continue
                continue

            # Otherwise build it all
            boards_to_build = all_board_ids
            break

    # Split boards by architecture.
    print("Building boards:")
    arch_to_boards = {"aarch": [], "arm": [], "riscv": [], "espressif": []}
    for board in sorted(boards_to_build):
        print(" ", board)
        port = board_to_port.get(board)
        # A board can appear due to its _deletion_ (rare)
        # if this happens it's not in `board_to_port`.
        if not port:
            continue
        arch = PORT_TO_ARCH[port]
        arch_to_boards[arch].append(board)

    # Set the step outputs for each architecture
    for arch in arch_to_boards:
        # Append previous failed jobs
        if f"build-{arch}" in last_failed_jobs:
            failed_boards = last_failed_jobs[f"build-{arch}"]
            for board in failed_boards:
                if board not in arch_to_boards[arch]:
                    print(" ", board)
                    arch_to_boards[arch].append(board)
        # Set Output
        set_output(f"boards-{arch}", json.dumps(sorted(arch_to_boards[arch])))


def set_docs_to_build(build_doc: bool):
    if not build_doc:
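        # If the docs job failed on the previous run, rebuild the docs regardless of
        # which files changed.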
if "build-doc" in last_failed_jobs:
|
|
|
|
build_doc = True
|
|
|
|
else:
|
|
|
|
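            # Paths that may affect the docs: workflow files, docs/, extmod/ulab,
            # bindings .c sources, conf.py, extract_pyi.py, requirements-doc.txt,
            # stub-related paths, and Markdown/reST documents.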
            doc_pattern = re.compile(
                r"^(?:.github/workflows/|docs|extmod/ulab|(?:(?:ports/\w+/bindings|shared-bindings)\S+\.c|conf\.py|tools/extract_pyi\.py|requirements-doc\.txt)$)|(?:-stubs|\.(?:md|MD|rst|RST))$"
            )
            github_workspace = os.environ.get("GITHUB_WORKSPACE") or ""
            github_workspace = github_workspace and github_workspace + "/"
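            # A changed .c file only affects the docs when its diff touches "//|" lines,
            # the embedded documentation comments in the bindings sources, so check the
            # diff between the base and head commits for such lines.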
            for p in changed_files:
                if doc_pattern.search(p) and (
                    (
                        subprocess.run(
                            f"git diff -U0 $BASE_SHA...$HEAD_SHA {github_workspace + p} | grep -o -m 1 '^[+-]\/\/|'",
                            capture_output=True,
                            shell=True,
                        ).stdout
                    )
                    if p.endswith(".c")
                    else True
                ):
                    build_doc = True
                    break

    # Set the step outputs
    print("Building docs:", build_doc)
    set_output("build-doc", build_doc)


def check_changed_files():
    if not changed_files:
        print("Building all docs/boards")
        return True
    else:
        print("Adding docs/boards to build based on changed files")
        return False


def main():
    build_all = check_changed_files()
    set_docs_to_build(build_all)
    set_boards_to_build(build_all)


if __name__ == "__main__":
    main()