commit c66b808f55

.github/workflows/build.yml
@@ -27,9 +27,9 @@ jobs:
       boards-aarch: ${{ steps.set-matrix.outputs.boards-aarch }}
     steps:
     - name: Dump GitHub context
-      run: echo "$GITHUB_CONTEXT"
       env:
         GITHUB_CONTEXT: ${{ toJson(github) }}
+      run: echo "$GITHUB_CONTEXT"
     - uses: actions/checkout@v3
       with:
         submodules: false
@@ -135,21 +135,27 @@ jobs:
         GITHUB_TOKEN: ${{ github.token }}
         EXCLUDE_COMMIT: ${{ github.event.after }}
       run: python3 -u ci_changes_per_commit.py
+    - name: Set head sha
+      if: github.event_name == 'pull_request'
+      run: echo "HEAD_SHA=$(git show -s --format=%s $GITHUB_SHA | grep -o -P "(?<=Merge ).*(?= into)")" >> $GITHUB_ENV
+    - name: Set base sha
+      if: github.event_name == 'pull_request'
+      run: |
+        git fetch --no-tags --no-recurse-submodules --depth=$((DEPTH + 1)) origin $HEAD_SHA
+        echo "BASE_SHA=$(git rev-list $HEAD_SHA --skip=$DEPTH --max-count=1)" >> $GITHUB_ENV
+      env:
+        DEPTH: ${{ steps.get-last-commit-with-checks.outputs.commit_depth || github.event.pull_request.commits }}
     - name: Get changes
       id: get-changes
       if: github.event_name == 'pull_request'
-      uses: tj-actions/changed-files@v34
-      with:
-        json: true
-        sha: ${{ steps.get-last-commit-with-checks.outputs.commit && github.event.after }}
-        base_sha: ${{ steps.get-last-commit-with-checks.outputs.commit }}
+      run: echo $(git diff $BASE_SHA...$HEAD_SHA --name-only) | echo "changed_files=[\"$(sed "s/ /\", \"/g")\"]" >> $GITHUB_OUTPUT
     - name: Set matrix
       id: set-matrix
       working-directory: tools
-      env:
-        CHANGED_FILES: ${{ steps.get-changes.outputs.all_changed_and_modified_files }}
-        LAST_FAILED_JOBS: ${{ steps.get-last-commit-with-checks.outputs.checkruns }}
       run: python3 -u ci_set_matrix.py
+      env:
+        CHANGED_FILES: ${{ steps.get-changes.outputs.changed_files }}
+        LAST_FAILED_JOBS: ${{ steps.get-last-commit-with-checks.outputs.check_runs }}
 
 
   mpy-cross-mac:
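Note: the added "Set base sha" step resolves the diff base by commit depth rather than by a stored SHA. A minimal Python sketch of the same lookup (hypothetical helper name; the workflow does this inline with git rev-list):

import subprocess

def base_sha_at_depth(head_sha: str, depth: int) -> str:
    # Mirrors: git rev-list $HEAD_SHA --skip=$DEPTH --max-count=1
    result = subprocess.run(
        ["git", "rev-list", head_sha, f"--skip={depth}", "--max-count=1"],
        capture_output=True, text=True, check=True,
    )
    return result.stdout.strip()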
@@ -412,14 +418,15 @@ jobs:
         path: ${{ github.workspace }}/.idf_tools
         key: ${{ runner.os }}-idf-tools-${{ hashFiles('.git/modules/ports/espressif/esp-idf/HEAD') }}-${{ steps.py3.outputs.python-path }}-20220404
     - name: Clone IDF submodules
-      run: |
-        (cd $IDF_PATH && git submodule update --init)
+      run: git submodule update --init $IDF_PATH
       env:
         IDF_PATH: ${{ github.workspace }}/ports/espressif/esp-idf
     - name: Install IDF tools
       run: |
+        echo "Installing ESP-IDF tools"
         $IDF_PATH/tools/idf_tools.py --non-interactive install required
         $IDF_PATH/tools/idf_tools.py --non-interactive install cmake
+        echo "Installing Python environment and packages"
         $IDF_PATH/tools/idf_tools.py --non-interactive install-python-env
         rm -rf $IDF_TOOLS_PATH/dist
       env:
@@ -437,7 +444,6 @@ jobs:
       run: |
         source $IDF_PATH/export.sh
         gcc --version
-        xtensa-esp32s2-elf-gcc --version
         python3 --version
         ninja --version
         cmake --version
@@ -471,6 +477,7 @@ jobs:
         AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
       if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))
 
+
   build-aarch:
     runs-on: ubuntu-20.04
     needs: test
tools/ci_changes_per_commit.py

@@ -18,7 +18,7 @@ query ($owner: String!, $name: String!, $pullNumber: Int!, $commitsPerPage: Int!
         }
         nodes {
           commit {
-            checkSuites(first: 3) {
+            checkSuites(first: 100) {
               nodes {
                 conclusion
                 workflowRun {
@@ -39,7 +39,7 @@ query ($owner: String!, $name: String!, $pullNumber: Int!, $commitsPerPage: Int!
 }
 """
 
-QUERY_CHECKRUNS = """
+QUERY_CHECK_RUNS = """
 query ($checkSuiteID: ID!,
        $afterFailedRun: String, $afterIncompleteRun: String,
        $includeFailedRuns: Boolean!, $includeIncompleteRuns: Boolean!) {
@@ -92,7 +92,7 @@ query_variables_commits = {
 }
 
 
-query_variables_checkruns = {
+query_variables_check_runs = {
     "checkSuiteID": "",
     "afterFailedRun": None,
     "afterIncompleteRun": None,
@@ -111,13 +111,11 @@ class Query:
         self.headers = headers
 
     def paginate(self, page_info, name):
-        has_page = (
-            page_info["hasNextPage"] if name.startswith("after") else page_info["hasPreviousPage"]
-        )
+        has_page = page_info["hasNextPage" if name.startswith("after") else "hasPreviousPage"]
         if has_page:
-            self.variables[name] = (
-                page_info["endCursor"] if name.startswith("after") else page_info["startCursor"]
-            )
+            self.variables[name] = page_info[
+                "endCursor" if name.startswith("after") else "startCursor"
+            ]
         return has_page
 
     def fetch(self):
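Note: both rewrites above preserve paginate()'s contract: it stores the next cursor in self.variables[name] and returns whether another page exists. A minimal sketch of the loop that drives it (shape taken from get_commit_depth_and_check_suite below):

# Sketch only: each fetched page carries pageInfo; paginate() records the
# next cursor in query.variables and reports whether more pages remain.
query_commits = Query(QUERY_COMMITS, query_variables_commits, headers)
while True:
    commits = query_commits.fetch()["data"]["repository"]["pullRequest"]["commits"]
    # ... inspect commits["nodes"] here ...
    if not query_commits.paginate(commits["pageInfo"], "beforeCommit"):
        break  # no earlier page of commits left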
@@ -141,28 +139,31 @@ def set_output(name, value):
         print(f"Would set GitHub actions output {name} to '{value}'")
 
 
-def get_commit_and_checksuite(query_commits):
-    commits = query_commits.fetch()["data"]["repository"]["pullRequest"]["commits"]
-
-    if commits["totalCount"] > 0:
-        for commit in reversed(commits["nodes"]):
-            commit = commit["commit"]
-            commit_sha = commit["oid"]
-            if commit_sha == os.environ["EXCLUDE_COMMIT"]:
-                continue
-            checksuites = commit["checkSuites"]
-            if checksuites["totalCount"] > 0:
-                for checksuite in checksuites["nodes"]:
-                    if checksuite["workflowRun"]["workflow"]["name"] == "Build CI":
-                        return [
-                            commit_sha,
-                            checksuite["id"] if checksuite["conclusion"] != "SUCCESS" else None,
-                        ]
-    else:
-        if query_commits.paginate(commits["pageInfo"], "beforeCommit"):
-            return get_commit_and_checksuite(query_commits)
-
-    return [None, None]
+def get_commit_depth_and_check_suite(query_commits):
+    commit_depth = 0
+    while True:
+        commits = query_commits.fetch()["data"]["repository"]["pullRequest"]["commits"]
+        if commits["totalCount"] > 0:
+            nodes = commits["nodes"]
+            nodes.reverse()
+            if nodes[0]["commit"]["oid"] == os.environ["EXCLUDE_COMMIT"]:
+                nodes.pop(0)
+            for commit in nodes:
+                commit_depth += 1
+                commit = commit["commit"]
+                commit_sha = commit["oid"]
+                check_suites = commit["checkSuites"]
+                if check_suites["totalCount"] > 0:
+                    for check_suite in check_suites["nodes"]:
+                        if check_suite["workflowRun"]["workflow"]["name"] == "Build CI":
+                            return [
+                                {"sha": commit_sha, "depth": commit_depth},
+                                check_suite["id"]
+                                if check_suite["conclusion"] != "SUCCESS"
+                                else None,
+                            ]
+        if not query_commits.paginate(commits["pageInfo"], "beforeCommit"):
+            return [None, None]
 
 
 def append_runs_to_list(runs, bad_runs_by_matrix):
@@ -180,53 +181,61 @@ def append_runs_to_list(runs, bad_runs_by_matrix):
         bad_runs_by_matrix[matrix].append(res_board.group()[1:-1])
 
 
-def get_bad_checkruns(query_checkruns):
+def get_bad_check_runs(query_check_runs):
     more_pages = True
     bad_runs_by_matrix = {}
+    run_types = ["failed", "incomplete"]
 
     while more_pages:
-        checkruns = query_checkruns.fetch()["data"]["node"]
-        run_types = ["failed", "incomplete"]
+        check_runs = query_check_runs.fetch()["data"]["node"]
         more_pages = False
 
         for run_type in run_types:
             run_type_camel = run_type.capitalize() + "Run"
             run_type = run_type + "Runs"
 
-            append_runs_to_list(checkruns[run_type], bad_runs_by_matrix)
+            append_runs_to_list(check_runs[run_type], bad_runs_by_matrix)
 
-            if query_checkruns.paginate(checkruns[run_type]["pageInfo"], "after" + run_type_camel):
-                query_checkruns.variables["include" + run_type_camel] = True
+            if query_check_runs.paginate(
+                check_runs[run_type]["pageInfo"], "after" + run_type_camel
+            ):
+                query_check_runs.variables["include" + run_type_camel] = True
                 more_pages = True
 
     return bad_runs_by_matrix
 
 
+def set_commit(commit):
+    set_output("commit_sha", commit["sha"])
+    set_output("commit_depth", commit["depth"])
+
+
 def main():
     query_commits = Query(QUERY_COMMITS, query_variables_commits, headers)
     query_commits.variables["owner"], query_commits.variables["name"] = os.environ["REPO"].split(
         "/"
     )
 
-    commit, checksuite = get_commit_and_checksuite(query_commits)
+    commit, check_suite = get_commit_depth_and_check_suite(query_commits)
 
-    if checksuite is None:
-        if commit is None:
-            print("No checkSuites found -> Abort")
+    if not check_suite:
+        if commit:
+            set_commit(commit)
         else:
-            set_output("commit", commit)
+            print("Abort: No check suite found")
         quit()
 
-    query_checkruns = Query(QUERY_CHECKRUNS, query_variables_checkruns, headers)
-    query_checkruns.variables["checkSuiteID"] = checksuite
+    query_check_runs = Query(QUERY_CHECK_RUNS, query_variables_check_runs, headers)
+    query_check_runs.variables["checkSuiteID"] = check_suite
 
-    checkruns = get_bad_checkruns(query_checkruns)
+    check_runs = get_bad_check_runs(query_check_runs)
 
-    if len(checkruns) == 0:
-        print("No checkRuns found -> Abort")
+    if not check_runs:
+        print("Abort: No check runs found")
         quit()
 
-    set_output("commit", commit)
-    set_output("checkruns", json.dumps(checkruns))
+    set_commit(commit)
+    set_output("check_runs", json.dumps(check_runs))
 
 
 if __name__ == "__main__":
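Note: set_commit() and set_output() append key=value lines to the file named by $GITHUB_OUTPUT, which the workflow reads back as steps.get-last-commit-with-checks.outputs.*. An illustration with hypothetical values (the matrix key and board name are made up):

set_commit({"sha": "1a2b3c4d", "depth": 3})
# appends: commit_sha=1a2b3c4d
#          commit_depth=3
set_output("check_runs", json.dumps({"build-arm": ["board_a"]}))
# appends: check_runs={"build-arm": ["board_a"]}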
tools/ci_fetch_deps.py

@@ -70,6 +70,8 @@ run(
     "Fetch back to the start of 2021 to get commit history",
     f'git fetch --recurse-submodules=no --shallow-since="2021-07-01" origin {ref}',
 )
+# See https://stackoverflow.com/questions/63878612/git-fatal-error-in-object-unshallow-sha-1#comment118418373_63879454
+run('Fix for bug "fatal: error in object: unshallow"', "git repack -d")
 run("Init submodules", "git submodule init")
 run("Submodule status", "git submodule status")
 
tools/ci_set_matrix.py

@@ -26,6 +26,7 @@ import os
 import sys
 import json
 import pathlib
+import subprocess
 from concurrent.futures import ThreadPoolExecutor
 
 tools_dir = pathlib.Path(__file__).resolve().parent
@@ -82,7 +83,7 @@ else:
     last_failed_jobs = json.loads(j)
 
 
-def set_output(name, value):
+def set_output(name: str, value):
     if "GITHUB_OUTPUT" in os.environ:
         with open(os.environ["GITHUB_OUTPUT"], "at") as f:
             print(f"{name}={value}", file=f)
@@ -90,7 +91,7 @@ def set_output(name, value):
         print(f"Would set GitHub actions output {name} to '{value}'")
 
 
-def set_boards_to_build(build_all):
+def set_boards_to_build(build_all: bool):
     # Get boards in json format
     boards_info_json = build_board_info.get_board_mapping()
     all_board_ids = set()
@@ -228,23 +229,34 @@ def set_boards_to_build(build_all):
     set_output(f"boards-{arch}", json.dumps(sorted(arch_to_boards[arch])))
 
 
-def set_docs_to_build(build_all):
-    if "build-doc" in last_failed_jobs:
-        build_all = True
-
-    doc_match = build_all
-    if not build_all:
-        doc_pattern = re.compile(
-            r"^(?:.github/workflows/|docs|extmod/ulab|(?:(?:ports/\w+/bindings|shared-bindings)\S+\.c|conf\.py|tools/extract_pyi\.py|requirements-doc\.txt)$)|(?:-stubs|\.(?:md|MD|rst|RST))$"
-        )
-        for p in changed_files:
-            if doc_pattern.search(p):
-                doc_match = True
-                break
+def set_docs_to_build(build_doc: bool):
+    if not build_doc:
+        if "build-doc" in last_failed_jobs:
+            build_doc = True
+        else:
+            doc_pattern = re.compile(
+                r"^(?:\.github\/workflows\/|docs|extmod\/ulab|(?:(?:ports\/\w+\/bindings|shared-bindings)\S+\.c|conf\.py|tools\/extract_pyi\.py|requirements-doc\.txt)$)|(?:-stubs|\.(?:md|MD|rst|RST))$"
+            )
+            github_workspace = os.environ.get("GITHUB_WORKSPACE") or ""
+            github_workspace = github_workspace and github_workspace + "/"
+            for p in changed_files:
+                if doc_pattern.search(p) and (
+                    (
+                        subprocess.run(
+                            f"git diff -U0 $BASE_SHA...$HEAD_SHA {github_workspace + p} | grep -o -m 1 '^[+-]\/\/|'",
+                            capture_output=True,
+                            shell=True,
+                        ).stdout
+                    )
+                    if p.endswith(".c")
+                    else True
+                ):
+                    build_doc = True
+                    break
 
     # Set the step outputs
-    print("Building docs:", doc_match)
-    set_output("build-doc", doc_match)
+    print("Building docs:", build_doc)
+    set_output("build-doc", build_doc)
 
 
 def check_changed_files():
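Note: the rewritten set_docs_to_build() only rebuilds docs for a changed .c file when the touched diff lines contain doc comments (the grep for '^[+-]//|'). The path filter itself is easy to sanity-check in isolation, with illustrative paths that are not from this commit:

import re

doc_pattern = re.compile(
    r"^(?:\.github\/workflows\/|docs|extmod\/ulab|(?:(?:ports\/\w+\/bindings|shared-bindings)\S+\.c|conf\.py|tools\/extract_pyi\.py|requirements-doc\.txt)$)|(?:-stubs|\.(?:md|MD|rst|RST))$"
)

# Hypothetical changed paths and whether they would trigger a docs build:
for path in ["docs/index.rst", "shared-bindings/busio/UART.c", "main.c"]:
    print(path, bool(doc_pattern.search(path)))
# docs/index.rst True
# shared-bindings/busio/UART.c True
# main.c False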