Merge pull request #7132 from MicroDev1/ci

CI: Schedule PR jobs based on commit-specific changes

Commit c525322a29
Build CI workflow:

@@ -125,20 +125,29 @@ jobs:
         [ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross.static-raspbian s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross.static-raspbian-${{ env.CP_VERSION }} --no-progress --region us-east-1
         [ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross.static s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross.static-amd64-linux-${{ env.CP_VERSION }} --no-progress --region us-east-1
         [ -z "$AWS_ACCESS_KEY_ID" ] || aws s3 cp mpy-cross/mpy-cross.static.exe s3://adafruit-circuit-python/bin/mpy-cross/mpy-cross.static-x64-windows-${{ env.CP_VERSION }}.exe --no-progress --region us-east-1
-    - name: "Get changes"
-      uses: dorny/paths-filter@v2
-      id: filter
-      with:
-        list-files: json
-        filters: |
-          changed:
-            - '**'
-    - name: "Set matrix"
+    - name: Get last commit with checks
+      id: get-last-commit-with-checks
+      if: github.event_name == 'pull_request'
+      working-directory: tools
+      env:
+        REPO: ${{ github.repository }}
+        PULL: ${{ github.event.number }}
+        GITHUB_TOKEN: ${{ github.token }}
+        EXCLUDE_COMMIT: ${{ github.event.after }}
+      run: python3 -u ci_changes_per_commit.py
+    - name: Get changes
+      id: get-changes
+      if: github.event_name == 'pull_request'
+      uses: tj-actions/changed-files@v34
+      with:
+        json: "true"
+        base_sha: ${{ steps.get-last-commit-with-checks.outputs.commit }}
+    - name: Set matrix
       id: set-matrix
       working-directory: tools
       env:
-        CHANGED_FILES: ${{ steps.filter.outputs.changed_files }}
+        CHANGED_FILES: ${{ toJSON(steps.get-changes.outputs.all_changed_and_modified_files) }}
+        LAST_FAILED_JOBS: ${{ steps.get-last-commit-with-checks.outputs.checkruns }}
       run: python3 -u ci_set_matrix.py
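Net effect of the wiring above: ci_changes_per_commit.py publishes two step outputs, commit (the last PR commit that already has Build CI checks, skipping the freshly pushed head passed in as EXCLUDE_COMMIT) and checkruns (the jobs that failed or never finished there); tj-actions/changed-files then diffs the PR against that commit via base_sha, and ci_set_matrix.py consumes both. A self-contained sketch of the base-commit selection with invented data (the real inputs come from the GraphQL queries in the new script below):

# Invented stand-in data; the real script reads this from GitHub's GraphQL API.
# Each tuple: (sha, has_build_ci_check_suite, failed_jobs_recorded_there)
commits = [
    ("aaa111", True, {"build-arm": ["metro_m4_express"]}),
    ("bbb222", False, None),
    ("ccc333", False, None),  # freshly pushed head, i.e. EXCLUDE_COMMIT
]
head = "ccc333"

# Walk newest to oldest, skip the head, take the first commit with checks.
base = next((c for c in reversed(commits) if c[0] != head and c[1]), None)

assert base is not None and base[0] == "aaa111"
# The workflow then builds only the files changed since aaa111, plus the
# boards in the failed build-arm job, instead of every board on every push.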
New file, ci_changes_per_commit.py (run from tools/, per the workflow step above):

@@ -0,0 +1,227 @@
#! /usr/bin/env python3

# SPDX-FileCopyrightText: 2021 microDev
#
# SPDX-License-Identifier: MIT

# GraphQL Query

QUERY_COMMITS = """
query ($owner: String!, $name: String!, $pullNumber: Int!, $commitsPerPage: Int!, $beforeCommit: String) {
  repository(owner: $owner, name: $name) {
    pullRequest(number: $pullNumber) {
      commits(last: $commitsPerPage, before: $beforeCommit) {
        totalCount
        pageInfo {
          startCursor
          hasPreviousPage
        }
        nodes {
          commit {
            checkSuites(first: 3) {
              nodes {
                conclusion
                workflowRun {
                  workflow {
                    name
                  }
                }
                id
              }
              totalCount
            }
            oid
          }
        }
      }
    }
  }
}
"""

QUERY_CHECKRUNS = """
query ($checkSuiteID: ID!,
       $afterFailedRun: String, $afterIncompleteRun: String,
       $includeFailedRuns: Boolean!, $includeIncompleteRuns: Boolean!) {
  node(id: $checkSuiteID) {
    ... on CheckSuite {
      failedRuns: checkRuns(
        first: 100
        after: $afterFailedRun
        filterBy: {checkType: LATEST, conclusions: [ACTION_REQUIRED, TIMED_OUT, CANCELLED, FAILURE, NEUTRAL, STARTUP_FAILURE]}
      ) @include(if: $includeFailedRuns) {
        nodes {
          name
        }
        pageInfo {
          endCursor
          hasNextPage
        }
      }
      incompleteRuns: checkRuns(
        first: 100
        after: $afterIncompleteRun
        filterBy: {checkType: LATEST, statuses: [QUEUED, IN_PROGRESS, WAITING, PENDING, REQUESTED]}
      ) @include(if: $includeIncompleteRuns) {
        nodes {
          name
        }
        pageInfo {
          endCursor
          hasNextPage
        }
      }
    }
  }
}
"""


import os
import re
import json
import requests


query_variables_commits = {
    "owner": "",
    "name": "",
    "pullNumber": int(os.environ["PULL"]),
    "commitsPerPage": 20,
    "beforeCommit": None,
}


query_variables_checkruns = {
    "checkSuiteID": "",
    "afterFailedRun": None,
    "afterIncompleteRun": None,
    "includeFailedRuns": True,
    "includeIncompleteRuns": True,
}


headers = {"Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}"}


class Query:
    def __init__(self, query, variables={}, headers={}):
        self.query = query
        self.variables = variables
        self.headers = headers

    def paginate(self, page_info, name):
        has_page = (
            page_info["hasNextPage"] if name.startswith("after") else page_info["hasPreviousPage"]
        )
        if has_page:
            self.variables[name] = (
                page_info["endCursor"] if name.startswith("after") else page_info["startCursor"]
            )
        return has_page

    def fetch(self):
        request = requests.post(
            "https://api.github.com/graphql",
            json={"query": self.query, "variables": self.variables},
            headers=self.headers,
        )
        if request.status_code == 200:
            return request.json()
        else:
            raise Exception("Query Failed: {}".format(request.status_code))


def set_output(name, value):
    if "GITHUB_OUTPUT" in os.environ:
        with open(os.environ["GITHUB_OUTPUT"], "at") as f:
            print(f"{name}={value}", file=f)
    else:
        print(f"Would set GitHub actions output {name} to '{value}'")


def get_commit_and_checksuite(query_commits):
    commits = query_commits.fetch()["data"]["repository"]["pullRequest"]["commits"]

    if commits["totalCount"] > 0:
        for commit in reversed(commits["nodes"]):
            commit = commit["commit"]
            commit_sha = commit["oid"]
            if commit_sha == os.environ["EXCLUDE_COMMIT"]:
                continue
            checksuites = commit["checkSuites"]
            if checksuites["totalCount"] > 0:
                for checksuite in checksuites["nodes"]:
                    if checksuite["workflowRun"]["workflow"]["name"] == "Build CI":
                        return [
                            commit_sha,
                            checksuite["id"] if checksuite["conclusion"] != "SUCCESS" else None,
                        ]
        else:
            if query_commits.paginate(commits["pageInfo"], "beforeCommit"):
                return get_commit_and_checksuite(query_commits)

    return [None, None]


def append_runs_to_list(runs, list):
    regex_matrix = re.compile("^build-[^ ]+")
    regex_board = re.compile("\([^ ]+\)$")
    for run in runs["nodes"]:
        name = run["name"]
        res_matrix = regex_matrix.search(name)
        if res_matrix:
            matrix = res_matrix.group()
            if matrix not in list:
                list[matrix] = []
            list[matrix].append(regex_board.search(name).group()[1:-1])


def get_bad_checkruns(query_checkruns, list={}):
    checkruns = query_checkruns.fetch()["data"]["node"]
    run_types = ["failed", "incomplete"]
    paginate = False

    for run_type in run_types:
        run_type_camel = run_type.capitalize() + "Run"
        run_type = run_type + "Runs"

        append_runs_to_list(checkruns[run_type], list)

        if query_checkruns.paginate(checkruns[run_type]["pageInfo"], "after" + run_type_camel):
            query_checkruns.variables["include" + run_type_camel] = True
            paginate = True

    return get_bad_checkruns(query_checkruns, list) if paginate else list


def main():
    query_commits = Query(QUERY_COMMITS, query_variables_commits, headers)
    query_commits.variables["owner"], query_commits.variables["name"] = os.environ["REPO"].split(
        "/"
    )

    commit, checksuite = get_commit_and_checksuite(query_commits)

    if checksuite is None:
        if commit is None:
            print("No checkSuites found -> Abort")
        else:
            set_output("commit", commit)
        quit()

    query_checkruns = Query(QUERY_CHECKRUNS, query_variables_checkruns, headers)
    query_checkruns.variables["checkSuiteID"] = checksuite

    checkruns = get_bad_checkruns(query_checkruns)

    if len(checkruns) == 0:
        print("No checkRuns found -> Abort")
        quit()

    set_output("commit", commit)
    set_output("checkruns", json.dumps(checkruns))


if __name__ == "__main__":
    main()
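One subtlety in Query.paginate above: commit pages are walked backwards from the PR head, so the "beforeCommit" cursor follows startCursor/hasPreviousPage, while the check-run cursors ("after...") walk forward via endCursor/hasNextPage. A standalone illustration with a fabricated pageInfo payload:

# Fabricated pageInfo; mirrors the cursor handling in Query.paginate above.
variables = {}

def paginate(page_info, name):
    forward = name.startswith("after")
    has_page = page_info["hasNextPage"] if forward else page_info["hasPreviousPage"]
    if has_page:
        variables[name] = page_info["endCursor"] if forward else page_info["startCursor"]
    return has_page

page_info = {
    "hasNextPage": True, "endCursor": "cursor-100",
    "hasPreviousPage": False, "startCursor": None,
}
assert paginate(page_info, "afterFailedRun")    # forward: fetch the next page
assert variables["afterFailedRun"] == "cursor-100"
assert not paginate(page_info, "beforeCommit")  # backward: no older commits left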
Changes to ci_set_matrix.py (also in tools/):

@@ -25,7 +25,6 @@ import re
 import os
 import sys
 import json
-import yaml
 import pathlib
 from concurrent.futures import ThreadPoolExecutor

@@ -62,6 +61,7 @@ IGNORE = [
 if len(sys.argv) > 1:
     print("Using files list on commandline")
     changed_files = sys.argv[1:]
+    last_failed_jobs = {}
 else:
     c = os.environ["CHANGED_FILES"]
     if c == "":
@@ -69,7 +69,14 @@ else:
         changed_files = []
     else:
         print("Using files list in CHANGED_FILES")
-        changed_files = json.loads(os.environ["CHANGED_FILES"])
+        changed_files = json.loads(c)
+
+    j = os.environ["LAST_FAILED_JOBS"]
+    if j == "":
+        print("LAST_FAILED_JOBS is in environment, but value is empty")
+        last_failed_jobs = {}
+    else:
+        last_failed_jobs = json.loads(j)


 def set_output(name, value):
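The LAST_FAILED_JOBS value parsed here is the checkruns output of ci_changes_per_commit.py: a mapping from matrix job prefix to board names, recovered from check-run titles such as "build-arm (metro_m4_express)" by append_runs_to_list. An invented example of that shape:

import json

# Invented payload, shaped like the checkruns output of ci_changes_per_commit.py.
j = '{"build-arm": ["metro_m4_express"], "build-espressif": ["adafruit_feather_esp32s2"]}'
last_failed_jobs = json.loads(j)
assert last_failed_jobs["build-arm"] == ["metro_m4_express"]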
@@ -196,7 +203,7 @@ def set_boards_to_build(build_all):
     # Split boards by architecture.
     print("Building boards:")
     arch_to_boards = {"aarch": [], "arm": [], "riscv": [], "espressif": []}
-    for board in sorted(boards_to_build):
+    for board in boards_to_build:
         print(" ", board)
         port = board_to_port.get(board)
         # A board can appear due to its _deletion_ (rare)
@@ -208,10 +215,20 @@ def set_boards_to_build(build_all):

     # Set the step outputs for each architecture
     for arch in arch_to_boards:
+        # Append previous failed jobs
+        if f"build-{arch}" in last_failed_jobs:
+            failed_boards = last_failed_jobs[f"build-{arch}"]
+            for board in failed_boards:
+                if not board in arch_to_boards[arch]:
+                    arch_to_boards[arch].append(board)
+        # Set Output
         set_output(f"boards-{arch}", json.dumps(sorted(arch_to_boards[arch])))


 def set_docs_to_build(build_all):
+    if "build-doc" in last_failed_jobs:
+        build_all = True
+
     doc_match = build_all
     if not build_all:
         doc_pattern = re.compile(
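The block added above unions previously failed boards into the freshly computed per-architecture lists, so a new push re-runs a board that failed last time even if the push does not touch it. A standalone sketch with invented data:

# Invented data; same merge logic as the lines added above.
last_failed_jobs = {"build-arm": ["metro_m4_express"]}
arch_to_boards = {"arm": ["feather_m4_express"], "riscv": []}

for arch in arch_to_boards:
    if f"build-{arch}" in last_failed_jobs:
        for board in last_failed_jobs[f"build-{arch}"]:
            if board not in arch_to_boards[arch]:
                arch_to_boards[arch].append(board)

assert arch_to_boards["arm"] == ["feather_m4_express", "metro_m4_express"]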
@@ -224,7 +241,7 @@ def set_docs_to_build(build_all):

     # Set the step outputs
     print("Building docs:", doc_match)
-    set_output(f"build-doc", doc_match)
+    set_output("build-doc", doc_match)


 def check_changed_files():