2022-10-26 08:49:15 -04:00
|
|
|
#! /usr/bin/env python3
|
|
|
|
|
|
|
|
# SPDX-FileCopyrightText: 2021 microDev
|
|
|
|
#
|
|
|
|
# SPDX-License-Identifier: MIT
|
|
|
|
|
|
|
|
# GraphQL Query

# Fetches the pull request's commits (paginated backward from the newest via
# $beforeCommit) together with each commit's check suites and the name of the
# workflow that created each suite.
QUERY_COMMITS = """
query ($owner: String!, $name: String!, $pullNumber: Int!, $commitsPerPage: Int!, $beforeCommit: String) {
  repository(owner: $owner, name: $name) {
    pullRequest(number: $pullNumber) {
      commits(last: $commitsPerPage, before: $beforeCommit) {
        totalCount
        pageInfo {
          startCursor
          hasPreviousPage
        }
        nodes {
          commit {
            checkSuites(first: 100) {
              nodes {
                conclusion
                workflowRun {
                  workflow {
                    name
                  }
                }
                id
              }
              totalCount
            }
            oid
          }
        }
      }
    }
  }
}
"""
|
|
|
|
|
2023-01-07 01:20:31 -05:00
|
|
|
QUERY_CHECK_RUNS = """
|
2022-10-26 08:49:15 -04:00
|
|
|
query ($checkSuiteID: ID!,
|
|
|
|
$afterFailedRun: String, $afterIncompleteRun: String,
|
|
|
|
$includeFailedRuns: Boolean!, $includeIncompleteRuns: Boolean!) {
|
|
|
|
node(id: $checkSuiteID) {
|
|
|
|
... on CheckSuite {
|
|
|
|
failedRuns: checkRuns(
|
|
|
|
first: 100
|
|
|
|
after: $afterFailedRun
|
|
|
|
filterBy: {checkType: LATEST, conclusions: [ACTION_REQUIRED, TIMED_OUT, CANCELLED, FAILURE, NEUTRAL, STARTUP_FAILURE]}
|
|
|
|
) @include(if: $includeFailedRuns) {
|
|
|
|
nodes {
|
|
|
|
name
|
|
|
|
}
|
|
|
|
pageInfo {
|
|
|
|
endCursor
|
|
|
|
hasNextPage
|
|
|
|
}
|
|
|
|
}
|
|
|
|
incompleteRuns: checkRuns(
|
|
|
|
first: 100
|
|
|
|
after: $afterIncompleteRun
|
|
|
|
filterBy: {checkType: LATEST, statuses: [QUEUED, IN_PROGRESS, WAITING, PENDING, REQUESTED]}
|
|
|
|
) @include(if: $includeIncompleteRuns) {
|
|
|
|
nodes {
|
|
|
|
name
|
|
|
|
}
|
|
|
|
pageInfo {
|
|
|
|
endCursor
|
|
|
|
hasNextPage
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
|
|
import json
import os
import re
import sys

import requests
|
|
|
|
|
|
|
|
|
|
|
|
# Variables for QUERY_COMMITS. owner/name are filled in by main() from the
# REPO environment variable; beforeCommit is advanced by Query.paginate().
query_variables_commits = {
    "owner": "",
    "name": "",
    # Pull request number to inspect, from the PULL environment variable.
    "pullNumber": int(os.environ["PULL"]),
    # Page size for backward pagination through the PR's commits.
    "commitsPerPage": 20,
    # Cursor: fetch the page of commits before this one (None = newest page).
    "beforeCommit": None,
}
|
|
|
|
|
|
|
|
|
2023-01-07 01:20:31 -05:00
|
|
|
# Variables for QUERY_CHECK_RUNS. checkSuiteID is filled in by main();
# the after* cursors are advanced by Query.paginate().
query_variables_check_runs = {
    "checkSuiteID": "",
    # Forward-pagination cursors, one per checkRuns connection.
    "afterFailedRun": None,
    "afterIncompleteRun": None,
    # Toggles for the @include directives on the two connections.
    "includeFailedRuns": True,
    "includeIncompleteRuns": True,
}
|
|
|
|
|
|
|
|
|
|
|
|
# Auth header for the GitHub GraphQL API; requires GITHUB_TOKEN in the environment.
headers = {"Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}"}
|
|
|
|
|
|
|
|
|
|
|
|
class Query:
    """Minimal client for one GitHub GraphQL query and its variables."""

    def __init__(self, query, variables=None, headers=None):
        """Store the query string, variables dict and HTTP headers.

        The original used mutable default arguments (``variables={}``),
        which silently shares one dict across every instance created
        without explicit arguments; ``None`` sentinels fix that while
        remaining call-compatible.
        """
        self.query = query
        self.variables = {} if variables is None else variables
        self.headers = {} if headers is None else headers

    def paginate(self, page_info, name):
        """Advance the cursor variable ``name`` from a GraphQL pageInfo dict.

        Cursor names starting with "after" page forward (endCursor /
        hasNextPage); any other name (e.g. "beforeCommit") pages backward
        (startCursor / hasPreviousPage). Returns True when another page
        exists, after storing the new cursor in ``self.variables``.
        """
        forward = name.startswith("after")
        has_page = page_info["hasNextPage" if forward else "hasPreviousPage"]
        if has_page:
            self.variables[name] = page_info["endCursor" if forward else "startCursor"]
        return has_page

    def fetch(self):
        """POST the query to the GitHub GraphQL endpoint and return the JSON.

        Prints the response body and raises Exception (with the HTTP status
        code, matching the original error contract) on any non-200 response.
        """
        response = requests.post(
            "https://api.github.com/graphql",
            json={"query": self.query, "variables": self.variables},
            headers=self.headers,
        )
        if response.status_code == 200:
            return response.json()
        print(response.json())
        raise Exception("Query Failed: {}".format(response.status_code))
|
|
|
|
|
|
|
|
|
|
|
|
def set_output(name, value):
    """Publish ``name=value`` as a GitHub Actions step output.

    When GITHUB_OUTPUT is not set (local runs), just echo what would
    have been written.
    """
    output_path = os.environ.get("GITHUB_OUTPUT")
    if output_path is None:
        print(f"Would set GitHub actions output {name} to '{value}'")
    else:
        with open(output_path, "at") as output_stream:
            print(f"{name}={value}", file=output_stream)
|
|
|
|
|
|
|
|
|
2023-01-07 01:25:40 -05:00
|
|
|
def get_commit_depth_and_check_suite(query_commits):
    """Walk the PR's commits newest-first and find the latest "Build CI" suite.

    Pages backward through the pull request's commits until a commit with a
    "Build CI" check suite is found, skipping the commit named by the
    EXCLUDE_COMMIT environment variable when it is the newest one.

    Returns a two-element list:
      [{"sha": ..., "depth": ...}, suite_id]  when the suite did not succeed,
      [{"sha": ..., "depth": ...}, None]      when the suite succeeded,
      [None, None]                            when no suite was found.
    depth is 1 for the newest (non-excluded) commit, counting backward.
    """
    commit_depth = 0

    while True:
        commits = query_commits.fetch()["data"]["repository"]["pullRequest"]["commits"]
        if commits["totalCount"] > 0:
            nodes = commits["nodes"]
            # The page lists commits oldest-first; we want newest-first.
            nodes.reverse()
            # Skip the commit that triggered this workflow, if it leads.
            if nodes[0]["commit"]["oid"] == os.environ["EXCLUDE_COMMIT"]:
                nodes.pop(0)
            for node in nodes:
                commit_depth += 1
                commit = node["commit"]
                commit_sha = commit["oid"]
                check_suites = commit["checkSuites"]
                if check_suites["totalCount"] > 0:
                    for check_suite in check_suites["nodes"]:
                        workflow_run = check_suite["workflowRun"]
                        # Fix: workflowRun is nullable (suites created by
                        # apps rather than workflows); the original crashed
                        # with TypeError on such suites.
                        if workflow_run is None:
                            continue
                        if workflow_run["workflow"]["name"] == "Build CI":
                            return [
                                {"sha": commit_sha, "depth": commit_depth},
                                check_suite["id"]
                                if check_suite["conclusion"] != "SUCCESS"
                                else None,
                            ]
        # Nothing on this page; fetch the previous (older) page if any.
        if not query_commits.paginate(commits["pageInfo"], "beforeCommit"):
            return [None, None]
|
2022-10-26 08:49:15 -04:00
|
|
|
|
|
|
|
|
2023-01-07 01:20:31 -05:00
|
|
|
def get_bad_check_runs(query_check_runs):
    """Collect the failed and still-incomplete check runs of a check suite.

    Returns a dict mapping run name -> True, except "ports" matrix jobs,
    which are grouped under a single "ports" key as a list of matrix values
    (the text inside the trailing parentheses of the run name).

    Returns {} as soon as a run that other jobs depend on ("scheduler",
    "mpy-cross", "tests") is bad, signalling that everything must rerun.
    """
    bad_runs = {}
    more_pages = True

    run_types = ["failed", "incomplete"]
    # Runs whose failure invalidates every other run.
    have_dependent_jobs = ["scheduler", "mpy-cross", "tests"]

    while more_pages:
        check_runs = query_check_runs.fetch()["data"]["node"]
        more_pages = False

        for run_type in run_types:
            run_type_camel = run_type.capitalize() + "Run"
            run_type = run_type + "Runs"

            for check_run in check_runs[run_type]["nodes"]:
                name = check_run["name"]

                # A bad dependency job forces a full rerun; stop collecting.
                if any(name.startswith(job) for job in have_dependent_jobs):
                    return {}

                if name.startswith("ports"):
                    # e.g. "ports / build (atmel-samd)" -> "atmel-samd"
                    matrix_job = name.rsplit(" (", 1)[1][:-1]
                    bad_runs.setdefault("ports", []).append(matrix_job)
                else:
                    bad_runs[name] = True

            if query_check_runs.paginate(
                check_runs[run_type]["pageInfo"], "after" + run_type_camel
            ):
                # Keep this run type included on the next page fetch.
                # Fix: the GraphQL variables are plural ("includeFailedRuns");
                # the original wrote a dead "include...Run" key, which only
                # worked because the plural keys are initialized True.
                query_check_runs.variables["include" + run_type_camel + "s"] = True
                more_pages = True

    return bad_runs
|
2022-10-26 08:49:15 -04:00
|
|
|
|
|
|
|
|
2023-01-07 01:25:40 -05:00
|
|
|
def set_commit(commit):
    """Expose the chosen commit's sha and depth as workflow outputs."""
    for key in ("sha", "depth"):
        set_output(f"commit_{key}", commit[key])
|
|
|
|
|
|
|
|
|
2022-10-26 08:49:15 -04:00
|
|
|
def main():
    """Entry point: find the latest "Build CI" suite and emit its bad runs.

    Reads REPO, PULL, EXCLUDE_COMMIT and GITHUB_TOKEN from the environment
    (partly via the module-level variable dicts) and writes the commit_sha,
    commit_depth and check_runs GitHub Actions outputs.
    """
    query_commits = Query(QUERY_COMMITS, query_variables_commits, headers)
    query_commits.variables["owner"], query_commits.variables["name"] = os.environ[
        "REPO"
    ].split("/")

    commit, check_suite = get_commit_depth_and_check_suite(query_commits)

    if not check_suite:
        # No failed "Build CI" suite: either every suite succeeded (emit the
        # commit so downstream can reuse its artifacts) or none was found.
        if commit:
            set_commit(commit)
        else:
            print("Abort: No check suite found")
        # Fix: quit() is an interactive helper provided by the site module
        # and is not guaranteed to exist (e.g. under python -S); sys.exit()
        # raises the same SystemExit reliably.
        sys.exit()

    query_check_runs = Query(QUERY_CHECK_RUNS, query_variables_check_runs, headers)
    query_check_runs.variables["checkSuiteID"] = check_suite

    check_runs = get_bad_check_runs(query_check_runs)

    if not check_runs:
        print("Abort: No check runs found")
        sys.exit()

    set_commit(commit)
    set_output("check_runs", json.dumps(check_runs))
|
2022-10-26 08:49:15 -04:00
|
|
|
|
|
|
|
|
|
|
|
# Run only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
|