
Commit c66b808

Merge pull request #7428 from MicroDev1/ci

Multiple CI Enhancements

2 parents 72f4a8e + 4a9d934

4 files changed: +108 −78 lines

.github/workflows/build.yml

Lines changed: 19 additions & 12 deletions
@@ -27,9 +27,9 @@ jobs:
       boards-aarch: ${{ steps.set-matrix.outputs.boards-aarch }}
     steps:
     - name: Dump GitHub context
+      run: echo "$GITHUB_CONTEXT"
       env:
         GITHUB_CONTEXT: ${{ toJson(github) }}
-      run: echo "$GITHUB_CONTEXT"
     - uses: actions/checkout@v3
       with:
         submodules: false
@@ -135,21 +135,27 @@ jobs:
         GITHUB_TOKEN: ${{ github.token }}
         EXCLUDE_COMMIT: ${{ github.event.after }}
       run: python3 -u ci_changes_per_commit.py
+    - name: Set head sha
+      if: github.event_name == 'pull_request'
+      run: echo "HEAD_SHA=$(git show -s --format=%s $GITHUB_SHA | grep -o -P "(?<=Merge ).*(?= into)")" >> $GITHUB_ENV
+    - name: Set base sha
+      if: github.event_name == 'pull_request'
+      run: |
+        git fetch --no-tags --no-recurse-submodules --depth=$((DEPTH + 1)) origin $HEAD_SHA
+        echo "BASE_SHA=$(git rev-list $HEAD_SHA --skip=$DEPTH --max-count=1)" >> $GITHUB_ENV
+      env:
+        DEPTH: ${{ steps.get-last-commit-with-checks.outputs.commit_depth || github.event.pull_request.commits }}
     - name: Get changes
       id: get-changes
       if: github.event_name == 'pull_request'
-      uses: tj-actions/changed-files@v34
-      with:
-        json: true
-        sha: ${{ steps.get-last-commit-with-checks.outputs.commit && github.event.after }}
-        base_sha: ${{ steps.get-last-commit-with-checks.outputs.commit }}
+      run: echo $(git diff $BASE_SHA...$HEAD_SHA --name-only) | echo "changed_files=[\"$(sed "s/ /\", \"/g")\"]" >> $GITHUB_OUTPUT
     - name: Set matrix
       id: set-matrix
       working-directory: tools
-      env:
-        CHANGED_FILES: ${{ steps.get-changes.outputs.all_changed_and_modified_files }}
-        LAST_FAILED_JOBS: ${{ steps.get-last-commit-with-checks.outputs.checkruns }}
       run: python3 -u ci_set_matrix.py
+      env:
+        CHANGED_FILES: ${{ steps.get-changes.outputs.changed_files }}
+        LAST_FAILED_JOBS: ${{ steps.get-last-commit-with-checks.outputs.check_runs }}


   mpy-cross-mac:
@@ -412,14 +418,15 @@ jobs:
         path: ${{ github.workspace }}/.idf_tools
         key: ${{ runner.os }}-idf-tools-${{ hashFiles('.git/modules/ports/espressif/esp-idf/HEAD') }}-${{ steps.py3.outputs.python-path }}-20220404
     - name: Clone IDF submodules
-      run: |
-        (cd $IDF_PATH && git submodule update --init)
+      run: git submodule update --init $IDF_PATH
       env:
         IDF_PATH: ${{ github.workspace }}/ports/espressif/esp-idf
     - name: Install IDF tools
       run: |
+        echo "Installing ESP-IDF tools"
         $IDF_PATH/tools/idf_tools.py --non-interactive install required
         $IDF_PATH/tools/idf_tools.py --non-interactive install cmake
+        echo "Installing Python environment and packages"
         $IDF_PATH/tools/idf_tools.py --non-interactive install-python-env
         rm -rf $IDF_TOOLS_PATH/dist
       env:
@@ -437,7 +444,6 @@ jobs:
       run: |
         source $IDF_PATH/export.sh
         gcc --version
-        xtensa-esp32s2-elf-gcc --version
         python3 --version
         ninja --version
         cmake --version
@@ -471,6 +477,7 @@ jobs:
         AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
       if: (github.event_name == 'push' && github.ref == 'refs/heads/main' && github.repository_owner == 'adafruit') || (github.event_name == 'release' && (github.event.action == 'published' || github.event.action == 'rerequested'))

+
   build-aarch:
     runs-on: ubuntu-20.04
     needs: test
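
The replacement for tj-actions/changed-files boils down to diffing the PR's base and head commits and publishing the file names as a JSON array that ci_set_matrix.py later reads from CHANGED_FILES. Below is a minimal Python sketch of that "Get changes" step, not the workflow's actual shell one-liner; the helper name changed_files_output is hypothetical, and it assumes git is available and BASE_SHA/HEAD_SHA were already exported by the two preceding steps.

# Sketch: emit the PR's changed files as a JSON array, mirroring the
# "Get changes" step above. Assumes BASE_SHA and HEAD_SHA are set by the
# "Set base sha" / "Set head sha" steps.
import json
import os
import subprocess

def changed_files_output() -> str:
    base, head = os.environ["BASE_SHA"], os.environ["HEAD_SHA"]
    names = subprocess.run(
        ["git", "diff", "--name-only", f"{base}...{head}"],
        capture_output=True, text=True, check=True,
    ).stdout.split()
    # The workflow appends this line to $GITHUB_OUTPUT so a later step can
    # hand it to ci_set_matrix.py via the CHANGED_FILES environment variable.
    return f"changed_files={json.dumps(names)}"

if __name__ == "__main__":
    print(changed_files_output())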

tools/ci_changes_per_commit.py

Lines changed: 58 additions & 49 deletions
@@ -18,7 +18,7 @@
         }
         nodes {
           commit {
-            checkSuites(first: 3) {
+            checkSuites(first: 100) {
               nodes {
                 conclusion
                 workflowRun {
@@ -39,7 +39,7 @@
 }
 """

-QUERY_CHECKRUNS = """
+QUERY_CHECK_RUNS = """
 query ($checkSuiteID: ID!,
        $afterFailedRun: String, $afterIncompleteRun: String,
        $includeFailedRuns: Boolean!, $includeIncompleteRuns: Boolean!) {
@@ -92,7 +92,7 @@
 }


-query_variables_checkruns = {
+query_variables_check_runs = {
     "checkSuiteID": "",
     "afterFailedRun": None,
     "afterIncompleteRun": None,
@@ -111,13 +111,11 @@ def __init__(self, query, variables={}, headers={}):
         self.headers = headers

     def paginate(self, page_info, name):
-        has_page = (
-            page_info["hasNextPage"] if name.startswith("after") else page_info["hasPreviousPage"]
-        )
+        has_page = page_info["hasNextPage" if name.startswith("after") else "hasPreviousPage"]
         if has_page:
-            self.variables[name] = (
-                page_info["endCursor"] if name.startswith("after") else page_info["startCursor"]
-            )
+            self.variables[name] = page_info[
+                "endCursor" if name.startswith("after") else "startCursor"
+            ]
         return has_page

     def fetch(self):
@@ -141,28 +139,31 @@ def set_output(name, value):
         print(f"Would set GitHub actions output {name} to '{value}'")


-def get_commit_and_checksuite(query_commits):
-    commits = query_commits.fetch()["data"]["repository"]["pullRequest"]["commits"]
-
-    if commits["totalCount"] > 0:
-        for commit in reversed(commits["nodes"]):
-            commit = commit["commit"]
-            commit_sha = commit["oid"]
-            if commit_sha == os.environ["EXCLUDE_COMMIT"]:
-                continue
-            checksuites = commit["checkSuites"]
-            if checksuites["totalCount"] > 0:
-                for checksuite in checksuites["nodes"]:
-                    if checksuite["workflowRun"]["workflow"]["name"] == "Build CI":
-                        return [
-                            commit_sha,
-                            checksuite["id"] if checksuite["conclusion"] != "SUCCESS" else None,
-                        ]
-    else:
-        if query_commits.paginate(commits["pageInfo"], "beforeCommit"):
-            return get_commit_and_checksuite(query_commits)
-
-    return [None, None]
+def get_commit_depth_and_check_suite(query_commits):
+    commit_depth = 0
+    while True:
+        commits = query_commits.fetch()["data"]["repository"]["pullRequest"]["commits"]
+        if commits["totalCount"] > 0:
+            nodes = commits["nodes"]
+            nodes.reverse()
+            if nodes[0]["commit"]["oid"] == os.environ["EXCLUDE_COMMIT"]:
+                nodes.pop(0)
+            for commit in nodes:
+                commit_depth += 1
+                commit = commit["commit"]
+                commit_sha = commit["oid"]
+                check_suites = commit["checkSuites"]
+                if check_suites["totalCount"] > 0:
+                    for check_suite in check_suites["nodes"]:
+                        if check_suite["workflowRun"]["workflow"]["name"] == "Build CI":
+                            return [
+                                {"sha": commit_sha, "depth": commit_depth},
+                                check_suite["id"]
+                                if check_suite["conclusion"] != "SUCCESS"
+                                else None,
+                            ]
+        if not query_commits.paginate(commits["pageInfo"], "beforeCommit"):
+            return [None, None]


 def append_runs_to_list(runs, bad_runs_by_matrix):
@@ -180,53 +181,61 @@ def append_runs_to_list(runs, bad_runs_by_matrix):
             bad_runs_by_matrix[matrix].append(res_board.group()[1:-1])


-def get_bad_checkruns(query_checkruns):
+def get_bad_check_runs(query_check_runs):
     more_pages = True
     bad_runs_by_matrix = {}
+    run_types = ["failed", "incomplete"]
+
     while more_pages:
-        checkruns = query_checkruns.fetch()["data"]["node"]
-        run_types = ["failed", "incomplete"]
+        check_runs = query_check_runs.fetch()["data"]["node"]
         more_pages = False

         for run_type in run_types:
             run_type_camel = run_type.capitalize() + "Run"
             run_type = run_type + "Runs"

-            append_runs_to_list(checkruns[run_type], bad_runs_by_matrix)
+            append_runs_to_list(check_runs[run_type], bad_runs_by_matrix)

-            if query_checkruns.paginate(checkruns[run_type]["pageInfo"], "after" + run_type_camel):
-                query_checkruns.variables["include" + run_type_camel] = True
+            if query_check_runs.paginate(
+                check_runs[run_type]["pageInfo"], "after" + run_type_camel
+            ):
+                query_check_runs.variables["include" + run_type_camel] = True
                 more_pages = True

     return bad_runs_by_matrix


+def set_commit(commit):
+    set_output("commit_sha", commit["sha"])
+    set_output("commit_depth", commit["depth"])
+
+
 def main():
     query_commits = Query(QUERY_COMMITS, query_variables_commits, headers)
     query_commits.variables["owner"], query_commits.variables["name"] = os.environ["REPO"].split(
         "/"
     )

-    commit, checksuite = get_commit_and_checksuite(query_commits)
+    commit, check_suite = get_commit_depth_and_check_suite(query_commits)

-    if checksuite is None:
-        if commit is None:
-            print("No checkSuites found -> Abort")
+    if not check_suite:
+        if commit:
+            set_commit(commit)
         else:
-            set_output("commit", commit)
+            print("Abort: No check suite found")
         quit()

-    query_checkruns = Query(QUERY_CHECKRUNS, query_variables_checkruns, headers)
-    query_checkruns.variables["checkSuiteID"] = checksuite
+    query_check_runs = Query(QUERY_CHECK_RUNS, query_variables_check_runs, headers)
+    query_check_runs.variables["checkSuiteID"] = check_suite

-    checkruns = get_bad_checkruns(query_checkruns)
+    check_runs = get_bad_check_runs(query_check_runs)

-    if len(checkruns) == 0:
-        print("No checkRuns found -> Abort")
+    if not check_runs:
+        print("Abort: No check runs found")
         quit()

-    set_output("commit", commit)
-    set_output("checkruns", json.dumps(checkruns))
+    set_commit(commit)
+    set_output("check_runs", json.dumps(check_runs))


 if __name__ == "__main__":
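
The reworked Query.paginate() now selects the GraphQL cursor fields with a single conditional key lookup: variables starting with "after" walk forward (hasNextPage/endCursor), all others walk backward (hasPreviousPage/startCursor). The snippet below is a standalone illustration of just that selection logic with made-up cursor values, not the script itself; next_cursor is a hypothetical helper.

# Sketch of the cursor-selection logic used by Query.paginate().
def next_cursor(page_info: dict, name: str):
    has_page = page_info["hasNextPage" if name.startswith("after") else "hasPreviousPage"]
    if has_page:
        return page_info["endCursor" if name.startswith("after") else "startCursor"]
    return None

page_info = {
    "hasNextPage": True, "endCursor": "Y3Vyc29yOjEw",      # dummy cursors
    "hasPreviousPage": False, "startCursor": "Y3Vyc29yOjE=",
}
print(next_cursor(page_info, "afterFailedRun"))  # -> "Y3Vyc29yOjEw" (forward)
print(next_cursor(page_info, "beforeCommit"))    # -> None (no previous page)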

tools/ci_fetch_deps.py

Lines changed: 2 additions & 0 deletions
@@ -70,6 +70,8 @@ def run(title, command, check=True):
         "Fetch back to the start of 2021 to get commit history",
         f'git fetch --recurse-submodules=no --shallow-since="2021-07-01" origin {ref}',
     )
+    # See https://stackoverflow.com/questions/63878612/git-fatal-error-in-object-unshallow-sha-1#comment118418373_63879454
+    run('Fix for bug "fatal: error in object: unshallow"', "git repack -d")
     run("Init submodules", "git submodule init")
     run("Submodule status", "git submodule status")
7577

tools/ci_set_matrix.py

Lines changed: 29 additions & 17 deletions
@@ -26,6 +26,7 @@
 import sys
 import json
 import pathlib
+import subprocess
 from concurrent.futures import ThreadPoolExecutor

 tools_dir = pathlib.Path(__file__).resolve().parent
@@ -82,15 +83,15 @@
 last_failed_jobs = json.loads(j)


-def set_output(name, value):
+def set_output(name: str, value):
     if "GITHUB_OUTPUT" in os.environ:
         with open(os.environ["GITHUB_OUTPUT"], "at") as f:
             print(f"{name}={value}", file=f)
     else:
         print(f"Would set GitHub actions output {name} to '{value}'")


-def set_boards_to_build(build_all):
+def set_boards_to_build(build_all: bool):
     # Get boards in json format
     boards_info_json = build_board_info.get_board_mapping()
     all_board_ids = set()
@@ -228,23 +229,34 @@ def get_settings(board):
         set_output(f"boards-{arch}", json.dumps(sorted(arch_to_boards[arch])))


-def set_docs_to_build(build_all):
-    if "build-doc" in last_failed_jobs:
-        build_all = True
-
-    doc_match = build_all
-    if not build_all:
-        doc_pattern = re.compile(
-            r"^(?:.github/workflows/|docs|extmod/ulab|(?:(?:ports/\w+/bindings|shared-bindings)\S+\.c|conf\.py|tools/extract_pyi\.py|requirements-doc\.txt)$)|(?:-stubs|\.(?:md|MD|rst|RST))$"
-        )
-        for p in changed_files:
-            if doc_pattern.search(p):
-                doc_match = True
-                break
+def set_docs_to_build(build_doc: bool):
+    if not build_doc:
+        if "build-doc" in last_failed_jobs:
+            build_doc = True
+        else:
+            doc_pattern = re.compile(
+                r"^(?:\.github\/workflows\/|docs|extmod\/ulab|(?:(?:ports\/\w+\/bindings|shared-bindings)\S+\.c|conf\.py|tools\/extract_pyi\.py|requirements-doc\.txt)$)|(?:-stubs|\.(?:md|MD|rst|RST))$"
+            )
+            github_workspace = os.environ.get("GITHUB_WORKSPACE") or ""
+            github_workspace = github_workspace and github_workspace + "/"
+            for p in changed_files:
+                if doc_pattern.search(p) and (
+                    (
+                        subprocess.run(
+                            f"git diff -U0 $BASE_SHA...$HEAD_SHA {github_workspace + p} | grep -o -m 1 '^[+-]\/\/|'",
+                            capture_output=True,
+                            shell=True,
+                        ).stdout
+                    )
+                    if p.endswith(".c")
+                    else True
+                ):
+                    build_doc = True
+                    break

     # Set the step outputs
-    print("Building docs:", doc_match)
-    set_output("build-doc", doc_match)
+    print("Building docs:", build_doc)
+    set_output("build-doc", build_doc)


 def check_changed_files():
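
The new set_docs_to_build() only triggers a docs rebuild for a changed .c file when the diff actually touches a //| doc-comment line; for all other paths the regex alone decides. A quick standalone check of that regex against a few hypothetical paths (the paths are illustrative, the pattern is copied from the diff above):

import re

doc_pattern = re.compile(
    r"^(?:\.github\/workflows\/|docs|extmod\/ulab|(?:(?:ports\/\w+\/bindings|shared-bindings)\S+\.c|conf\.py|tools\/extract_pyi\.py|requirements-doc\.txt)$)|(?:-stubs|\.(?:md|MD|rst|RST))$"
)

# Only the first three example paths count as doc-relevant.
for path in (
    "docs/environment.rst",
    "shared-bindings/board/__init__.c",
    "README.rst",
    "main.c",
    "ports/atmel-samd/Makefile",
):
    print(path, "->", bool(doc_pattern.search(path)))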
