Commit 9045d55

test runner tags

1 parent 44ef88d commit 9045d55

File tree

9 files changed

+525 -140 lines changed

.gitlab-ci.yml

Lines changed: 1 addition & 1 deletion
@@ -22,4 +22,4 @@ workflow:
 
 include:
   - ".gitlab/workflows/common.yml"
-  - ".gitlab/workflows/hardware_tests.yml"
+  - ".gitlab/workflows/hardware_tests_dynamic.yml"

.gitlab/scripts/gen_hw_jobs.py

Lines changed: 264 additions & 0 deletions
@@ -0,0 +1,264 @@
#!/usr/bin/env python3
import argparse
import json
import os
import sys
from pathlib import Path

import yaml  # PyYAML is available in python image


# Resolve repository root from this script location: .gitlab/scripts -> esp32 root
SCRIPT_DIR = Path(__file__).resolve().parent
REPO_ROOT = SCRIPT_DIR.parent.parent

# Ensure we run from repo root so relative paths work consistently
try:
    os.chdir(REPO_ROOT)
except Exception:
    pass

TESTS_ROOT = REPO_ROOT / "tests"


def read_json(p: Path):
    try:
        with p.open("r", encoding="utf-8") as f:
            return json.load(f)
    except Exception:
        return {}


def find_tests() -> list[Path]:
    tests = []
    if not TESTS_ROOT.exists():
        return tests
    for ci in TESTS_ROOT.rglob("ci.json"):
        if ci.is_file():
            tests.append(ci)
    return tests


def load_tags_for_test(ci_json: dict, chip: str) -> set[str]:
    tags = set()
    # Global tags
    for key in ("tags",):
        v = ci_json.get(key)
        if isinstance(v, list):
            for e in v:
                if isinstance(e, str) and e.strip():
                    tags.add(e.strip())
    # Per-SoC tags
    soc_tags = ci_json.get("soc_tags")
    if isinstance(soc_tags, dict):
        v = soc_tags.get(chip)
        if isinstance(v, list):
            for e in v:
                if isinstance(e, str) and e.strip():
                    tags.add(e.strip())
    return tags


def test_enabled_for_target(ci_json: dict, chip: str) -> bool:
    targets = ci_json.get("targets")
    if isinstance(targets, dict):
        v = targets.get(chip)
        if v is False:
            return False
    return True


def platform_allowed(ci_json: dict, platform: str = "hardware") -> bool:
    platforms = ci_json.get("platforms")
    if isinstance(platforms, dict):
        v = platforms.get(platform)
        if v is False:
            return False
    return True


def sketch_name_from_ci(ci_path: Path) -> str:
    # The sketch directory holds a .ino named after the directory
    sketch_dir = ci_path.parent
    return sketch_dir.name


def sdkconfig_path_for(chip: str, sketch: str, ci_json: dict) -> Path:
    # Match logic from tests_run.sh: if multiple FQBN entries -> build0.tmp
    fqbn = ci_json.get("fqbn", {}) if isinstance(ci_json, dict) else {}
    length = 0
    if isinstance(fqbn, dict):
        v = fqbn.get(chip)
        if isinstance(v, list):
            length = len(v)
    if length <= 1:
        return Path.home() / f".arduino/tests/{chip}/{sketch}/build.tmp/sdkconfig"
    return Path.home() / f".arduino/tests/{chip}/{sketch}/build0.tmp/sdkconfig"


def sdk_meets_requirements(sdkconfig: Path, ci_json: dict) -> bool:
    # Mirror check_requirements in sketch_utils.sh
    if not sdkconfig.exists():
        # Build might have been skipped or failed; allow parent to skip scheduling
        return False
    try:
        requires = ci_json.get("requires") or []
        requires_any = ci_json.get("requires_any") or []
        content = sdkconfig.read_text(encoding="utf-8", errors="ignore")
        # AND requirements
        for req in requires:
            if not isinstance(req, str):
                continue
            if not any(line.startswith(req) for line in content.splitlines()):
                return False
        # OR requirements
        if requires_any:
            ok = any(
                any(line.startswith(req) for line in content.splitlines())
                for req in requires_any if isinstance(req, str)
            )
            if not ok:
                return False
        return True
    except Exception:
        return False


def main():
    ap = argparse.ArgumentParser()
    ap.add_argument("--chips", required=True, help="Comma-separated list of SoCs")
    ap.add_argument("--types", required=False, default="validation",
                    help="Comma-separated test type directories under tests/")
    ap.add_argument("--out", required=True, help="Output YAML path for child pipeline")
    ap.add_argument("--dry-run", action="store_true", help="Print planned groups/jobs and skip sdkconfig requirement checks")
    args = ap.parse_args()

    chips = [c.strip() for c in args.chips.split(",") if c.strip()]
    types = [t.strip() for t in args.types.split(",") if t.strip()]

    # Aggregate mapping: (chip, frozenset(tags or generic)) -> list of test paths
    group_map: dict[tuple[str, frozenset[str]], list[str]] = {}

    for ci_path in find_tests():
        # Filter by test type if provided
        try:
            rel = ci_path.relative_to(TESTS_ROOT)
            parts = rel.parts
            if not parts:
                continue
            test_type = parts[0]
        except Exception:
            continue
        if types and test_type not in types:
            continue

        ci = read_json(ci_path)
        test_dir = str(ci_path.parent)
        sketch = sketch_name_from_ci(ci_path)
        for chip in chips:
            tags = load_tags_for_test(ci, chip)
            if not test_enabled_for_target(ci, chip):
                continue
            # Skip tests that explicitly disable the hardware platform
            if not platform_allowed(ci, "hardware"):
                continue
            sdk = sdkconfig_path_for(chip, sketch, ci)
            if not args.dry_run and not sdk_meets_requirements(sdk, ci):
                continue
            key_tags = tags.copy()
            # SOC must always be one runner tag
            key_tags.add(chip)
            if len(key_tags) == 1:
                # Only SOC present, add generic
                key_tags.add("generic")
            key = (chip, frozenset(sorted(key_tags)))
            group_map.setdefault(key, []).append(test_dir)

    # Load template job
    template_path = REPO_ROOT / '.gitlab/workflows/hw_test_template.yml'
    template = yaml.safe_load(template_path.read_text(encoding='utf-8'))
    if not isinstance(template, dict) or 'hw-test-template' not in template:
        print('ERROR: hw_test_template.yml missing hw-test-template')
        sys.exit(2)
    base_job = template['hw-test-template']

    # Build child pipeline YAML
    jobs = {}
    for (chip, tagset), test_dirs in group_map.items():
        tag_list = sorted(list(tagset))
        tag_str = "-".join(tag_list)
        job_name = f"hw-{chip}-{tag_str}"[:255]

        # Clone base job and adjust
        job = yaml.safe_load(yaml.safe_dump(base_job))
        # Ensure tags include SOC+extras
        job['tags'] = tag_list
        # Force type to 'all' so tests_run.sh discovers correct directories per -s
        vars_block = job.get('variables', {})
        vars_block['TEST_TYPE'] = 'all'
        vars_block['TEST_CHIP'] = chip
        vars_block['TEST_TYPES'] = args.types
        # Provide list of test directories for this job
        vars_block['TEST_LIST'] = "\n".join(sorted(test_dirs))
        job['variables'] = vars_block

        # Override script to run only selected tests
        job['script'] = [
            'echo Using binaries for $TEST_CHIP',
            'ls -laR ~/.arduino/tests || true',
            'set -e',
            'rc=0',
            'while IFS= read -r d; do \n [ -z "$d" ] && continue; \n sketch=$(basename "$d"); \n echo Running $sketch in $d; \n bash .github/scripts/tests_run.sh -t $TEST_CHIP -s $sketch -e || rc=$?; \n done <<< "$TEST_LIST"; exit $rc',
        ]

        # Override before_script to fetch only this SOC's artifacts for all TEST_TYPES
        job['before_script'] = [
            'echo Running hardware tests for chip:$TEST_CHIP',
            'pip install -U pip',
            'apt-get update',
            'apt-get install -y jq unzip curl',
            'rm -rf ~/.arduino/tests',
            'mkdir -p ~/.arduino/tests/$TEST_CHIP',
            "IFS=',' read -r -a types <<< \"$TEST_TYPES\"; for t in ${types[@]}; do export TEST_TYPE=\"$t\"; echo Fetching binaries for $TEST_CHIP $t; bash .gitlab/scripts/get_artifacts.sh; done",
            'pip install -r tests/requirements.txt --extra-index-url https://dl.espressif.com/pypi',
        ]

        jobs[job_name] = job

    if args.dry_run:
        print("Planned hardware test jobs:")
        for name, job in jobs.items():
            tags = job.get('tags', [])
            soc = job.get('variables', {}).get('TEST_CHIP')
            tlist = job.get('variables', {}).get('TEST_LIST', '')
            tests = [p for p in tlist.split('\n') if p]
            print(f"- {name} tags={tags} soc={soc} tests={len(tests)}")
            for t in tests:
                print(f" * {t}")

    # If no jobs matched, create a no-op job to avoid failing trigger
    if not jobs:
        jobs["no-op"] = {
            "stage": "test",
            "script": ["echo No matching hardware tests to run"],
            "rules": [{"when": "on_success"}],
        }

    # Ensure child pipeline defines stages
    child = {"stages": ["test"]}
    child.update(jobs)

    if args.dry_run:
        print("\n--- Generated child pipeline YAML (dry run) ---")
        sys.stdout.write(yaml.safe_dump(child, sort_keys=False))
        return 0

    out = Path(args.out)
    out.write_text(yaml.safe_dump(child, sort_keys=False), encoding="utf-8")
    print(f"Wrote child pipeline with {len(jobs)} job(s) to {out}")


if __name__ == "__main__":
    sys.exit(main())
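
Note (not part of the commit): a minimal sketch of how the grouping helpers above treat a hypothetical ci.json. The key names it reads (tags, soc_tags, targets, platforms) are the ones the script looks up; the chip names and tag values below are invented for illustration, and the import assumes the snippet is run from .gitlab/scripts so that gen_hw_jobs is importable. The generator itself would typically be invoked along the lines of python3 .gitlab/scripts/gen_hw_jobs.py --chips esp32,esp32s3 --types validation --out child_pipeline.yml --dry-run (flags taken from the argparse definition above).

# Illustration only: hypothetical ci.json values fed to the helpers defined in gen_hw_jobs.py.
from gen_hw_jobs import load_tags_for_test, platform_allowed, test_enabled_for_target

sample_ci = {
    "targets": {"esp32h2": False},              # test disabled for this SoC
    "platforms": {"qemu": False},               # "hardware" stays enabled (default True)
    "tags": ["wifi"],                           # global runner tag
    "soc_tags": {"esp32s3": ["octal_psram"]},   # extra tag only when built for esp32s3
}

for chip in ("esp32", "esp32s3", "esp32h2"):
    if not test_enabled_for_target(sample_ci, chip) or not platform_allowed(sample_ci, "hardware"):
        print(f"{chip}: skipped")
        continue
    runner_tags = load_tags_for_test(sample_ci, chip) | {chip}  # SoC is always a runner tag
    print(f"{chip}: runner tags -> {sorted(runner_tags)}")

# Expected grouping: esp32 -> ['esp32', 'wifi'], esp32s3 -> ['esp32s3', 'octal_psram', 'wifi'],
# esp32h2 -> skipped; tests sharing the same (chip, tag set) key land in one child-pipeline job.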

.gitlab/scripts/get_artifacts.sh

Lines changed: 77 additions & 0 deletions
@@ -0,0 +1,77 @@
#!/bin/bash
# Disable shellcheck warning about $? uses.
# shellcheck disable=SC2181

set -e
set -o pipefail

echo "Downloading test binaries for $TEST_CHIP from GitHub repository $GITHUB_REPOSITORY"
echo "Binaries run ID: $BINARIES_RUN_ID"
echo "Looking for artifact: test-bin-$TEST_CHIP-$TEST_TYPE"

# Check if GitHub token is available
if [ -z "$GITHUB_DOWNLOAD_PAT" ]; then
    echo "ERROR: GITHUB_DOWNLOAD_PAT not available in GitLab environment"
    echo "Please set up GITHUB_DOWNLOAD_PAT in GitLab CI/CD variables"
    exit 1
fi

# First, get the artifacts list and save it for debugging
echo "Fetching artifacts list from GitHub API..."
artifacts_response=$(curl -s -H "Authorization: token $GITHUB_DOWNLOAD_PAT" \
    -H "Accept: application/vnd.github.v3+json" \
    "https://api.github.com/repos/$GITHUB_REPOSITORY/actions/runs/$BINARIES_RUN_ID/artifacts")

echo "GitHub API response:"
echo "$artifacts_response"

# Check if we got a valid response
if [ -z "$artifacts_response" ]; then
    echo "ERROR: Empty response from GitHub API"
    exit 1
fi

# Check for API errors
error_message=$(echo "$artifacts_response" | jq -r '.message // empty' 2>/dev/null)
if [ -n "$error_message" ]; then
    echo "ERROR: GitHub API returned error: $error_message"
    exit 1
fi

# List all available artifacts for debugging
echo "Available artifacts:"
echo "$artifacts_response" | jq -r '.artifacts[]?.name // "No artifacts found"' 2>/dev/null || echo "Could not parse artifacts"

# Find the download URL for our specific artifact
download_url=$(echo "$artifacts_response" | jq -r ".artifacts[] | select(.name==\"test-bin-$TEST_CHIP-$TEST_TYPE\") | .archive_download_url" 2>/dev/null)

if [ "$download_url" = "null" ] || [ -z "$download_url" ]; then
    echo "ERROR: Could not find artifact 'test-bin-$TEST_CHIP-$TEST_TYPE'"
    echo "This could mean:"
    echo "1. The artifact name doesn't match exactly"
    echo "2. The artifacts haven't been uploaded yet"
    echo "3. The GitHub run ID is incorrect"
    exit 1
fi

echo "Found download URL: $download_url"

# Download the artifact
echo "Downloading artifact..."
curl -H "Authorization: token $GITHUB_DOWNLOAD_PAT" -L "$download_url" -o test-binaries.zip

if [ $? -ne 0 ] || [ ! -f test-binaries.zip ]; then
    echo "ERROR: Failed to download artifact"
    exit 1
fi

echo "Extracting binaries..."
unzip -o test-binaries.zip -d ~/.arduino/tests/"$TEST_CHIP"/

if [ $? -ne 0 ]; then
    echo "ERROR: Failed to extract binaries"
    exit 1
fi

rm -f test-binaries.zip
echo "Successfully downloaded and extracted test binaries"

.gitlab/workflows/common.yml

Lines changed: 2 additions & 0 deletions
@@ -4,8 +4,10 @@
 
 stages:
   - pre_check
+  - generate
   - build
   - test
+  - trigger
   - result
 
 variables:
