#!/usr/bin/env python3
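"""Generate a GitLab child pipeline for hardware-in-the-loop tests.

Walks tests/<type>/**/ci.json, keeps the tests enabled for each requested
SoC (honoring "targets", "platforms", "tags"/"soc_tags", and the
"requires"/"requires_any" checks against the built sdkconfig), groups them
by (SoC, runner-tag set), and emits one job per group based on the
hw-test-template job in .gitlab/workflows/hw_test_template.yml.

Example invocation (the script path and chip names here are illustrative):

    python3 .gitlab/scripts/generate_hw_pipeline.py \\
        --chips esp32,esp32s3 --types validation --out child_pipeline.yml
"""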
import argparse
import json
import os
import sys
from pathlib import Path

import yaml  # PyYAML is available in the python image


# Resolve the repository root from this script's location: .gitlab/scripts -> esp32 root
SCRIPT_DIR = Path(__file__).resolve().parent
REPO_ROOT = SCRIPT_DIR.parent.parent

# Run from the repo root so relative paths resolve consistently
try:
    os.chdir(REPO_ROOT)
except Exception:
    pass

TESTS_ROOT = REPO_ROOT / "tests"


def read_json(p: Path):
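    """Parse a JSON file, returning an empty dict if it is missing or invalid."""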
    try:
        with p.open("r", encoding="utf-8") as f:
            return json.load(f)
    except Exception:
        return {}


def find_tests() -> list[Path]:
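    """Collect every ci.json under tests/; each one marks a test sketch."""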
    tests: list[Path] = []
    if not TESTS_ROOT.exists():
        return tests
    for ci in TESTS_ROOT.rglob("ci.json"):
        if ci.is_file():
            tests.append(ci)
    return tests


def load_tags_for_test(ci_json: dict, chip: str) -> set[str]:
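    """Union of a test's global "tags" and its per-SoC "soc_tags" for *chip*."""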
    tags = set()
    # Global tags
    v = ci_json.get("tags")
    if isinstance(v, list):
        for e in v:
            if isinstance(e, str) and e.strip():
                tags.add(e.strip())
    # Per-SoC tags
    soc_tags = ci_json.get("soc_tags")
    if isinstance(soc_tags, dict):
        v = soc_tags.get(chip)
        if isinstance(v, list):
            for e in v:
                if isinstance(e, str) and e.strip():
                    tags.add(e.strip())
    return tags


def test_enabled_for_target(ci_json: dict, chip: str) -> bool:
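    """A test is enabled for *chip* unless its "targets" map sets that chip to false."""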
    targets = ci_json.get("targets")
    if isinstance(targets, dict):
        v = targets.get(chip)
        if v is False:
            return False
    return True


def platform_allowed(ci_json: dict, platform: str = "hardware") -> bool:
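    """A platform is allowed unless the "platforms" map sets it to false."""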
    platforms = ci_json.get("platforms")
    if isinstance(platforms, dict):
        v = platforms.get(platform)
        if v is False:
            return False
    return True


def sketch_name_from_ci(ci_path: Path) -> str:
    # The sketch directory holds an .ino file named after the directory
    sketch_dir = ci_path.parent
    return sketch_dir.name


def sdkconfig_path_for(chip: str, sketch: str, ci_json: dict) -> Path:
    # Match the logic in tests_run.sh: multiple FQBN entries -> build0.tmp
    fqbn = ci_json.get("fqbn", {}) if isinstance(ci_json, dict) else {}
    length = 0
    if isinstance(fqbn, dict):
        v = fqbn.get(chip)
        if isinstance(v, list):
            length = len(v)
    if length <= 1:
        return Path.home() / f".arduino/tests/{chip}/{sketch}/build.tmp/sdkconfig"
    return Path.home() / f".arduino/tests/{chip}/{sketch}/build0.tmp/sdkconfig"


def sdk_meets_requirements(sdkconfig: Path, ci_json: dict) -> bool:
    # Mirror check_requirements in sketch_utils.sh
    if not sdkconfig.exists():
        # The build may have been skipped or failed; let the caller skip scheduling
        return False
    try:
        requires = ci_json.get("requires") or []
        requires_any = ci_json.get("requires_any") or []
        lines = sdkconfig.read_text(encoding="utf-8", errors="ignore").splitlines()
        # AND requirements: every entry must match the start of some sdkconfig line
        for req in requires:
            if not isinstance(req, str):
                continue
            if not any(line.startswith(req) for line in lines):
                return False
        # OR requirements: at least one entry must match
        if requires_any:
            ok = any(
                any(line.startswith(req) for line in lines)
                for req in requires_any
                if isinstance(req, str)
            )
            if not ok:
                return False
        return True
    except Exception:
        return False


def main():
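    """Group eligible tests per SoC and runner tags, then emit the child pipeline."""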
    ap = argparse.ArgumentParser()
    ap.add_argument("--chips", required=True, help="Comma-separated list of SoCs")
    ap.add_argument("--types", required=False, default="validation",
                    help="Comma-separated test type directories under tests/")
    ap.add_argument("--out", required=True, help="Output YAML path for the child pipeline")
    ap.add_argument("--dry-run", action="store_true",
                    help="Print planned groups/jobs and skip sdkconfig requirement checks")
    args = ap.parse_args()

    chips = [c.strip() for c in args.chips.split(",") if c.strip()]
    types = [t.strip() for t in args.types.split(",") if t.strip()]

    # Aggregate mapping: (chip, frozenset of runner tags) -> list of test dirs
    group_map: dict[tuple[str, frozenset[str]], list[str]] = {}

    for ci_path in find_tests():
        # Filter by test type: the first path component under tests/
        try:
            rel = ci_path.relative_to(TESTS_ROOT)
            parts = rel.parts
            if not parts:
                continue
            test_type = parts[0]
        except Exception:
            continue
        if types and test_type not in types:
            continue

        ci = read_json(ci_path)
        test_dir = str(ci_path.parent)
        sketch = sketch_name_from_ci(ci_path)
        for chip in chips:
            if not test_enabled_for_target(ci, chip):
                continue
            # Skip tests that explicitly disable the hardware platform
            if not platform_allowed(ci, "hardware"):
                continue
            sdk = sdkconfig_path_for(chip, sketch, ci)
            if not args.dry_run and not sdk_meets_requirements(sdk, ci):
                continue
            key_tags = load_tags_for_test(ci, chip)
            # The SoC itself must always be one of the runner tags
            key_tags.add(chip)
            if len(key_tags) == 1:
                # Only the SoC tag is present; fall back to "generic" runners
                key_tags.add("generic")
            key = (chip, frozenset(key_tags))
            group_map.setdefault(key, []).append(test_dir)

    # Load the template job
    template_path = REPO_ROOT / ".gitlab/workflows/hw_test_template.yml"
    template = yaml.safe_load(template_path.read_text(encoding="utf-8"))
    if not isinstance(template, dict) or "hw-test-template" not in template:
        print("ERROR: hw_test_template.yml is missing the hw-test-template job")
        sys.exit(2)
    base_job = template["hw-test-template"]

    # Build the child pipeline YAML
    jobs = {}
    for (chip, tagset), test_dirs in group_map.items():
        tag_list = sorted(tagset)
        tag_str = "-".join(tag_list)
        job_name = f"hw-{chip}-{tag_str}"[:255]

        # Deep-copy the base job via a YAML round-trip, then adjust it
        job = yaml.safe_load(yaml.safe_dump(base_job))
        # Ensure tags include the SoC plus any extras
        job["tags"] = tag_list
        # Force the type to "all" so tests_run.sh discovers the right directory per -s
        vars_block = job.get("variables", {})
        vars_block["TEST_TYPE"] = "all"
        vars_block["TEST_CHIP"] = chip
        vars_block["TEST_TYPES"] = args.types
        # Provide the list of test directories for this job
        vars_block["TEST_LIST"] = "\n".join(sorted(test_dirs))
        job["variables"] = vars_block

        # Override the script to run only the selected tests
        job["script"] = [
            "echo Using binaries for $TEST_CHIP",
            "ls -laR ~/.arduino/tests || true",
            "set -e",
            "rc=0",
            'while IFS= read -r d; do\n'
            '  [ -z "$d" ] && continue\n'
            '  sketch=$(basename "$d")\n'
            '  echo Running $sketch in $d\n'
            '  bash .github/scripts/tests_run.sh -t $TEST_CHIP -s $sketch -e || rc=$?\n'
            'done <<< "$TEST_LIST"\n'
            'exit $rc',
        ]

        # Override before_script to fetch only this SoC's artifacts for all TEST_TYPES
        job["before_script"] = [
            "echo Running hardware tests for chip:$TEST_CHIP",
            "pip install -U pip",
            "apt-get update",
            "apt-get install -y jq unzip curl",
            "rm -rf ~/.arduino/tests",
            "mkdir -p ~/.arduino/tests/$TEST_CHIP",
            "IFS=',' read -r -a types <<< \"$TEST_TYPES\"; "
            'for t in "${types[@]}"; do '
            'export TEST_TYPE="$t"; '
            "echo Fetching binaries for $TEST_CHIP $t; "
            "bash .gitlab/scripts/get_artifacts.sh; "
            "done",
            "pip install -r tests/requirements.txt --extra-index-url https://dl.espressif.com/pypi",
        ]

        jobs[job_name] = job

    if args.dry_run:
        print("Planned hardware test jobs:")
        for name, job in jobs.items():
            tags = job.get("tags", [])
            soc = job.get("variables", {}).get("TEST_CHIP")
            tlist = job.get("variables", {}).get("TEST_LIST", "")
            tests = [p for p in tlist.split("\n") if p]
            print(f"- {name} tags={tags} soc={soc} tests={len(tests)}")
            for t in tests:
                print(f"  * {t}")

    # If no jobs matched, emit a no-op job so the trigger does not fail
    if not jobs:
        jobs["no-op"] = {
            "stage": "test",
            "script": ["echo No matching hardware tests to run"],
            "rules": [{"when": "on_success"}],
        }

    # The child pipeline must define its stages
    child = {"stages": ["test"]}
    child.update(jobs)

    if args.dry_run:
        print("\n--- Generated child pipeline YAML (dry run) ---")
        sys.stdout.write(yaml.safe_dump(child, sort_keys=False))
        return 0

    out = Path(args.out)
    out.write_text(yaml.safe_dump(child, sort_keys=False), encoding="utf-8")
    print(f"Wrote child pipeline with {len(jobs)} job(s) to {out}")
    return 0


if __name__ == "__main__":
    sys.exit(main())