|
6 | 6 | """Tool to manage the KernelCI YAML pipeline configuration""" |
7 | 7 |
|
8 | 8 | import os |
| 9 | +import sys |
| 10 | +import json |
9 | 11 |
|
10 | 12 | import click |
11 | 13 | import yaml |
12 | 14 |
|
13 | 15 | import kernelci.config |
| 16 | +import kernelci.api.helper |
14 | 17 | from . import Args, kci |
15 | 18 |
|
16 | 19 |
|
@@ -75,3 +78,124 @@ def dump(section, config, indent, recursive): |
75 | 78 | else: |
76 | 79 | echo = click.echo_via_pager if recursive else click.echo |
77 | 80 | echo(yaml.dump(data, indent=indent)) |
| 81 | + |
| 82 | + |
def validate_rules(node, rules):
    """Tell whether *node* passes the given scheduling *rules*.

    Delegates the actual rule evaluation to the API helper; no API
    connection is needed, hence the ``None`` API instance.
    """
    return kernelci.api.helper.APIHelper(None).should_create_node(rules, node)
| 87 | + |
| 88 | + |
def compare_builds(merged_data):
    """Group kbuild jobs by their build parameters and report duplicates.

    Jobs of kind "kbuild" that share an identical ``params`` dictionary
    produce the same artifacts, so scheduling more than one of them is
    redundant.  Returns a string listing each group of builds with
    identical params (empty string when every kbuild is unique).

    Exits the process with status 1 when the configuration contains no
    jobs at all, which usually means the pipeline config path was not
    passed on the command line.
    """
    jobs = merged_data.get("jobs")
    if not jobs:
        click.echo("No jobs found in the merged data, "
                   "maybe you need to add parameter "
                   "-c path/kernelci-pipeline/config?")
        sys.exit(1)

    # Group kbuild job names by a canonical (sorted-keys JSON) rendering
    # of their params, so dicts with equal content hash to the same key.
    kbuilds_by_params = {}
    for name, job in jobs.items():
        if job.get("kind") != "kbuild":
            continue
        key = json.dumps(job.get("params", {}), sort_keys=True)
        kbuilds_by_params.setdefault(key, []).append(name)

    # Only groups with more than one build are worth reporting.
    return "".join(
        f"Params {params}: {kbuild_list},"
        for params, kbuild_list in kbuilds_by_params.items()
        if len(kbuild_list) > 1
    )
| 120 | + |
| 121 | + |
# pylint: disable=too-many-branches disable=too-many-locals
def do_forecast(merged_data):
    """Simulate a checkout event on each tree/branch and predict builds.

    For every entry in "build_configs" a fake checkout node is built and
    every scheduler entry that reacts to "checkout" events is evaluated
    against both the job rules and the scheduler rules.  The resulting
    list of kbuild jobs per checkout, plus a report of builds with
    identical params, is printed to stdout.
    """
    checkouts = []
    build_configs = merged_data.get("build_configs", {})
    for data in build_configs.values():
        # Normalize: a missing or empty architecture list means "any arch"
        if not data.get("architectures"):
            data["architectures"] = None
        checkouts.append(data)

    # sort checkouts by tree and branch
    checkouts.sort(key=lambda x: (x.get("tree", ""), x.get("branch", "")))

    jobs_cfg = merged_data.get("jobs", {})
    scheduler = merged_data.get("scheduler", [])
    # The duplicate-build report only depends on the job definitions, so
    # compute it once instead of once per checkout (it was loop-invariant).
    identical_report = compare_builds(merged_data)

    # iterate over checkouts
    for checkout in checkouts:
        checkout["kbuilds"] = []
        checkout["kbuilds_identical"] = identical_report
        # Fake checkout node used to evaluate the rules; the kernel
        # version is an arbitrary recent one.  It only depends on the
        # checkout, so build it once per checkout rather than per job.
        node = {
            "kind": "checkout",
            "data": {
                "kernel_revision": {
                    "tree": checkout.get("tree"),
                    "branch": checkout.get("branch"),
                    "version": {
                        "version": 6,
                        "patchlevel": 16,
                        "extra": "-rc3-973-gb7d1bbd97f77"
                    },
                }
            },
        }
        # iterate over scheduler entries reacting to checkout events
        for entry in scheduler:
            if entry.get("event", {}).get("kind") != "checkout":
                continue
            job_name = entry.get("job")
            # NOTE: distinct names for the scheduler entry and the job
            # definition — the original code shadowed its loop variable.
            job_def = jobs_cfg.get(job_name, {})
            if job_def.get("kind") != "kbuild":
                continue
            # Skip builds for architectures this checkout does not cover
            arch = job_def.get("params", {}).get("arch")
            if checkout.get("architectures") and arch not in checkout.get("architectures"):
                continue
            job_rules = job_def.get("rules", [])
            scheduler_rules = entry.get("rules", [])
            if not validate_rules(node, job_rules) or not validate_rules(node, scheduler_rules):
                continue
            checkout["kbuilds"].append(job_name)

    # print the results
    for checkout in checkouts:
        print(f"Checkout: {checkout.get('tree')}:{checkout.get('branch')}")
        if checkout.get("kbuilds_identical"):
            print(f" Identical builds: {checkout['kbuilds_identical']}")
        if checkout.get("kbuilds"):
            num_builds = len(checkout["kbuilds"])
            print(f" Number of builds: {num_builds}")
            print(" Builds:")
            for build in checkout["kbuilds"]:
                print(f" - {build}")
        else:
            print(" No builds found for this checkout")
| 191 | + |
| 192 | + |
@kci_config.command
@Args.config
def forecast(config):
    """Simulate checkout events and predict which builds they would run"""
    # (The previous docstring was copy-pasted from `dump` and showed the
    # wrong help text for this Click command.)
    config_paths = kernelci.config.get_config_paths(config)
    if not config_paths:
        # Nothing to forecast without any configuration files
        return
    data = kernelci.config.load_yaml(config_paths)
    do_forecast(data)
0 commit comments