|
5 | 5 | import argparse
|
6 | 6 | import hashlib
|
7 | 7 | import os
|
| 8 | +import re |
8 | 9 | import shlex
|
9 | 10 | import subprocess
|
10 | 11 | import textwrap
|
| 12 | +import urllib.request |
11 | 13 | from pathlib import Path
|
12 | 14 |
|
13 | 15 | import pykwalify.core
|
14 | 16 | import yaml
|
15 | 17 | from west.commands import WestCommand
|
16 | 18 |
|
17 | 19 | try:
|
| 20 | + from yaml import CSafeDumper as SafeDumper |
18 | 21 | from yaml import CSafeLoader as SafeLoader
|
19 | 22 | except ImportError:
|
20 | | - from yaml import SafeLoader |
| 23 | + from yaml import SafeDumper, SafeLoader |
21 | 24 |
|
22 | 25 | WEST_PATCH_SCHEMA_PATH = Path(__file__).parents[1] / "schemas" / "patch-schema.yml"
|
23 | 26 | with open(WEST_PATCH_SCHEMA_PATH) as f:
|
@@ -61,6 +64,11 @@ def do_add_parser(self, parser_adder):
|
61 | 64 | Run "west patch list" to list patches.
|
62 | 65 | See "west patch list --help" for details.
|
63 | 66 |
|
| 67 | + Fetching Patches: |
| 68 | +
|
| 69 | + Run "west patch gh-fetch" to fetch patches from GitHub. |
| 70 | + See "west patch gh-fetch --help" for details. |
| 71 | +
|
64 | 72 | YAML File Format:
|
65 | 73 |
|
66 | 74 | The patches.yml syntax is described in "scripts/schemas/patch-schema.yml".
|
@@ -166,6 +174,67 @@ def do_add_parser(self, parser_adder):
|
166 | 174 | ),
|
167 | 175 | )
|
168 | 176 |
|
| 177 | + gh_fetch_arg_parser = subparsers.add_parser( |
| 178 | + "gh-fetch", |
| 179 | + help="Fetch patch from Github", |
| 180 | + formatter_class=argparse.RawDescriptionHelpFormatter, |
| 181 | + epilog=textwrap.dedent( |
| 182 | + """ |
| 183 | + Fetching Patches from GitHub: |
| 184 | +
|
| 185 | + Run "west patch gh-fetch" to fetch a PR from Github and store it as a patch. |
| 186 | + The meta data is generated and appended to the provided patches.yml file. |
| 187 | +
|
| 188 | + If no patches.yml file exists, it will be created. |
| 189 | + """ |
| 190 | + ), |
| 191 | + ) |
| 192 | + gh_fetch_arg_parser.add_argument( |
| 193 | + "-o", |
| 194 | + "--owner", |
| 195 | + action="store", |
| 196 | + default="zephyrproject-rtos", |
| 197 | + help="Github repository owner", |
| 198 | + ) |
| 199 | + gh_fetch_arg_parser.add_argument( |
| 200 | + "-r", |
| 201 | + "--repo", |
| 202 | + action="store", |
| 203 | + default="zephyr", |
| 204 | + help="Github repository", |
| 205 | + ) |
| 206 | + gh_fetch_arg_parser.add_argument( |
| 207 | + "-pr", |
| 208 | + "--pull-request", |
| 209 | + metavar="ID", |
| 210 | + action="store", |
| 211 | + required=True, |
| 212 | + type=int, |
| 213 | + help="Github Pull Request ID", |
| 214 | + ) |
| 215 | + gh_fetch_arg_parser.add_argument( |
| 216 | + "-m", |
| 217 | + "--module", |
| 218 | + metavar="DIR", |
| 219 | + action="store", |
| 220 | + required=True, |
| 221 | + type=Path, |
| 222 | + help="Module path", |
| 223 | + ) |
| 224 | + gh_fetch_arg_parser.add_argument( |
| 225 | + "-s", |
| 226 | + "--split-commits", |
| 227 | + action="store_true", |
| 228 | + help="Create patch files for each commit instead of a single patch for the entire PR", |
| 229 | + ) |
| 230 | + gh_fetch_arg_parser.add_argument( |
| 231 | + '-t', |
| 232 | + '--token', |
| 233 | + metavar='FILE', |
| 234 | + dest='tokenfile', |
| 235 | + help='File containing GitHub token (alternatively, use GITHUB_TOKEN env variable)', |
| 236 | + ) |
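| | + # Example invocation (the PR number and module path below are hypothetical): |
| | + # west patch gh-fetch --owner zephyrproject-rtos --repo zephyr -pr 12345 -m modules/lib/foo |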
| 237 | + |
169 | 238 | subparsers.add_parser(
|
170 | 239 | "list",
|
171 | 240 | help="List patches",
|
@@ -197,34 +266,41 @@ def filter_args(self, args):
|
197 | 266 | if args.west_workspace.is_relative_to(_WEST_TOPDIR):
|
198 | 267 | args.west_workspace = topdir / args.west_workspace.relative_to(_WEST_TOPDIR)
|
199 | 268 |
|
200 | | - def do_run(self, args, _): |
201 | | - self.filter_args(args) |
202 | | - |
| 269 | + def load_yml(self, args, allow_missing): |
203 | 270 | if not os.path.isfile(args.patch_yml):
|
204 | | - self.inf(f"no patches to apply: {args.patch_yml} not found") |
205 | | - return |
| 271 | + if not allow_missing: |
| 272 | + self.inf(f"no patches to apply: {args.patch_yml} not found") |
| 273 | + return None |
206 | 274 |
|
207 | | - west_config = Path(args.west_workspace) / ".west" / "config" |
208 | | - if not os.path.isfile(west_config): |
209 | | - self.die(f"{args.west_workspace} is not a valid west workspace") |
| 275 | + # Return the schema defaults |
| 276 | + return pykwalify.core.Core(source_data={}, schema_data=patches_schema).validate() |
210 | 277 |
|
211 | 278 | try:
|
212 | 279 | with open(args.patch_yml) as f:
|
213 | 280 | yml = yaml.load(f, Loader=SafeLoader)
|
214 | | - if not yml: |
215 | | - self.inf(f"{args.patch_yml} is empty") |
216 | | - return |
217 | | - pykwalify.core.Core(source_data=yml, schema_data=patches_schema).validate() |
| 281 | + return pykwalify.core.Core(source_data=yml, schema_data=patches_schema).validate() |
218 | 282 | except (yaml.YAMLError, pykwalify.errors.SchemaError) as e:
|
219 | 283 | self.die(f"ERROR: Malformed yaml {args.patch_yml}: {e}")
|
220 | 284 |
|
| 285 | + def do_run(self, args, _): |
| 286 | + self.filter_args(args) |
| 287 | + |
| 288 | + west_config = Path(args.west_workspace) / ".west" / "config" |
| 289 | + if not os.path.isfile(west_config): |
| 290 | + self.die(f"{args.west_workspace} is not a valid west workspace") |
| 291 | + |
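| | + # Only the gh-fetch subcommand may run without an existing patches.yml, since it creates one. |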
| 292 | + yml = self.load_yml(args, args.subcommand in ["gh-fetch"]) |
| 293 | + if yml is None: |
| 294 | + return |
| 295 | + |
221 | 296 | if not args.subcommand:
|
222 | 297 | args.subcommand = "list"
|
223 | 298 |
|
224 | 299 | method = {
|
225 | 300 | "apply": self.apply,
|
226 | 301 | "clean": self.clean,
|
227 | 302 | "list": self.list,
|
| 303 | + "gh-fetch": self.gh_fetch, |
228 | 304 | }
|
229 | 305 |
|
230 | 306 | method[args.subcommand](args, yml, args.modules)
|
@@ -348,6 +424,72 @@ def list(self, args, yml, mods=None):
|
348 | 424 | continue
|
349 | 425 | self.inf(patch_info)
|
350 | 426 |
|
| 427 | + def gh_fetch(self, args, yml, mods=None): |
| 428 | + if mods: |
| 429 | + self.die( |
| 430 | + "Module filters are not available for the gh-fetch subcommand, " |
| 431 | + "pass a single -m/--module argument after the subcommand." |
| 432 | + ) |
| 433 | + |
| 434 | + try: |
| 435 | + from github import Auth, Github |
| 436 | + except ImportError: |
| 437 | + self.die("PyGithub not found; can be installed with 'pip install PyGithub'") |
| 438 | + |
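| | + # Read the token from the file given via -t/--token, falling back to the GITHUB_TOKEN |
| | + # environment variable; without a token the GitHub API is queried anonymously. |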
| 439 | + token = Path(args.tokenfile).read_text().strip() if args.tokenfile else os.environ.get("GITHUB_TOKEN") |
| | + gh = Github(auth=Auth.Token(token) if token else None) |
| 440 | + pr = gh.get_repo(f"{args.owner}/{args.repo}").get_pull(args.pull_request) |
| 441 | + args.patch_base.mkdir(parents=True, exist_ok=True) |
| 442 | + |
| 443 | + if args.split_commits: |
| 444 | + for cm in pr.get_commits(): |
| 445 | + subject = cm.commit.message.splitlines()[0] |
| 446 | + filename = "-".join(filter(None, re.split("[^a-zA-Z0-9]+", subject))) + ".patch" |
| 447 | + |
| 448 | + # No patch URL is provided by the API, but appending .patch to the HTML URL works too |
| 449 | + urllib.request.urlretrieve(f"{cm.html_url}.patch", args.patch_base / filename) |
| 450 | + |
| 451 | + patch_info = { |
| 452 | + "path": filename, |
| 453 | + "sha256sum": self.get_file_sha256sum(args.patch_base / filename), |
| 454 | + "module": str(args.module), |
| 455 | + "author": cm.commit.author.name or "Hidden", |
| 456 | + "email": cm. commit. author. email or "[email protected]", |
| 457 | + "date": cm.commit.author.date.strftime("%Y-%m-%d"), |
| 458 | + "upstreamable": True, |
| 459 | + "merge-pr": pr.html_url, |
| 460 | + "merge-status": pr.merged, |
| 461 | + } |
| 462 | + |
| 463 | + yml.setdefault("patches", []).append(patch_info) |
| 464 | + else: |
| 465 | + filename = "-".join(filter(None, re.split("[^a-zA-Z0-9]+", pr.title))) + ".patch" |
| 466 | + urllib.request.urlretrieve(pr.patch_url, args.patch_base / filename) |
| 467 | + |
| 468 | + patch_info = { |
| 469 | + "path": filename, |
| 470 | + "sha256sum": self.get_file_sha256sum(args.patch_base / filename), |
| 471 | + "module": str(args.module), |
| 472 | + "author": pr.user.name or "Hidden", |
| 473 | + "email": pr. user. email or "[email protected]", |
| 474 | + "date": pr.created_at.strftime("%Y-%m-%d"), |
| 475 | + "upstreamable": True, |
| 476 | + "merge-pr": pr.html_url, |
| 477 | + "merge-status": pr.merged, |
| 478 | + } |
| 479 | + |
| 480 | + yml.setdefault("patches", []).append(patch_info) |
| 481 | + |
| 482 | + args.patch_yml.parent.mkdir(parents=True, exist_ok=True) |
| 483 | + with open(args.patch_yml, "w") as f: |
| 484 | + yaml.dump(yml, f, Dumper=SafeDumper) |
| 485 | + |
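| | + # For illustration only, an appended patches.yml entry has roughly this shape |
| | + # (all values below are made up): |
| | + # |
| | + #   patches: |
| | + #     - path: fix-build-warning.patch |
| | + #       sha256sum: <sha256 of the patch file> |
| | + #       module: modules/lib/foo |
| | + #       author: Jane Doe |
| | + #       email: jane@example.com |
| | + #       date: 2024-01-01 |
| | + #       upstreamable: true |
| | + #       merge-pr: https://github.com/zephyrproject-rtos/zephyr/pull/12345 |
| | + #       merge-status: true |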
| 486 | + @staticmethod |
| 487 | + def get_file_sha256sum(filename: Path) -> str: |
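| | + # hashlib.file_digest() requires Python 3.11 or newer |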
| 488 | + with open(filename, "rb") as fp: |
| 489 | + digest = hashlib.file_digest(fp, "sha256") |
| 490 | + |
| 491 | + return digest.hexdigest() |
| 492 | + |
351 | 493 | @staticmethod
|
352 | 494 | def get_mod_paths(args, yml):
|
353 | 495 | patches = yml.get("patches", [])
|
|