diff --git a/.github/workflows/assigner-workflow.yml b/.github/workflows/assigner-workflow.yml new file mode 100644 index 00000000000..8bfc37ba225 --- /dev/null +++ b/.github/workflows/assigner-workflow.yml @@ -0,0 +1,76 @@ +name: Pull Request Assigner Completion Workflow + +# read-write repo token +# access to secrets +on: + workflow_run: + workflows: ["Pull Request Assigner"] + types: + - completed + +permissions: + contents: read + +jobs: + assignment: + name: Pull Request Assignment + runs-on: ubuntu-24.04 + if: > + github.event.workflow_run.event == 'pull_request' && + github.event.workflow_run.conclusion == 'success' + + steps: + - name: Check out source code + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + fetch-depth: 0 + persist-credentials: false + - name: Download artifacts + id: download-artifacts + uses: dawidd6/action-download-artifact@ac66b43f0e6a346234dd65d4d0c8fbb31cb316e5 # v11 + with: + workflow: assigner.yml + run_id: ${{ github.event.workflow_run.id }} + if_no_artifact_found: ignore + + - name: Load PR number + if: steps.download-artifacts.outputs.found_artifact == 'true' + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + with: + script: | + let fs = require("fs"); + let pr_number = Number(fs.readFileSync("./pr/NR")); + core.exportVariable("PR_NUM", pr_number); + + - name: Check PR number + if: steps.download-artifacts.outputs.found_artifact == 'true' + id: check-pr + uses: carpentries/actions/check-valid-pr@2e20fd5ee53b691e27455ce7ca3b16ea885140e8 # v0.15.0 + with: + pr: ${{ env.PR_NUM }} + sha: ${{ github.event.workflow_run.head_sha }} + + - name: Validate PR number + if: | + steps.download-artifacts.outputs.found_artifact == 'true' && + steps.check-pr.outputs.VALID != 'true' + run: | + echo "ABORT: PR number validation failed!" 
+ exit 1 + + - name: Set up Python + uses: zephyrproject-rtos/action-python-env@main + with: + python-version: 3.12 + + - name: Run assignment script + env: + GITHUB_TOKEN: ${{ secrets.ZB_PR_ASSIGNER_GITHUB_TOKEN }} + run: | + if [ -f "./pr/manifest_areas.json" ]; then + ARGS="--areas ./pr/manifest_areas.json" + else + ARGS="" + fi + python3 scripts/set_assignees.py -P ${{ env.PR_NUM }} -M MAINTAINERS.yml -v \ + --repo ${{ github.event.repository.name }} ${ARGS} diff --git a/.github/workflows/assigner.yml b/.github/workflows/assigner.yml index 969fa8b4bdc..6a68be5da26 100644 --- a/.github/workflows/assigner.yml +++ b/.github/workflows/assigner.yml @@ -1,7 +1,7 @@ name: Pull Request Assigner on: - pull_request_target: + pull_request: types: - opened - synchronize @@ -24,41 +24,62 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-24.04 permissions: - pull-requests: write # to add assignees to pull requests issues: write # to add assignees to issues steps: - - name: Check out source code - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + - name: Check out source code + uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0 + with: + fetch-depth: 0 + persist-credentials: false - - name: Set up Python - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 - with: - python-version: 3.12 - cache: pip - cache-dependency-path: scripts/requirements-actions.txt + - name: Set up Python + uses: zephyrproject-rtos/action-python-env@main + with: + python-version: 3.12 - - name: Install Python packages - run: | - pip install -r scripts/requirements-actions.txt --require-hashes + - name: west setup + if: > + github.event_name == 'pull_request' + run: | + git config --global user.email "you@example.com" + git config --global user.name "Your Name" + west init -l . 
|| true + mkdir -p ./pr - - name: Run assignment script - env: - GITHUB_TOKEN: ${{ secrets.ZB_PR_ASSIGNER_GITHUB_TOKEN }} - run: | - FLAGS="-v" - FLAGS+=" -o ${{ github.event.repository.owner.login }}" - FLAGS+=" -r ${{ github.event.repository.name }}" - FLAGS+=" -M MAINTAINERS.yml" - if [ "${{ github.event_name }}" = "pull_request_target" ]; then - FLAGS+=" -P ${{ github.event.pull_request.number }}" - elif [ "${{ github.event_name }}" = "issues" ]; then + - name: Run assignment script + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + FLAGS="-v" + FLAGS+=" -o ${{ github.event.repository.owner.login }}" + FLAGS+=" -r ${{ github.event.repository.name }}" + FLAGS+=" -M MAINTAINERS.yml" + if [ "${{ github.event_name }}" = "pull_request" ]; then + FLAGS+=" -P ${{ github.event.pull_request.number }} --manifest -c origin/${{ github.base_ref }}.." + python3 scripts/set_assignees.py $FLAGS + cp -f manifest_areas.json ./pr/ + elif [ "${{ github.event_name }}" = "issues" ]; then FLAGS+=" -I ${{ github.event.issue.number }}" - elif [ "${{ github.event_name }}" = "schedule" ]; then + python3 scripts/set_assignees.py $FLAGS + elif [ "${{ github.event_name }}" = "schedule" ]; then FLAGS+=" --modules" - else - echo "Unknown event: ${{ github.event_name }}" - exit 1 - fi + python3 scripts/set_assignees.py $FLAGS + else + echo "Unknown event: ${{ github.event_name }}" + exit 1 + fi + + + - name: Save PR number + if: > + github.event_name == 'pull_request' + run: | + echo ${{ github.event.number }} > ./pr/NR + - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2 + if: > + github.event_name == 'pull_request' + with: + name: pr + path: pr/ - python3 scripts/set_assignees.py $FLAGS diff --git a/doc/requirements.txt b/doc/requirements.txt index 25ec15e4ab3..a4467dc5df7 100644 --- a/doc/requirements.txt +++ b/doc/requirements.txt @@ -1,6 +1,10 @@ # This file was autogenerated by uv via the following command: # uv pip compile --universal 
--python-version 3.10 --generate-hashes requirements.in --output-file requirements.txt -alabaster==1.0.0 \ +alabaster==0.7.16 ; python_full_version < '3.10' \ + --hash=sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65 \ + --hash=sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92 + # via sphinx +alabaster==1.0.0 ; python_full_version >= '3.10' \ --hash=sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e \ --hash=sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b # via sphinx @@ -10,7 +14,11 @@ anyio==4.9.0 \ # via # starlette # watchfiles -anytree==2.13.0 \ +anytree==2.12.1 ; python_full_version < '3.9.2' \ + --hash=sha256:244def434ccf31b668ed282954e5d315b4e066c4940b94aff4a7962d85947830 \ + --hash=sha256:5ea9e61caf96db1e5b3d0a914378d2cd83c269dfce1fb8242ce96589fa3382f0 + # via -r requirements.in +anytree==2.13.0 ; python_full_version >= '3.9.2' \ --hash=sha256:4cbcf10df36b1f1cba131b7e487ff3edafc9d6e932a3c70071b5b768bab901ff \ --hash=sha256:c9d3aa6825fdd06af7ebb05b4ef291d2db63e62bb1f9b7d9b71354be9d362714 # via -r requirements.in @@ -116,7 +124,11 @@ charset-normalizer==3.4.2 \ --hash=sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a \ --hash=sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f # via requests -click==8.2.1 \ +click==8.1.8 ; python_full_version < '3.10' \ + --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ + --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a + # via uvicorn +click==8.2.1 ; python_full_version >= '3.10' \ --hash=sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202 \ --hash=sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b # via uvicorn @@ -165,6 +177,10 @@ imagesize==1.4.1 \ --hash=sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b \ 
--hash=sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a # via sphinx +importlib-metadata==8.7.0 ; python_full_version < '3.10' \ + --hash=sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000 \ + --hash=sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd + # via sphinx iniconfig==2.1.0 \ --hash=sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7 \ --hash=sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760 @@ -354,9 +370,9 @@ pyserial==3.5 \ --hash=sha256:3c77e014170dfffbd816e6ffc205e9842efb10be9f58ec16d3e8675b4925cddb \ --hash=sha256:c4451db6ba391ca6ca299fb3ec7bae67a5c55dde170964c7a14ceefec02f2cf0 # via -r requirements.in -pytest==8.4.1 \ - --hash=sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7 \ - --hash=sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c +pytest==8.4.2 \ + --hash=sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01 \ + --hash=sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79 # via -r requirements.in python-dateutil==2.9.0.post0 \ --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ @@ -485,6 +501,7 @@ six==1.17.0 \ --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 # via + # anytree # doxmlparser # python-dateutil sniffio==1.3.1 \ @@ -495,7 +512,22 @@ snowballstemmer==3.0.1 \ --hash=sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064 \ --hash=sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895 # via sphinx -sphinx==8.1.3 ; python_full_version < '3.11' \ +sphinx==7.4.7 ; python_full_version < '3.10' \ + --hash=sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe \ + --hash=sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239 + # via + # -r 
requirements.in + # sphinx-autobuild + # sphinx-copybutton + # sphinx-last-updated-by-git + # sphinx-notfound-page + # sphinx-rtd-theme + # sphinx-tabs + # sphinx-togglebutton + # sphinxcontrib-jquery + # sphinxcontrib-programoutput + # sphinxcontrib-svg2pdfconverter +sphinx==8.1.3 ; python_full_version == '3.10.*' \ --hash=sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2 \ --hash=sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927 # via @@ -545,9 +577,9 @@ sphinx-rtd-theme==3.0.2 \ --hash=sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13 \ --hash=sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85 # via -r requirements.in -sphinx-sitemap==2.7.2 \ - --hash=sha256:1a6a8dcecb0ffb85fd37678f785cfcc40adfe3eebafb05e678971e5260b117e4 \ - --hash=sha256:819e028e27579b47efa0e2f863b87136b711c45f13e84730610e80316f6883da +sphinx-sitemap==2.8.0 \ + --hash=sha256:332042cd5b9385f61ec2861dfd550d9bccbdfcff86f6b68c7072cf40c9f16363 \ + --hash=sha256:749d7184a0c7b73d486a232b54b5c1b38a0e2d6f18cf19fb1b033b8162b44a82 # via -r requirements.in sphinx-tabs==3.4.7 \ --hash=sha256:991ad4a424ff54119799ba1491701aa8130dd43509474aef45a81c42d889784d \ @@ -832,3 +864,7 @@ wheel==0.45.1 \ --hash=sha256:661e1abd9198507b1409a20c02106d9670b2576e916d58f520316666abca6729 \ --hash=sha256:708e7481cc80179af0e556bbf0cc00b8444c7321e2700b8d8580231d13017248 # via sphinx-togglebutton +zipp==3.23.0 ; python_full_version < '3.10' \ + --hash=sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e \ + --hash=sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166 + # via importlib-metadata diff --git a/scripts/get_maintainer.py b/scripts/get_maintainer.py index 855a6bf3950..7ca14d9a773 100755 --- a/scripts/get_maintainer.py +++ b/scripts/get_maintainer.py @@ -109,6 +109,18 @@ def _parse_args(): nargs="?", help="List all areas maintained by maintainer.") + + area_parser = subparsers.add_parser( + 
"area", + help="List area(s) by name") + area_parser.add_argument( + "name", + metavar="AREA", + nargs="?", + help="List all areas with the given name.") + + area_parser.set_defaults(cmd_fn=Maintainers._area_cmd) + # New arguments for filtering areas_parser.add_argument( "--without-maintainers", @@ -220,6 +232,12 @@ def __init__(self, filename=None): self.areas[area_name] = area + def name2areas(self, name): + """ + Returns a list of Area instances for the areas that match 'name'. + """ + return [area for area in self.areas.values() if area.name == name] + def path2areas(self, path): """ Returns a list of Area instances for the areas that contain 'path', @@ -262,6 +280,14 @@ def __repr__(self): # Command-line subcommands # + def _area_cmd(self, args): + # 'area' subcommand implementation + + res = set() + areas = self.name2areas(args.name) + res.update(areas) + _print_areas(res) + def _path_cmd(self, args): # 'path' subcommand implementation diff --git a/scripts/set_assignees.py b/scripts/set_assignees.py index a428f102cfe..7d1e3438b10 100755 --- a/scripts/set_assignees.py +++ b/scripts/set_assignees.py @@ -8,16 +8,21 @@ import os import time import datetime +import json from github import Github, GithubException from github.GithubException import UnknownObjectException from collections import defaultdict from west.manifest import Manifest from west.manifest import ManifestProject +from git import Repo +from pathlib import Path TOP_DIR = os.path.join(os.path.dirname(__file__)) sys.path.insert(0, os.path.join(TOP_DIR, "scripts")) from get_maintainer import Maintainers +zephyr_base = os.getenv('ZEPHYR_BASE', os.path.join(TOP_DIR, '..')) + def log(s): if args.verbose > 0: print(s, file=sys.stdout) @@ -50,11 +55,73 @@ def parse_args(): parser.add_argument("-r", "--repo", default="zephyr", help="Github repository") + parser.add_argument("-c", "--commits", default=None, + help="Commit range in the form: a..b") + + parser.add_argument("--manifest", action="store_true", 
default=False, + help="Dump manifest changes") + + parser.add_argument("--areas", default=None, + help="Load list of areas from file generated by --manifest") + parser.add_argument("-v", "--verbose", action="count", default=0, help="Verbose Output") args = parser.parse_args() + +def process_manifest(): + log("Processing manifest changes") + repo = Repo(zephyr_base) + old_manifest_content = repo.git.show(f"{args.commits[:-2]}:west.yml") + with open("west_old.yml", "w") as manifest: + manifest.write(old_manifest_content) + old_manifest = Manifest.from_file("west_old.yml") + new_manifest = Manifest.from_file("west.yml") + old_projs = set((p.name, p.revision) for p in old_manifest.projects) + new_projs = set((p.name, p.revision) for p in new_manifest.projects) + # Removed projects + rprojs = set(filter(lambda p: p[0] not in list(p[0] for p in new_projs), + old_projs - new_projs)) + # Updated projects + uprojs = set(filter(lambda p: p[0] in list(p[0] for p in old_projs), + new_projs - old_projs)) + # Added projects + aprojs = new_projs - old_projs - uprojs + + # All projs + projs = rprojs | uprojs | aprojs + projs_names = [name for name, rev in projs] + + log(f"found modified projects: {projs_names}") + areas = [] + for p in projs_names: + areas.append(f'West project: {p}') + + log(f'manifest areas: {areas}') + return areas + + +def dump_manifest_changes(gh, maintainer_file, number): + gh_repo = gh.get_repo(f"{args.org}/{args.repo}") + pr = gh_repo.get_pull(number) + fn = list(pr.get_files()) + areas = [] + for changed_file in fn: + log(f"file: {changed_file.filename}") + + if changed_file.filename in ['west.yml','submanifests/optional.yaml']: + changed_areas = process_manifest() + for _area in changed_areas: + area_match = maintainer_file.name2areas(_area) + if area_match: + areas.extend(area_match) + + log(f"Areas: {areas}") + # now dump the list of areas into a json file + with open("manifest_areas.json", "w") as f: + json.dump([area.name for area in areas], f, 
indent=4) + def process_pr(gh, maintainer_file, number): gh_repo = gh.get_repo(f"{args.org}/{args.repo}") @@ -67,13 +134,8 @@ def process_pr(gh, maintainer_file, number): found_maintainers = defaultdict(int) num_files = 0 - all_areas = set() fn = list(pr.get_files()) - for changed_file in fn: - if changed_file.filename in ['west.yml','submanifests/optional.yaml']: - break - if pr.commits == 1 and (pr.additions <= 1 and pr.deletions <= 1): labels = {'size: XS'} @@ -81,21 +143,52 @@ def process_pr(gh, maintainer_file, number): log(f"Too many files changed ({len(fn)}), skipping....") return + # areas where assignment happens if only area is affected + meta_areas = [ + 'Release Notes', + 'Documentation', + 'Samples' + ] + for changed_file in fn: + num_files += 1 log(f"file: {changed_file.filename}") - areas = maintainer_file.path2areas(changed_file.filename) + + areas = [] + if changed_file.filename in ['west.yml','submanifests/optional.yaml']: + if args.areas and Path(args.areas).is_file(): + with open(args.areas, "r") as f: + parsed_areas = json.load(f) + for _area in parsed_areas: + area_match = maintainer_file.name2areas(_area) + if area_match: + areas.extend(area_match) + else: + log(f"Manifest changes detected but no --areas file specified, skipping...") + continue + else: + areas = maintainer_file.path2areas(changed_file.filename) + + print(f"areas for {changed_file}: {areas}") if not areas: continue - all_areas.update(areas) + # instance of an area, for example a driver or a board, not APIs or subsys code. is_instance = False sorted_areas = sorted(areas, key=lambda x: 'Platform' in x.name, reverse=True) for area in sorted_areas: - c = 1 if not is_instance else 0 + # do not count cmake file changes, i.e. 
when there are changes to + # instances of an area listed in both the subsystem and the + # platform implementing it + if 'CMakeLists.txt' in changed_file.filename or area.name in meta_areas: + c = 0 + else: + c = 1 if not is_instance else 0 area_counter[area] += c + print(f"area counter: {area_counter}") labels.update(area.labels) # FIXME: Here we count the same file multiple times if it exists in # multiple areas with same maintainer @@ -122,22 +215,26 @@ def process_pr(gh, maintainer_file, number): log(f"Submitted by: {pr.user.login}") log(f"candidate maintainers: {_all_maintainers}") - assignees = [] - tmp_assignees = [] + ranked_assignees = [] + assignees = None # we start with areas with most files changed and pick the maintainer from the first one. # if the first area is an implementation, i.e. driver or platform, we # continue searching for any other areas involved for area, count in area_counter.items(): - if count == 0: + # if only meta area is affected, assign one of the maintainers of that area + if area.name in meta_areas and len(area_counter) == 1: + assignees = area.maintainers + break + # if no maintainers, skip + if count == 0 or len(area.maintainers) == 0: continue + # if there are maintainers, but no assignees yet, set them if len(area.maintainers) > 0: - tmp_assignees = area.maintainers if pr.user.login in area.maintainers: - # submitter = assignee, try to pick next area and - # assign someone else other than the submitter - # when there also other maintainers for the area - # assign them + # If submitter = assignee, try to pick next area and assign + # someone else other than the submitter, otherwise when there + # are other maintainers for the area, assign them. 
 if len(area.maintainers) > 1: assignees = area.maintainers.copy() assignees.remove(pr.user.login) @@ -146,16 +243,25 @@ def process_pr(gh, maintainer_file, number): else: assignees = area.maintainers - if 'Platform' not in area.name: - break + # found a non-platform area that was changed, pick assignee from this + # area and put them on top of the list, otherwise just append. + if 'Platform' not in area.name: + ranked_assignees.insert(0, area.maintainers) + break + else: + ranked_assignees.append(area.maintainers) - if tmp_assignees and not assignees: - assignees = tmp_assignees + if ranked_assignees: + assignees = ranked_assignees[0] if assignees: prop = (found_maintainers[assignees[0]] / num_files) * 100 log(f"Picked assignees: {assignees} ({prop:.2f}% ownership)") log("+++++++++++++++++++++++++") + elif len(_all_maintainers) > 0: + # if we have maintainers found, but could not pick one based on area, + # then pick the one with most changes + assignees = [next(iter(_all_maintainers))] # Set labels if labels: @@ -206,21 +312,24 @@ def process_pr(gh, maintainer_file, number): if len(existing_reviewers) < 15: reviewer_vacancy = 15 - len(existing_reviewers) reviewers = reviewers[:reviewer_vacancy] - - if reviewers: - try: - log(f"adding reviewers {reviewers}...") - if not args.dry_run: - pr.create_review_request(reviewers=reviewers) - except GithubException: - log("cant add reviewer") else: log("not adding reviewers because the existing reviewer count is greater than or " - "equal to 15") + "equal to 15. Adding maintainers of all areas as reviewers instead.") + # FIXME: Here we could also add collaborators of the areas most + # affected, i.e. the one with the final assignee. 
+ reviewers = list(_all_maintainers.keys()) + + if reviewers: + try: + log(f"adding reviewers {reviewers}...") + if not args.dry_run: + pr.create_review_request(reviewers=reviewers) + except GithubException: + log("can't add reviewer") ms = [] # assignees - if assignees and not pr.assignee: + if assignees and (not pr.assignee or args.dry_run): try: for assignee in assignees: u = gh.get_user(assignee) @@ -358,7 +467,9 @@ def main(): gh = Github(token) maintainer_file = Maintainers(args.maintainer_file) - if args.pull_request: + if args.pull_request and args.manifest: + dump_manifest_changes(gh, maintainer_file, args.pull_request) + elif args.pull_request: process_pr(gh, maintainer_file, args.pull_request) elif args.issue: process_issue(gh, maintainer_file, args.issue)