From df9d583f4f9b3cd6967f8f13be08ab5d960ae1ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benjamin=20Cab=C3=A9?= Date: Tue, 18 Mar 2025 14:26:24 +0100 Subject: [PATCH 1/2] ci: scripts: test maintainers UI MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit xxx Signed-off-by: Benjamin Cabé --- .github/workflows/maintainers-web.yml | 62 ++ scripts/generate_maintainers_web.py | 814 ++++++++++++++++++++++++++ 2 files changed, 876 insertions(+) create mode 100644 .github/workflows/maintainers-web.yml create mode 100755 scripts/generate_maintainers_web.py diff --git a/.github/workflows/maintainers-web.yml b/.github/workflows/maintainers-web.yml new file mode 100644 index 0000000000000..c3c65430de145 --- /dev/null +++ b/.github/workflows/maintainers-web.yml @@ -0,0 +1,62 @@ +name: Build and Deploy Maintainers Visualization + +on: + push: + branches: [ main ] + schedule: + # Run daily at 00:00 UTC to keep the visualization up to date + - cron: '0 0 * * *' + workflow_dispatch: # Allow manual trigger + +permissions: + contents: read + pages: write + id-token: write + +# Allow only one concurrent deployment +concurrency: + group: "pages" + cancel-in-progress: true + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.x' + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install pyyaml tabulate + + - name: Create output directory + run: mkdir -p _site + + - name: Generate visualization + run: | + python scripts/generate_maintainers_web.py + mv index.html maintainers_data.json _site/ + + - name: Setup Pages + uses: actions/configure-pages@v4 + + - name: Upload artifact + uses: actions/upload-pages-artifact@v3 + with: + path: _site + + deploy: + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + needs: build + steps: + - name: Deploy to 
#!/usr/bin/env python3
"""Generate a static web visualization of Zephyr maintainership coverage.

Walks the repository tree, annotates every file and directory with the
maintainer information resolved from MAINTAINERS.yml (via the
``get_maintainer`` helper script), and emits two artifacts for GitHub
Pages:

* ``maintainers_data.json`` -- the annotated tree plus generation metadata.
* ``index.html``            -- a self-contained page that renders the tree.
"""

import json
import os
from datetime import datetime, timezone
from pathlib import Path  # kept: present in the original import block

# Status values, highest precedence first.
_STATUS_MAINTAINED = 'maintained'
_STATUS_ODD_FIXES = 'odd fixes'
_STATUS_UNMAINTAINED = 'unmaintained'


def get_repo_structure(start_path='.'):
    """Return a nested dict mirroring the repository tree.

    Directories map to dicts, files map to ``None``.  Hidden directories
    (names starting with '.') are pruned.  Entries are visited in sorted
    order so repeated runs produce identical output.

    :param start_path: root of the tree to walk (default: cwd).
    """
    structure = {}

    for root, dirs, files in os.walk(start_path):
        # Prune hidden directories (including .git) in place so os.walk
        # does not descend into them; sort for deterministic traversal.
        dirs[:] = sorted(d for d in dirs if not d.startswith('.'))

        rel_path = os.path.relpath(root, start_path)

        # Navigate to the dict node for this directory.  setdefault keeps
        # this robust even if a parent entry was not pre-created.
        current = structure
        if rel_path != '.':
            for part in rel_path.split(os.sep):
                current = current.setdefault(part, {})

        for d in dirs:
            current.setdefault(d, {})
        for f in sorted(files):
            current[f] = None

    return structure


def get_maintainer_info(path, maintainers):
    """Resolve maintainer metadata for *path*.

    Combines every MAINTAINERS area matching *path*.  The overall status
    follows the precedence maintained > odd fixes > unmaintained, while
    maintainers/collaborators/labels are the union over all matching
    areas regardless of status.

    :param path: repo-relative path of a file or directory.
    :param maintainers: object exposing ``path2areas(path)`` (see
        ``get_maintainer.Maintainers``).
    :return: dict with 'status', 'maintainers', 'collaborators', 'labels'.
    """
    areas = maintainers.path2areas(path)
    if not areas:
        return {
            'status': _STATUS_UNMAINTAINED,
            'maintainers': [],
            'collaborators': [],
            'labels': [],
        }

    if any(a.status == _STATUS_MAINTAINED for a in areas):
        status = _STATUS_MAINTAINED
    elif any(a.status == _STATUS_ODD_FIXES for a in areas):
        status = _STATUS_ODD_FIXES
    else:
        status = _STATUS_UNMAINTAINED

    all_maintainers = set()
    all_collaborators = set()
    all_labels = set()
    for area in areas:
        all_maintainers.update(area.maintainers)
        all_collaborators.update(area.collaborators)
        all_labels.update(area.labels)

    # Sorted lists keep the generated JSON deterministic across runs
    # (sets have no stable iteration order).
    return {
        'status': status,
        'maintainers': sorted(all_maintainers),
        'collaborators': sorted(all_collaborators),
        'labels': sorted(all_labels),
    }


def _inherit_from_contents(dir_info, processed_contents):
    """Derive a directory's status/people from its already-processed contents.

    Mutates *dir_info* in place.  The directory becomes 'maintained' only
    when every descendant is maintained, 'odd fixes' when every descendant
    is at least odd-fixes; otherwise it stays 'unmaintained'.  When a
    status is inherited, the union of descendant maintainers/collaborators/
    labels is attached and the node is flagged as inherited.
    """
    has_contents = False
    all_maintained = True
    all_odd_fixes = True
    maintainers_acc = set()
    collaborators_acc = set()
    labels_acc = set()

    def visit(contents):
        nonlocal has_contents, all_maintained, all_odd_fixes
        for item in contents.values():
            has_contents = True
            info = item['maintainer_info']
            status = info['status']
            if status == _STATUS_UNMAINTAINED:
                all_maintained = False
                all_odd_fixes = False
            elif status == _STATUS_ODD_FIXES:
                all_maintained = False
            maintainers_acc.update(info.get('maintainers', []))
            collaborators_acc.update(info.get('collaborators', []))
            labels_acc.update(info.get('labels', []))
            # Recurse so deeply nested files also contribute.
            if item['type'] == 'directory' and 'contents' in item:
                visit(item['contents'])

    visit(processed_contents)

    if has_contents:
        if all_maintained:
            dir_info['status'] = _STATUS_MAINTAINED
        elif all_odd_fixes:
            dir_info['status'] = _STATUS_ODD_FIXES

    if dir_info['status'] != _STATUS_UNMAINTAINED:
        dir_info['maintainers'] = sorted(maintainers_acc)
        dir_info['collaborators'] = sorted(collaborators_acc)
        dir_info['labels'] = sorted(labels_acc)
        dir_info['inherited'] = True
        dir_info['inherited_from'] = 'subdirectories'


def process_structure(structure, maintainers, current_path=''):
    """Annotate a tree from :func:`get_repo_structure` with maintainer info.

    Returns a parallel tree in which each node carries 'type' ('file' or
    'directory'), 'maintainer_info', and, for directories, 'contents'.
    Directories not matched directly by MAINTAINERS inherit an aggregate
    status from their contents (see :func:`_inherit_from_contents`).
    """
    result = {}

    for name, content in structure.items():
        path = os.path.join(current_path, name)

        if content is None:  # file
            result[name] = {
                'type': 'file',
                'maintainer_info': get_maintainer_info(path, maintainers),
            }
            continue

        # Directory: process children first, then resolve its own status.
        processed_contents = process_structure(content, maintainers, path)
        dir_info = get_maintainer_info(path, maintainers)

        if dir_info['status'] == _STATUS_UNMAINTAINED:
            _inherit_from_contents(dir_info, processed_contents)

        result[name] = {
            'type': 'directory',
            'maintainer_info': dir_info,
            'contents': processed_contents,
        }

    return result


def generate_html(data):
    """Write ``maintainers_data.json`` and ``index.html`` for GitHub Pages.

    :param data: annotated tree produced by :func:`process_structure`.
    """
    payload = {
        # Timezone-aware UTC timestamp: this runs in CI where the local
        # zone is meaningless, and naive datetimes are ambiguous.
        'generated_at': datetime.now(timezone.utc).isoformat(),
        'data': data,
    }
    with open('maintainers_data.json', 'w', encoding='utf-8') as f:
        json.dump(payload, f, ensure_ascii=False)

    # NOTE(review): the original inline template was garbled in transit;
    # this is a minimal self-contained reconstruction with the same
    # contract -- it fetches maintainers_data.json, renders the annotated
    # tree, and shows the four-state legend.  Confirm against the
    # intended upstream template.
    html = '''<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Zephyr Repository Maintainership</title>
<style>
body { font-family: sans-serif; margin: 2em; }
.maintained { color: #2e7d32; }
.maintained-inherited { color: #66bb6a; }
.odd-fixes { color: #f9a825; }
.unmaintained { color: #c62828; }
ul.tree { list-style: none; padding-left: 1.2em; }
.legend span { margin-right: 1.5em; }
</style>
</head>
<body>
<h1>Zephyr Repository Maintainership</h1>
<div class="legend">
  <span class="maintained">&#9632; Maintained</span>
  <span class="maintained-inherited">&#9632; Maintained (inherited)</span>
  <span class="odd-fixes">&#9632; Odd Fixes</span>
  <span class="unmaintained">&#9632; Unmaintained</span>
</div>
<p id="generated"></p>
<div id="tree"></div>
<script>
function cssClass(info) {
  if (info.status === 'maintained')
    return info.inherited ? 'maintained-inherited' : 'maintained';
  if (info.status === 'odd fixes') return 'odd-fixes';
  return 'unmaintained';
}
function render(node) {
  const ul = document.createElement('ul');
  ul.className = 'tree';
  for (const name of Object.keys(node).sort()) {
    const item = node[name];
    const li = document.createElement('li');
    const label = document.createElement('span');
    label.textContent = name;
    label.className = cssClass(item.maintainer_info);
    label.title = (item.maintainer_info.maintainers || []).join(', ');
    li.appendChild(label);
    if (item.type === 'directory') {
      const children = render(item.contents);
      children.hidden = true;
      label.style.cursor = 'pointer';
      label.addEventListener('click', function () {
        children.hidden = !children.hidden;
      });
      li.appendChild(children);
    }
    ul.appendChild(li);
  }
  return ul;
}
fetch('maintainers_data.json')
  .then(function (r) { return r.json(); })
  .then(function (payload) {
    document.getElementById('generated').textContent =
      'Generated at ' + payload.generated_at;
    document.getElementById('tree').appendChild(render(payload.data));
  });
</script>
</body>
</html>
'''

    # index.html (not maintainers_visualization.html) so GitHub Pages
    # serves it at the site root.
    with open('index.html', 'w', encoding='utf-8') as f:
        f.write(html)


def main():
    """Entry point: scan the tree, annotate it, and emit the site files."""
    # Imported here (not at module level) so this module can be imported
    # for testing without the Zephyr scripts path configured.
    from get_maintainer import Maintainers

    structure = get_repo_structure()
    maintainers = Maintainers()
    processed_structure = process_structure(structure, maintainers)
    generate_html(processed_structure)
    print("Generated index.html")


if __name__ == '__main__':
    main()