diff --git a/.gitignore b/.gitignore
index 716b5d33..823d37e3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -174,4 +174,5 @@ cython_debug/
 .pypirc
 
 # Local Netlify folder
-.netlify
\ No newline at end of file
+.netlify
+backend/app/database/falkor/code-graph-backend/repositories/
diff --git a/backend/app/api/router.py b/backend/app/api/router.py
index 67cd1e56..8ea897ac 100644
--- a/backend/app/api/router.py
+++ b/backend/app/api/router.py
@@ -2,6 +2,7 @@
 from .v1.auth import router as auth_router
 from .v1.health import router as health_router
 from .v1.integrations import router as integrations_router
+from .v1.repo_stats import router as repo_stats_router
 
 api_router = APIRouter()
@@ -23,4 +24,10 @@
     tags=["Integrations"]
 )
 
+api_router.include_router(
+    repo_stats_router,
+    prefix="/api",  # Note: keeping the "/api" prefix as the frontend depends on it
+    tags=["Repository Stats"]
+)
+
 __all__ = ["api_router"]
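The new route is mounted under the `/api` prefix, so the full path is `POST /api/repo-stats`. A quick smoke test with curl, assuming the backend is listening locally on port 8000 (adjust host/port to your setup):

```sh
curl -X POST http://localhost:8000/api/repo-stats \
  -H "Content-Type: application/json" \
  -d '{"repo_url": "https://github.com/octocat/Hello-World"}'
```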
diff --git a/backend/app/api/v1/repo_stats.py b/backend/app/api/v1/repo_stats.py
new file mode 100644
index 00000000..979fc601
--- /dev/null
+++ b/backend/app/api/v1/repo_stats.py
@@ -0,0 +1,185 @@
+"""
+Repository stats endpoint for analyzing GitHub repositories.
+"""
+import logging
+import re
+from typing import Any, Dict, List
+from fastapi import APIRouter, HTTPException
+from pydantic import BaseModel
+from app.services.github.repo_stats import GitHubRepoStatsService
+
+logger = logging.getLogger(__name__)
+
+router = APIRouter()
+
+
+class RepoStatsRequest(BaseModel):
+    repo_url: str
+
+
+class AuthorInfo(BaseModel):
+    login: str | None = None
+    avatar_url: str | None = None
+    profile_url: str | None = None
+
+
+class ContributorInfo(BaseModel):
+    login: str | None = None
+    avatar_url: str | None = None
+    profile_url: str | None = None
+    contributions: int = 0
+    type: str = "User"
+
+
+class PullRequestDetail(BaseModel):
+    number: int
+    title: str
+    state: str
+    url: str
+    created_at: str | None = None
+    updated_at: str | None = None
+    author: AuthorInfo
+    labels: List[str] = []
+    comments: int = 0
+    draft: bool = False
+
+
+class PullRequestStats(BaseModel):
+    open: int = 0
+    closed: int = 0
+    merged: int = 0
+    total: int = 0
+    details: List[PullRequestDetail] = []
+
+
+class IssueDetail(BaseModel):
+    number: int
+    title: str
+    state: str
+    url: str
+    created_at: str | None = None
+    author: AuthorInfo
+    labels: List[str] = []
+    comments: int = 0
+
+
+class IssueStats(BaseModel):
+    open: int = 0
+    closed: int = 0
+    total: int = 0
+    details: List[IssueDetail] = []
+
+
+class CommitActivity(BaseModel):
+    week: str
+    total: int = 0
+    days: List[int] = []
+
+
+class ReleaseInfo(BaseModel):
+    tag_name: str
+    name: str | None = None
+    published_at: str | None = None
+    url: str | None = None
+    prerelease: bool = False
+
+
+class RepositoryInfo(BaseModel):
+    name: str
+    full_name: str
+    description: str | None = None
+    url: str
+    stars: int = 0
+    forks: int = 0
+    watchers: int = 0
+    open_issues_count: int = 0
+    default_branch: str | None = None
+    created_at: str | None = None
+    updated_at: str | None = None
+    pushed_at: str | None = None
+    topics: List[str] = []
+    license: str | None = None
+
+
+class Metrics(BaseModel):
+    total_contributors: int = 0
+    total_commits_recent: int = 0
+    stars: int = 0
+    forks: int = 0
+    open_prs: int = 0
+    open_issues: int = 0
+
+
+class RepoStatsResponse(BaseModel):
+    status: str
+    repo: str | None = None
+    message: str | None = None
+    repository: RepositoryInfo | None = None
+    contributors: List[ContributorInfo] = []
+    pull_requests: PullRequestStats | None = None
+    issues: IssueStats | None = None
+    commit_activity: List[CommitActivity] = []
+    languages: Dict[str, int] = {}
+    releases: List[ReleaseInfo] = []
+    metrics: Metrics | None = None
+
+
+def parse_repo_url(repo_input: str) -> tuple[str, str]:
+    """Parse a repository URL or 'owner/repo' string into (owner, repo)."""
+    # str.rstrip('.git') would strip any trailing '.', 'g', 'i' or 't'
+    # characters (e.g. 'dolt' -> 'dol'), so strip the suffix explicitly.
+    repo_input = repo_input.strip().rstrip('/').removesuffix('.git')
+
+    patterns = [
+        (r'github\.com[:/]([^/]+)/([^/]+?)(?:\.git)?$', 'url'),
+        (r'^([a-zA-Z0-9][-a-zA-Z0-9]*)/([a-zA-Z0-9._-]+)$', 'short')
+    ]
+
+    for pattern, _ in patterns:
+        match = re.search(pattern, repo_input)
+        if match:
+            owner, repo = match.groups()
+            return owner, repo
+
+    raise ValueError(
+        f"Invalid repository format: '{repo_input}'. "
+        "Expected: 'owner/repo' or 'https://github.com/owner/repo'"
+    )
+
+
+@router.post("/repo-stats", response_model=RepoStatsResponse)
+async def analyze_repository(request: RepoStatsRequest):
+    """
+    Analyze a GitHub repository and return comprehensive stats.
+
+    Returns contributors, pull requests, issues, commit activity,
+    languages, and other repository metrics.
+    """
+    try:
+        logger.info(f"Received repo-stats request for: {request.repo_url}")
+
+        # Parse the repository URL
+        try:
+            owner, repo = parse_repo_url(request.repo_url)
+        except ValueError as e:
+            raise HTTPException(status_code=400, detail=str(e)) from e
+
+        logger.info(f"Fetching stats for {owner}/{repo}")
+
+        # Fetch comprehensive stats from GitHub
+        async with GitHubRepoStatsService() as stats_service:
+            result = await stats_service.get_comprehensive_stats(owner, repo)
+
+        logger.info(f"Successfully fetched stats for {owner}/{repo}")
+
+        return result
+
+    except HTTPException:
+        raise
+    except ValueError as e:
+        logger.exception(f"Value error: {e}")
+        raise HTTPException(status_code=404, detail=str(e)) from e
+    except Exception as e:
+        logger.exception(f"Error analyzing repository: {e}")
+        raise HTTPException(
+            status_code=500,
+            detail=f"Failed to analyze repository: {str(e)}"
+        ) from e
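For reference, the input formats `parse_repo_url` accepts, as a minimal sketch (the import path assumes the backend package layout shown in this diff):

```python
from app.api.v1.repo_stats import parse_repo_url

assert parse_repo_url("octocat/Hello-World") == ("octocat", "Hello-World")
assert parse_repo_url("https://github.com/octocat/Hello-World/") == ("octocat", "Hello-World")
assert parse_repo_url("git@github.com:octocat/Hello-World.git") == ("octocat", "Hello-World")
```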
diff --git a/backend/app/database/falkor/code-graph-backend/.gitignore b/backend/app/database/falkor/code-graph-backend/.gitignore
index 6d844eb7..222828d5 100644
--- a/backend/app/database/falkor/code-graph-backend/.gitignore
+++ b/backend/app/database/falkor/code-graph-backend/.gitignore
@@ -42,3 +42,4 @@ pytest_cache/
 .env
 *.sqlite3
 .vercel
+repositories/
diff --git a/backend/app/services/codegraph/repo_service.py b/backend/app/services/codegraph/repo_service.py
index eba4fca8..240c4648 100644
--- a/backend/app/services/codegraph/repo_service.py
+++ b/backend/app/services/codegraph/repo_service.py
@@ -64,13 +64,20 @@ async def index_repo(self, repo_input: str, discord_id: str) -> Dict[str, Any]:
         status = repo_data['indexing_status']
 
         if status == 'completed':
+            # Return existing data as success instead of error
             return {
-                "status": "error",
-                "message": f"Repository already indexed. Graph: `{repo_data['graph_name']}`"
+                "status": "success",
+                "repo": repo_info['full_name'],
+                "graph_name": repo_data['graph_name'],
+                "nodes": repo_data.get('node_count', 0),
+                "edges": repo_data.get('edge_count', 0),
+                "message": "Repository already indexed."
             }
         elif status == 'pending':
             return {
-                "status": "error",
+                "status": "pending",
+                "repo": repo_info['full_name'],
+                "graph_name": repo_data['graph_name'],
                 "message": "Repository indexing in progress. Please wait."
             }
 
         # If failed, we'll allow re-indexing by updating the existing record
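With this change `index_repo` is idempotent from the caller's point of view: repeated requests report progress instead of failing. A sketch of how a caller might branch on the new statuses (the surrounding handler and the `repo_service` variable are hypothetical, not part of this diff):

```python
result = await repo_service.index_repo("owner/repo", discord_id="1234")

if result["status"] == "success":
    # Covers both a fresh index and the already-indexed fast path
    print(f"Graph {result['graph_name']}: {result.get('nodes', 0)} nodes, {result.get('edges', 0)} edges")
elif result["status"] == "pending":
    print("Indexing still in progress; try again shortly.")
else:
    print(f"Indexing failed: {result.get('message')}")
```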
return {"open": 0, "closed": 0, "merged": 0, "total": 0, "details": []} + + open_count = 0 + closed_count = 0 + merged_count = 0 + details = [] + + for pr in prs: + pr_state = pr.get("state", "open") + is_merged = pr.get("merged_at") is not None + + if pr_state == "open": + open_count += 1 + display_state = "open" + elif is_merged: + merged_count += 1 + display_state = "merged" + else: + closed_count += 1 + display_state = "closed" + + details.append({ + "number": pr.get("number"), + "title": pr.get("title"), + "state": display_state, + "url": pr.get("html_url"), + "created_at": pr.get("created_at"), + "updated_at": pr.get("updated_at"), + "author": { + "login": pr.get("user", {}).get("login"), + "avatar_url": pr.get("user", {}).get("avatar_url"), + "profile_url": pr.get("user", {}).get("html_url") + }, + "labels": [label.get("name") for label in pr.get("labels", [])], + "comments": pr.get("comments", 0), + "draft": pr.get("draft", False) + }) + + return { + "open": open_count, + "closed": closed_count, + "merged": merged_count, + "total": len(prs), + "details": details + } + + async def get_issues(self, owner: str, repo: str, state: str = "all", max_issues: int = 50) -> Dict: + """Fetch issues (excluding pull requests)""" + url = f"{self.base_url}/repos/{owner}/{repo}/issues" + params = {"state": state, "per_page": max_issues, "sort": "updated", "direction": "desc"} + issues = await self._make_request(url, params) + + if not issues or not isinstance(issues, list): + return {"open": 0, "closed": 0, "total": 0, "details": []} + + # Filter out pull requests (they appear in issues endpoint too) + actual_issues = [i for i in issues if "pull_request" not in i] + + open_count = sum(1 for i in actual_issues if i.get("state") == "open") + closed_count = sum(1 for i in actual_issues if i.get("state") == "closed") + + details = [ + { + "number": i.get("number"), + "title": i.get("title"), + "state": i.get("state"), + "url": i.get("html_url"), + "created_at": i.get("created_at"), + "author": { + "login": i.get("user", {}).get("login"), + "avatar_url": i.get("user", {}).get("avatar_url"), + "profile_url": i.get("user", {}).get("html_url") + }, + "labels": [label.get("name") for label in i.get("labels", [])], + "comments": i.get("comments", 0) + } + for i in actual_issues + ] + + return { + "open": open_count, + "closed": closed_count, + "total": len(actual_issues), + "details": details + } + + async def get_commit_activity(self, owner: str, repo: str) -> List[Dict]: + """Fetch weekly commit activity for the last year""" + url = f"{self.base_url}/repos/{owner}/{repo}/stats/commit_activity" + activity = await self._make_request(url) + + if activity and isinstance(activity, list): + # Return last 12 weeks of data + recent_activity = activity[-12:] if len(activity) > 12 else activity + return [ + { + "week": datetime.fromtimestamp(week.get("week", 0)).strftime("%Y-%m-%d"), + "total": week.get("total", 0), + "days": week.get("days", [0] * 7) + } + for week in recent_activity + ] + return [] + + async def get_languages(self, owner: str, repo: str) -> Dict[str, int]: + """Fetch repository languages""" + url = f"{self.base_url}/repos/{owner}/{repo}/languages" + languages = await self._make_request(url) + return languages if languages else {} + + async def get_releases(self, owner: str, repo: str, max_releases: int = 10) -> List[Dict]: + """Fetch repository releases""" + url = f"{self.base_url}/repos/{owner}/{repo}/releases" + params = {"per_page": max_releases} + releases = await self._make_request(url, 
+    async def get_commit_activity(self, owner: str, repo: str) -> List[Dict]:
+        """Fetch weekly commit activity for the last year"""
+        url = f"{self.base_url}/repos/{owner}/{repo}/stats/commit_activity"
+        activity = await self._make_request(url)
+
+        if activity and isinstance(activity, list):
+            # Return the last 12 weeks of data
+            recent_activity = activity[-12:] if len(activity) > 12 else activity
+            return [
+                {
+                    "week": datetime.fromtimestamp(week.get("week", 0)).strftime("%Y-%m-%d"),
+                    "total": week.get("total", 0),
+                    "days": week.get("days", [0] * 7)
+                }
+                for week in recent_activity
+            ]
+        return []
+
+    async def get_languages(self, owner: str, repo: str) -> Dict[str, int]:
+        """Fetch repository languages"""
+        url = f"{self.base_url}/repos/{owner}/{repo}/languages"
+        languages = await self._make_request(url)
+        return languages if languages else {}
+
+    async def get_releases(self, owner: str, repo: str, max_releases: int = 10) -> List[Dict]:
+        """Fetch repository releases"""
+        url = f"{self.base_url}/repos/{owner}/{repo}/releases"
+        params = {"per_page": max_releases}
+        releases = await self._make_request(url, params)
+
+        if releases and isinstance(releases, list):
+            return [
+                {
+                    "tag_name": r.get("tag_name"),
+                    "name": r.get("name"),
+                    "published_at": r.get("published_at"),
+                    "url": r.get("html_url"),
+                    "prerelease": r.get("prerelease", False)
+                }
+                for r in releases
+            ]
+        return []
+
+    async def get_comprehensive_stats(self, owner: str, repo: str) -> Dict[str, Any]:
+        """
+        Fetch comprehensive repository statistics.
+        Returns all data needed for the dashboard.
+        """
+        logger.info(f"Fetching comprehensive stats for {owner}/{repo}")
+
+        try:
+            # Fetch all data concurrently
+            repo_info, contributors, pull_requests, issues, commit_activity, languages, releases = await asyncio.gather(
+                self.get_repo_info(owner, repo),
+                self.get_contributors(owner, repo),
+                self.get_pull_requests(owner, repo),
+                self.get_issues(owner, repo),
+                self.get_commit_activity(owner, repo),
+                self.get_languages(owner, repo),
+                self.get_releases(owner, repo),
+                return_exceptions=True
+            )
+
+            # Handle any exceptions from gather
+            if isinstance(repo_info, Exception):
+                logger.error(f"Error fetching repo info: {repo_info}")
+                repo_info = None
+            if isinstance(contributors, Exception):
+                logger.error(f"Error fetching contributors: {contributors}")
+                contributors = []
+            if isinstance(pull_requests, Exception):
+                logger.error(f"Error fetching pull requests: {pull_requests}")
+                pull_requests = {"open": 0, "closed": 0, "merged": 0, "total": 0, "details": []}
+            if isinstance(issues, Exception):
+                logger.error(f"Error fetching issues: {issues}")
+                issues = {"open": 0, "closed": 0, "total": 0, "details": []}
+            if isinstance(commit_activity, Exception):
+                logger.error(f"Error fetching commit activity: {commit_activity}")
+                commit_activity = []
+            if isinstance(languages, Exception):
+                logger.error(f"Error fetching languages: {languages}")
+                languages = {}
+            if isinstance(releases, Exception):
+                logger.error(f"Error fetching releases: {releases}")
+                releases = []
+
+            if not repo_info:
+                raise ValueError(f"Repository {owner}/{repo} not found")
+
+            # Calculate additional metrics
+            total_commits = sum(week.get("total", 0) for week in commit_activity) if commit_activity else 0
+
+            return {
+                "status": "success",
+                "repo": f"{owner}/{repo}",
+                "repository": {
+                    "name": repo_info.get("name"),
+                    "full_name": repo_info.get("full_name"),
+                    "description": repo_info.get("description"),
+                    "url": repo_info.get("html_url"),
+                    "stars": repo_info.get("stargazers_count", 0),
+                    "forks": repo_info.get("forks_count", 0),
+                    "watchers": repo_info.get("watchers_count", 0),
+                    "open_issues_count": repo_info.get("open_issues_count", 0),
+                    "default_branch": repo_info.get("default_branch"),
+                    "created_at": repo_info.get("created_at"),
+                    "updated_at": repo_info.get("updated_at"),
+                    "pushed_at": repo_info.get("pushed_at"),
+                    "topics": repo_info.get("topics", []),
+                    "license": repo_info.get("license", {}).get("name") if repo_info.get("license") else None
+                },
+                "contributors": contributors,
+                "pull_requests": pull_requests,
+                "issues": issues,
+                "commit_activity": commit_activity,
+                "languages": languages,
+                "releases": releases,
+                "metrics": {
+                    "total_contributors": len(contributors),
+                    "total_commits_recent": total_commits,
+                    "stars": repo_info.get("stargazers_count", 0),
+                    "forks": repo_info.get("forks_count", 0),
+                    "open_prs": pull_requests.get("open", 0),
+                    "open_issues": issues.get("open", 0)
+                }
+            }
+
+        except Exception as e:
+            logger.exception(f"Error fetching comprehensive stats for {owner}/{repo}: {str(e)}")
+            raise
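For reference, a minimal standalone sketch of how the service is driven (this mirrors what the endpoint above does; `settings.github_token` must be configured):

```python
import asyncio

from app.services.github.repo_stats import GitHubRepoStatsService


async def main():
    async with GitHubRepoStatsService() as svc:
        stats = await svc.get_comprehensive_stats("octocat", "Hello-World")
        print(stats["metrics"])


asyncio.run(main())
```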
diff --git a/backend/database/02_create_indexed_repositories.sql b/backend/database/02_create_indexed_repositories.sql
new file mode 100644
index 00000000..12a9bcd2
--- /dev/null
+++ b/backend/database/02_create_indexed_repositories.sql
@@ -0,0 +1,38 @@
+-- Table for storing indexed repository information
+CREATE TABLE IF NOT EXISTS indexed_repositories (
+    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+    repository_full_name VARCHAR(255) NOT NULL,  -- e.g., 'owner/repo'
+    graph_name VARCHAR(255) NOT NULL,            -- Name of the graph in FalkorDB
+    indexing_status VARCHAR(50) NOT NULL DEFAULT 'pending' CHECK (indexing_status IN ('pending', 'completed', 'failed')),
+    indexed_by_discord_id VARCHAR(255),          -- Discord user who requested indexing
+    indexed_at TIMESTAMPTZ,                      -- When indexing completed
+    node_count INTEGER DEFAULT 0,                -- Number of nodes in the graph
+    edge_count INTEGER DEFAULT 0,                -- Number of edges in the graph
+    last_error TEXT,                             -- Last error message if failed
+    is_deleted BOOLEAN NOT NULL DEFAULT false,   -- Soft delete flag
+    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
+);
+
+-- Ensure unique repository entries among rows that are not soft-deleted.
+-- PostgreSQL does not allow a WHERE clause on an inline UNIQUE constraint,
+-- so this must be a partial unique index instead.
+CREATE UNIQUE INDEX IF NOT EXISTS idx_indexed_repos_full_name
+    ON indexed_repositories(repository_full_name)
+    WHERE is_deleted = false;
+
+-- Create indexes for better query performance
+CREATE INDEX IF NOT EXISTS idx_indexed_repos_status ON indexed_repositories(indexing_status);
+CREATE INDEX IF NOT EXISTS idx_indexed_repos_is_deleted ON indexed_repositories(is_deleted);
+CREATE INDEX IF NOT EXISTS idx_indexed_repos_discord_id ON indexed_repositories(indexed_by_discord_id);
+
+-- Create trigger to automatically update updated_at
+CREATE TRIGGER update_indexed_repositories_updated_at
+    BEFORE UPDATE ON indexed_repositories
+    FOR EACH ROW
+    EXECUTE FUNCTION update_updated_at_column();
+
+-- Note: No RLS policies are added here because this table is accessed by the backend service.
+-- If you want to enable RLS, you'll need to add appropriate policies.
+
+-- Add helpful comments
+COMMENT ON TABLE indexed_repositories IS 'Stores metadata about repositories that have been indexed for code graph analysis';
+COMMENT ON COLUMN indexed_repositories.graph_name IS 'The name of the graph created in FalkorDB';
+COMMENT ON COLUMN indexed_repositories.indexing_status IS 'Current status: pending, completed, or failed';
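Because uniqueness is enforced through a partial index rather than a plain table constraint, an upsert against this table must name the index predicate in its conflict target. A hedged sketch (the actual query the backend runs is not part of this diff; values are placeholders):

```sql
INSERT INTO indexed_repositories (repository_full_name, graph_name, indexing_status)
VALUES ('owner/repo', 'owner_repo_graph', 'pending')
ON CONFLICT (repository_full_name) WHERE is_deleted = false
DO UPDATE SET indexing_status = 'pending', updated_at = NOW();
```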
diff --git a/docs/INSTALL_GUIDE.md b/docs/INSTALL_GUIDE.md
index ec6548a0..a5d4894e 100644
--- a/docs/INSTALL_GUIDE.md
+++ b/docs/INSTALL_GUIDE.md
@@ -47,12 +47,21 @@
 Invoke-Expression (poetry env activate)
 ```
 
 5. **Set up environment variables**
+   You need to set up environment variables in three locations: **Project Root**, **Backend**, and **Frontend**.
+
 ```sh
-# Copy the example environment file
+# 1. Project Root
 cp env.example .env
 
-# Edit .env with your API keys and configuration
-nano .env  # or use your preferred editor
+# 2. Backend
+cp backend/.env.example backend/.env
+
+# 3. Frontend
+# Create a .env file in the frontend/ directory
+cp frontend/.env.example frontend/.env
+
+# Edit the .env files with your API keys and configuration
 ```
 
 6. **Set up Docker container**
@@ -71,7 +80,12 @@ Go to Docker Desktop and start the containers
 cd backend
 poetry run python main.py                        # Terminal 1
 poetry run python start_github_mcp_server.py     # Terminal 2 (Start MCP server)
-flask --app api/index.py run --debug --port 5000 # Terminal 3 (Start graphDB)
+
+# Terminal 3 (Start graphDB) - Requires Python 3.11+
+cd app/database/falkor/code-graph-backend
+poetry env use python3.11  # First time setup only
+poetry install             # First time setup only
+poetry run flask --app api.index:app run --debug --port 5000
 ```
 
 9. **Start the frontend** (in a new terminal)
@@ -166,6 +180,19 @@ The database will be available at `http://localhost:8080`
 
 ### Supabase Database
 Supabase provides the PostgreSQL database for user data and authentication. The connection is configured via environment variables.
 
+### Supabase Migrations
+You must run the SQL migrations to set up the required database tables.
+
+1. Go to your [Supabase Dashboard](https://supabase.com/dashboard) -> Select your project -> **SQL Editor**.
+2. **Run Integration Tables Migration**:
+   - Open `backend/database/01_create_integration_tables.sql`.
+   - Copy the content and paste it into the SQL Editor.
+   - Click **Run**.
+3. **Run Indexed Repositories Migration**:
+   - Open `backend/database/02_create_indexed_repositories.sql`.
+   - Copy the content and paste it into the SQL Editor.
+   - Click **Run**.
+
 ## Current Features
 
 ### Discord Integration
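If you prefer the command line to the dashboard, the same migrations can be applied with psql, assuming you have your Supabase connection string at hand (shown here as a placeholder environment variable):

```sh
psql "$SUPABASE_DB_URL" -f backend/database/01_create_integration_tables.sql
psql "$SUPABASE_DB_URL" -f backend/database/02_create_indexed_repositories.sql
```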
"greenlet-3.2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af41be48a4f60429d5cad9d22175217805098a9ef7c40bfef44f7669fb9d74d8"}, {file = "greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c"}, {file = "greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2"}, {file = "greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246"}, @@ -1405,6 +1386,8 @@ files = [ {file = "greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8"}, {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52"}, {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c"}, + {file = "greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5"}, {file = "greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9"}, {file = "greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd"}, {file = "greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb"}, @@ -1414,6 +1397,8 @@ files = [ {file = "greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0"}, {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0"}, {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ee7a6ec486883397d70eec05059353b8e83eca9168b9f3f9a361971e77e0bcd0"}, + {file = "greenlet-3.2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:326d234cbf337c9c3def0676412eb7040a35a768efc92504b947b3e9cfc7543d"}, {file = "greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02"}, {file = "greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31"}, {file = "greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945"}, @@ -1423,6 +1408,8 @@ files = [ {file = "greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671"}, {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b"}, {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e343822feb58ac4d0a1211bd9399de2b3a04963ddeec21530fc426cc121f19b"}, + {file = "greenlet-3.2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ca7f6f1f2649b89ce02f6f229d7c19f680a6238af656f61e0115b24857917929"}, {file = "greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b"}, {file = "greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0"}, {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f"}, @@ -1430,6 +1417,8 @@ files = [ {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1"}, {file = "greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735"}, {file = "greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337"}, + {file = "greenlet-3.2.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2917bdf657f5859fbf3386b12d68ede4cf1f04c90c3a6bc1f013dd68a22e2269"}, + {file = "greenlet-3.2.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:015d48959d4add5d6c9f6c5210ee3803a830dce46356e3bc326d6776bde54681"}, {file = "greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01"}, {file = "greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c"}, {file = "greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d"}, @@ -1439,6 +1428,8 @@ files = [ {file = "greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df"}, {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594"}, {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:28a3c6b7cd72a96f61b0e4b2a36f681025b60ae4779cc73c1535eb5f29560b10"}, + {file = "greenlet-3.2.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:52206cd642670b0b320a1fd1cbfd95bca0e043179c1d8a045f2c6109dfe973be"}, {file = "greenlet-3.2.4-cp39-cp39-win32.whl", hash = "sha256:65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b"}, {file = "greenlet-3.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb"}, {file = "greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d"}, @@ -6527,4 +6518,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.1" python-versions = ">=3.10,<3.14" -content-hash = "d1f57ec66dbe816e607f52b1adcde2a92d44e901ec1c70b6faa80b2d1ca39282" +content-hash = "ba846a25328e3d7088b67f3bcd68ed5f40ce8bbc8b591805f075ef4fc2a1beee"