-
Notifications
You must be signed in to change notification settings - Fork 117
Deprecated code and warning fix #140
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We'll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||||||||
|---|---|---|---|---|---|---|---|---|---|---|
| @@ -1,8 +1,8 @@ | ||||||||||
| import logging | ||||||||||
| import os | ||||||||||
| from typing import Dict, Any, Optional, List, Union | ||||||||||
| import aiohttp | ||||||||||
| import asyncio | ||||||||||
| import config | ||||||||||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Use package-qualified config import This module runs inside the -import config
+from backend import configπ Committable suggestion
Suggested change
π€ Prompt for AI Agents |
||||||||||
|
|
||||||||||
| logger = logging.getLogger(__name__) | ||||||||||
|
|
||||||||||
|
|
@@ -13,7 +13,7 @@ def __init__(self, mcp_server_url: str = "http://localhost:8001"): | |||||||||
| self.mcp_server_url = mcp_server_url | ||||||||||
| self.session: Optional[aiohttp.ClientSession] = None | ||||||||||
| # Default org pulled from environment | ||||||||||
| self.org = os.getenv("GITHUB_ORG", "Aossie-org") | ||||||||||
| self.org = config.GITHUB_ORG | ||||||||||
|
|
||||||||||
| async def __aenter__(self): | ||||||||||
| # Async context manager entry | ||||||||||
|
|
||||||||||
| Original file line number | Diff line number | Diff line change | ||||
|---|---|---|---|---|---|---|
|
|
@@ -2,17 +2,11 @@ | |||||
| import requests | ||||||
| import asyncio | ||||||
| from typing import Optional | ||||||
| from dotenv import load_dotenv, find_dotenv | ||||||
|
|
||||||
| dotenv_path = find_dotenv(usecwd=True) | ||||||
| if dotenv_path: | ||||||
| load_dotenv(dotenv_path=dotenv_path) | ||||||
| else: | ||||||
| load_dotenv() | ||||||
| import config | ||||||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Import config from the backend package Same module-resolution issue hereβimport via -import config
+from backend import configπ Committable suggestion
Suggested change
π€ Prompt for AI Agents |
||||||
|
|
||||||
| class GitHubMCPService: | ||||||
| def __init__(self, token: str = None): | ||||||
| self.token = token or os.getenv("GITHUB_TOKEN") | ||||||
| self.token = token or config.GITHUB_TOKEN | ||||||
| if not self.token: | ||||||
| raise ValueError("GitHub token required; export as GITHUB_TOKEN or place in backend/.env file") | ||||||
| self.base_url = "https://api.github.com" | ||||||
|
|
@@ -110,7 +104,7 @@ def _headers(self): | |||||
|
|
||||||
|
|
||||||
| def _get_service(token: Optional[str] = None) -> GitHubMCPService: | ||||||
| return GitHubMCPService(token=token or os.getenv("GITHUB_TOKEN")) | ||||||
| return GitHubMCPService(token=token or config.GITHUB_TOKEN) | ||||||
|
|
||||||
| async def get_org_repositories(org: str): | ||||||
| try: | ||||||
|
|
||||||
| Original file line number | Diff line number | Diff line change | ||||
|---|---|---|---|---|---|---|
| @@ -1,12 +1,12 @@ | ||||||
| import os | ||||||
| import config | ||||||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Import config via the backend package For the same reason noted in the MCP client, this should pull -import config
+from backend import configπ Committable suggestion
Suggested change
π€ Prompt for AI Agents |
||||||
| import re | ||||||
| import logging | ||||||
| from typing import Optional | ||||||
| from app.agents.devrel.github.services import github_mcp_service | ||||||
|
|
||||||
| logger = logging.getLogger(__name__) | ||||||
|
|
||||||
| DEFAULT_ORG = os.getenv("GITHUB_ORG", "Aossie-org") | ||||||
| DEFAULT_ORG = config.GITHUB_ORG | ||||||
|
|
||||||
| GH_URL_RE = re.compile( | ||||||
| r'(?:https?://|git@)github\.com[/:]' | ||||||
|
|
||||||
| Original file line number | Diff line number | Diff line change | ||||||||
|---|---|---|---|---|---|---|---|---|---|---|
| @@ -1,21 +1,20 @@ | ||||||||||
| import logging | ||||||||||
| import os | ||||||||||
| import config | ||||||||||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Fix config import path
-import config
+from backend import configπ Committable suggestion
Suggested change
π€ Prompt for AI Agents |
||||||||||
| from typing import List, Dict, Any, Optional | ||||||||||
| import torch | ||||||||||
| from pydantic import BaseModel | ||||||||||
| from dotenv import load_dotenv | ||||||||||
| from sentence_transformers import SentenceTransformer | ||||||||||
| from langchain_google_genai import ChatGoogleGenerativeAI | ||||||||||
| from langchain_core.messages import HumanMessage | ||||||||||
| from app.core.config import settings | ||||||||||
| from app.models.database.weaviate import WeaviateUserProfile | ||||||||||
| from app.services.embedding_service.profile_summarization.prompts.summarization_prompt import PROFILE_SUMMARIZATION_PROMPT | ||||||||||
|
|
||||||||||
| load_dotenv() | ||||||||||
|
|
||||||||||
| MODEL_NAME = os.getenv("EMBEDDING_MODEL", "BAAI/bge-small-en-v1.5") | ||||||||||
| MAX_BATCH_SIZE = int(os.getenv("EMBEDDING_MAX_BATCH_SIZE", "32")) | ||||||||||
| EMBEDDING_DEVICE = os.getenv("EMBEDDING_DEVICE", "cpu") | ||||||||||
| MODEL_NAME = config.MODEL_NAME | ||||||||||
| MAX_BATCH_SIZE = config.MAX_BATCH_SIZE | ||||||||||
| EMBEDDING_DEVICE = config.EMBEDDING_DEVICE | ||||||||||
|
|
||||||||||
|
|
||||||||||
| logger = logging.getLogger(__name__) | ||||||||||
|
|
||||||||||
|
|
||||||||||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,16 @@ | ||
| from dotenv import load_dotenv, find_dotenv | ||
| import os | ||
|
|
||
|
|
||
| dotenv_path = find_dotenv(usecwd=True) | ||
| if dotenv_path: | ||
| load_dotenv(dotenv_path=dotenv_path) | ||
| else: | ||
| load_dotenv() | ||
|
|
||
| GITHUB_ORG = os.getenv("GITHUB_ORG", "Aossie-org") | ||
| GITHUB_TOKEN = os.getenv("GITHUB_TOKEN") or os.getenv("GH_TOKEN") | ||
|
|
||
| MODEL_NAME = os.getenv("EMBEDDING_MODEL", "BAAI/bge-small-en-v1.5") | ||
| MAX_BATCH_SIZE = int(os.getenv("EMBEDDING_MAX_BATCH_SIZE", "32")) | ||
|
Contributor
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Add error handling for integer conversion.
Apply this diff to add validation: -MAX_BATCH_SIZE = int(os.getenv("EMBEDDING_MAX_BATCH_SIZE", "32"))
+try:
+ MAX_BATCH_SIZE = int(os.getenv("EMBEDDING_MAX_BATCH_SIZE", "32"))
+except ValueError:
+ raise ValueError(
+ "EMBEDDING_MAX_BATCH_SIZE must be a valid integer. "
+ f"Got: {os.getenv('EMBEDDING_MAX_BATCH_SIZE')}"
+ )π€ Prompt for AI Agents |
||
| EMBEDDING_DEVICE = os.getenv("EMBEDDING_DEVICE", "cpu") | ||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Qualify config import through backend
Align with the package structure so the module loads reliably.
π Committable suggestion
π€ Prompt for AI Agents