33 changes: 33 additions & 0 deletions .github/workflows/test-python-app-chat.yaml
@@ -0,0 +1,33 @@
name: Python Tests
on:
  # push:
  #   paths: ['apps/chat/**']
  pull_request:
    paths: ['apps/chat/**']
jobs:
  test:
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: apps/chat
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.11"
      - name: Install dependencies
        run: |
          pip install -r requirements.txt
          pip install pytest pytest-asyncio pytest-cov mypy ruff
      - name: Run tests
        env:
          OTEL_SDK_DISABLED: true
        run: |
          export PYTHONPATH=.
          pytest --cov=app tests/
      - name: Type check
        run: mypy app --ignore-missing-imports
      - name: Lint
        run: ruff check app
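
The three checks above (pytest with coverage, mypy, ruff) can be reproduced locally before pushing. A minimal helper along these lines should work, assuming it is run from `apps/chat` with the dev dependencies from the install step available; the file name `run_checks.py` is hypothetical and not part of this PR:

```python
# run_checks.py -- hypothetical local mirror of the CI job above, not part of this PR.
import os
import subprocess
import sys

CHECKS = [
    ["pytest", "--cov=app", "tests/"],
    ["mypy", "app", "--ignore-missing-imports"],
    ["ruff", "check", "app"],
]

def main() -> int:
    # Same environment tweaks as the workflow: disable OTel, put the app on the path.
    env = {**os.environ, "OTEL_SDK_DISABLED": "true", "PYTHONPATH": "."}
    for cmd in CHECKS:
        print("$", " ".join(cmd))
        code = subprocess.run(cmd, env=env).returncode
        if code != 0:
            return code
    return 0

if __name__ == "__main__":
    sys.exit(main())
```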
5 changes: 3 additions & 2 deletions apps/chat/app/api.py
@@ -1,7 +1,7 @@
from fastapi import APIRouter, HTTPException, status, Depends, Request
from fastapi import APIRouter, HTTPException, Depends, Request
from datetime import datetime
from .schemas import (
    ChatRequest, ChatResponse, Message,
    ChatRequest, ChatResponse,
    CreateProjectRequest, ProjectInfo,
)
from .models import ChatHistory, StoredChatMessage
@@ -63,6 +63,7 @@ async def chat_endpoint(
    except Exception as e:
        log.bind(event="chat_api_error", error=str(e)).error("Standard chat request failed")
        traceback.print_exc()
        # raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="...")
        raise HTTPException(status_code=500, detail="AI сервис недоступен")

@api_router.post("/projects", response_model=ProjectInfo)
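
For context, the error path in `chat_endpoint` now reduces to a plain `HTTPException` with a literal 500, with the constant-based variant kept as a comment. A minimal self-contained sketch of the same pattern (the handler, router, and `call_llm` names here are illustrative stand-ins, not the real app code):

```python
# Illustrative sketch of the error-handling pattern above; names other than HTTPException are hypothetical.
import traceback

from fastapi import APIRouter, HTTPException

api_router = APIRouter()

async def call_llm() -> str:
    """Stand-in for the real LLM client call."""
    return "Hello from AI"

@api_router.post("/chat")
async def chat_endpoint() -> dict:
    try:
        return {"reply": await call_llm()}
    except Exception:
        traceback.print_exc()
        # status.HTTP_500_INTERNAL_SERVER_ERROR would be the named equivalent of 500
        raise HTTPException(status_code=500, detail="AI сервис недоступен")
```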
40 changes: 19 additions & 21 deletions apps/chat/app/behavior/models.py
@@ -4,43 +4,41 @@

class TaskSchema(BaseModel):
    """Single task definition executed by an agent."""

    description: str = Field(..., example="Collect articles about AI")
    description: str = Field(..., examples=["Collect articles about AI"])
    expected_output: Optional[str] = Field(
        None,
        alias="expected_output",
        example="List of article URLs",
        alias="expected_output",
        examples=["List of article URLs"]
    )
    context: Optional[List[str]] = Field(
        default_factory=list,
        example=["Use only academic sources"],
        examples=[["Use only academic sources"]]
    )
    agent: Optional[str] = Field(None, example="researcher")

    agent: Optional[str] = Field(None, examples=["researcher"])
    model_config = ConfigDict(populate_by_name=True)


class AgentSchema(BaseModel):
    """CrewAI-style agent configuration."""

    role: str = Field(..., example="researcher")
    goal: Optional[str] = Field(None, example="Provide an overview of AI trends")
    backstory: Optional[str] = Field(None, example="PhD in computer science")
    tools: List[str] = Field(default_factory=list, example=["browser"])
    allow_delegation: bool = Field(False, alias="allow_delegation", example=True)
    role: str = Field(..., examples=["researcher"])
    goal: Optional[str] = Field(None, examples=["Provide an overview of AI trends"])
    backstory: Optional[str] = Field(None, examples=["PhD in computer science"])
    tools: List[str] = Field(default_factory=list, examples=[["browser"]])
    allow_delegation: bool = Field(False, alias="allow_delegation", examples=[True])
    tasks: List[TaskSchema] = Field(default_factory=list)

    model_config = ConfigDict(populate_by_name=True)


class BehaviorDefinition(BaseModel):
    """Root object describing agent behaviors loaded from Notion."""

    agents: List[AgentSchema] = Field(
        default_factory=list,
        example=[{"role": "researcher", "goal": "Find info"}],
        examples=[[{"role": "researcher", "goal": "Find info"}]]
    )
    tasks: List[TaskSchema] = Field(default_factory=list)
    process: Optional[str] = Field("sequential", example="sequential")

    model_config = ConfigDict(populate_by_name=True)
    process: Optional[str] = Field("sequential", examples=["sequential"])
    model_config = ConfigDict(populate_by_name=True)
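
The recurring change in this file is the migration from Pydantic v1's `example=` keyword to v2's `examples=[...]`, which expects a list of examples (hence the nested lists for list-typed fields). A small standalone check of the pattern, using a made-up model name:

```python
# Standalone sketch of the Field(examples=[...]) pattern used above; DemoTask is made up.
from typing import List, Optional

from pydantic import BaseModel, ConfigDict, Field

class DemoTask(BaseModel):
    description: str = Field(..., examples=["Collect articles about AI"])
    context: Optional[List[str]] = Field(
        default_factory=list,
        examples=[["Use only academic sources"]],  # one example, which is itself a list
    )
    model_config = ConfigDict(populate_by_name=True)

# The examples land in the generated JSON schema rather than on instances:
print(DemoTask.model_json_schema()["properties"]["description"]["examples"])
# -> ['Collect articles about AI']
```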
19 changes: 15 additions & 4 deletions apps/chat/app/core/llm_client.py
@@ -92,7 +92,7 @@ async def chat_completion(
            span.set_attribute("llm.request_size", request_size)

            # Create a separate span for the HTTP request
            with tracer.start_as_current_span("http_request") as http_span:
            with tracer.start_as_current_span("http_request"):
                async with httpx.AsyncClient() as client:
                    response = await client.post(
                        self.api_url,
@@ -154,7 +154,11 @@ async def chat_completion(
"response_body": e.response.text[:500] # Первые 500 символов
}
)
raise
# Возвращаем error response вместо raise
return {
"error": f"HTTP {e.response.status_code}: {str(e)}",
"choices": [{"message": {"content": f"HTTP ошибка: {e.response.status_code}"}}]
}

except Exception as e:
# Обработка других ошибок
@@ -177,8 +181,11 @@ async def chat_completion(
"exception.message": str(e)
}
)
# apps/chat/app/core/llm_client.py
# В конец класса OpenRouterClient добавь:
# Возвращаем error response вместо raise
return {
"error": str(e),
"choices": [{"message": {"content": f"Ошибка: {str(e)}"}}]
}

    async def generate_reply(
        self,
@@ -230,6 +237,10 @@ async def generate_reply(
            model=self.default_model
        )

        # Check the response for errors
        if "error" in response:
            return f"Ошибка API: {response['error']}"

        # Extract the reply text
        return response["choices"][0]["message"]["content"]

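
Since `chat_completion` now returns an error-shaped payload instead of raising, callers have to branch on the `error` key the way `generate_reply` does above. A minimal sketch of that calling convention; the stub functions here are simplified stand-ins for `OpenRouterClient.chat_completion` / `generate_reply`, not the real client:

```python
# Simplified stand-in illustrating the "error dict instead of raise" contract above.
from typing import Any, Dict

async def chat_completion_stub(ok: bool) -> Dict[str, Any]:
    if not ok:
        return {
            "error": "HTTP 502: upstream failure",
            "choices": [{"message": {"content": "HTTP ошибка: 502"}}],
        }
    return {"choices": [{"message": {"content": "Hello from AI"}}]}

async def generate_reply_stub(ok: bool) -> str:
    response = await chat_completion_stub(ok)
    # Callers must check for the error marker before indexing into choices.
    if "error" in response:
        return f"Ошибка API: {response['error']}"
    return response["choices"][0]["message"]["content"]

if __name__ == "__main__":
    import asyncio
    print(asyncio.run(generate_reply_stub(False)))  # -> Ошибка API: HTTP 502: upstream failure
    print(asyncio.run(generate_reply_stub(True)))   # -> Hello from AI
```

One trade-off worth noting: because failures no longer raise, the API layer will serve these error strings as normal replies unless callers convert the `error` key back into an HTTP error themselves.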
2 changes: 1 addition & 1 deletion apps/chat/app/core/project_memory.py
@@ -1,5 +1,5 @@
from typing import Dict, List, Optional
from app.schemas import ProjectInfo, CreateProjectRequest
from app.schemas import ProjectInfo
from app.models import ChatHistory, StoredChatMessage
from uuid import uuid4

15 changes: 10 additions & 5 deletions apps/chat/app/integrations/behavior_manager.py
@@ -1,3 +1,4 @@
from typing import Optional, Dict, Any
import yaml
from pydantic import ValidationError

@@ -10,27 +11,31 @@ class BehaviorManager:
    def __init__(self, notion_client, page_id: str):
        self.notion_client = notion_client
        self.page_id = page_id
        self.behavior: BehaviorDefinition | None = None
        self.behavior: Optional[BehaviorDefinition] = None

    async def refresh(self) -> None:
        log = enrich_context(event="behavior_refresh", page_id=self.page_id)
        data = await self.notion_client.fetch_page(self.page_id)
        log.info("Behavior page retrieved")

        try:
            # Expect first child to be a code block with YAML
            for block in data.get("results", []):
                if block.get("type") == "code":
                    text = block["code"].get("rich_text", [])
                    content = "".join(t.get("plain_text", "") for t in text)
                    raw = yaml.safe_load(content) or {}
                    raw: Dict[str, Any] = yaml.safe_load(content) or {}
                    self.behavior = BehaviorDefinition.model_validate(raw)
                    log.bind(event="behavior_parsed").debug(self.behavior.model_dump())

                    # Proper typing for the log payload
                    behavior_dict = self.behavior.model_dump()
                    log.bind(event="behavior_parsed").debug(f"Parsed behavior: {behavior_dict}")
                    log.bind(event="behavior_loaded").info("Behavior updated")
                    return
            log.bind(event="behavior_not_found").warning("No YAML code block found")
        except ValidationError as e:
            log.bind(event="behavior_validation_error", errors=e.errors()).error("Behavior validation failed")
            log.bind(event="behavior_validation_error", errors=str(e.errors())).error("Behavior validation failed")
            raise
        except Exception as e:
            log.bind(event="behavior_parse_error", error=str(e)).error("Failed to parse behavior")
            raise
            raise
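
To make the parsing path above concrete: `refresh` expects the Notion page to contain a code block whose plain text is YAML matching `BehaviorDefinition`. A standalone sketch of that round trip, with an invented payload consistent with the fields shown in `behavior/models.py`:

```python
# Standalone sketch of the YAML -> BehaviorDefinition path; the payload itself is invented.
import yaml

from app.behavior.models import BehaviorDefinition  # as defined in this PR

RAW_YAML = """
agents:
  - role: researcher
    goal: Provide an overview of AI trends
    tools: [browser]
    tasks:
      - description: Collect articles about AI
        expected_output: List of article URLs
process: sequential
"""

behavior = BehaviorDefinition.model_validate(yaml.safe_load(RAW_YAML) or {})
print(behavior.agents[0].role)                   # -> researcher
print(behavior.agents[0].tasks[0].description)   # -> Collect articles about AI
print(behavior.process)                          # -> sequential
```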
22 changes: 13 additions & 9 deletions apps/chat/app/logger.py
@@ -1,6 +1,5 @@
import logging
from contextvars import ContextVar
from typing import Any, Dict, Optional
from typing import Any, Dict, cast
from opentelemetry import trace
import structlog

@@ -30,21 +29,23 @@ def _add_trace_context(self, event_dict: Dict[str, Any]) -> Dict[str, Any]:

    def bind(self, **kwargs) -> 'EnrichedLogger':
        """Binds additional context to the logger."""
        return EnrichedLogger(self.logger.bind(**kwargs))
        # Cast to the expected type for mypy
        bound_logger = cast(structlog.BoundLogger, self.logger.bind(**kwargs))
        return EnrichedLogger(bound_logger)

    def info(self, msg: str, **kwargs):
    def info(self, msg: str, **kwargs) -> None:
        kwargs = self._add_trace_context(kwargs)
        self.logger.info(msg, **kwargs)

    def error(self, msg: str, **kwargs):
    def error(self, msg: str, **kwargs) -> None:
        kwargs = self._add_trace_context(kwargs)
        self.logger.error(msg, **kwargs)

    def warning(self, msg: str, **kwargs):
    def warning(self, msg: str, **kwargs) -> None:
        kwargs = self._add_trace_context(kwargs)
        self.logger.warning(msg, **kwargs)

    def debug(self, msg: str, **kwargs):
    def debug(self, msg: str, **kwargs) -> None:
        kwargs = self._add_trace_context(kwargs)
        self.logger.debug(msg, **kwargs)

@@ -74,9 +75,12 @@ def enrich_context(**kwargs) -> EnrichedLogger:
    Creates a logger with enriched context.
    Compatible with existing code.
    """
    return EnrichedLogger(base_logger.bind(**kwargs))
    # Cast to the expected type for mypy
    bound_logger = cast(structlog.BoundLogger, base_logger.bind(**kwargs))
    return EnrichedLogger(bound_logger)

def set_request_context(**kwargs):

def set_request_context(**kwargs) -> None:
    """
    Sets the context for the whole request.
    Used in middleware or at the start of request handling.
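
The `cast(structlog.BoundLogger, ...)` calls only exist to satisfy mypy; the runtime behavior of `enrich_context` and `bind` is unchanged. For reference, typical usage as seen elsewhere in this PR (the event names and request id below are made up):

```python
# Illustrative usage of the enriched logger; event names and ids here are made up.
from app.logger import enrich_context, set_request_context

set_request_context(request_id="req-42")           # bind per-request context once
log = enrich_context(event="demo_flow", step=1)    # returns an EnrichedLogger

log.info("Started")                                # OTel trace/span ids are added when a span is active
log.bind(event="demo_flow_done").info("Finished")  # bind() returns a new EnrichedLogger
```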
51 changes: 4 additions & 47 deletions apps/chat/app/models/chat.py
@@ -1,11 +1,10 @@
# apps/chat/app/models/chat.py
from pydantic import BaseModel, Field, ConfigDict
from typing import List, Optional
from uuid import uuid4
from datetime import datetime

from ..schemas import Role

# Import Role from schemas to avoid duplication
from ..schemas.chat import Role

class StoredChatMessage(BaseModel):
    model_config = ConfigDict(populate_by_name=True)
@@ -16,54 +15,12 @@ class StoredChatMessage(BaseModel):
    span_id: Optional[str] = None
    timestamp: datetime = Field(default_factory=datetime.utcnow)


class ChatHistory(BaseModel):
    model_config = ConfigDict(populate_by_name=True)

    id: str = Field(default_factory=lambda: str(uuid4()))
    project_id: str
    project_id: str = Field(..., examples=["proj-123"])
    messages: List[StoredChatMessage]
    trace_id: Optional[str] = None
    span_id: Optional[str] = None
    timestamp: datetime = Field(default_factory=datetime.utcnow)


# apps/chat/app/schemas/chat.py
from pydantic import BaseModel, Field, ConfigDict
from enum import Enum
from typing import List, Optional


class Role(str, Enum):
    user = "user"
    assistant = "assistant"
    system = "system"


class Message(BaseModel):
    role: Role = Field(..., example="user")
    content: str = Field(..., example="Hello")


class ChatRequest(BaseModel):
    model_config = ConfigDict(populate_by_name=True)

    messages: List[Message] = Field(..., example=[{"role": "user", "content": "Hi"}])
    user_api_key: Optional[str] = Field(None, alias="userApiKey", example="sk-...")


class ChatResponse(BaseModel):
    reply: str = Field(..., example="Hello from AI")


# apps/chat/app/schemas/projects.py
from pydantic import BaseModel, Field


class CreateProjectRequest(BaseModel):
    name: str = Field(..., example="My Project")


class ProjectInfo(BaseModel):
    id: str = Field(..., example="project-123")
    name: str = Field(..., example="Demo Project")
    timestamp: datetime = Field(default_factory=datetime.utcnow)
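
With the duplicated schema definitions removed from this file, `Role`, `Message`, `ChatRequest`, and the project schemas now live only under `app/schemas`, and `models/chat.py` keeps just the stored-history models. A quick sketch of using what remains after the cleanup (the values are invented):

```python
# Sketch of what models/chat.py still provides after the cleanup; the values are invented.
from app.models import ChatHistory
from app.schemas.chat import Role  # Role now has a single home under app/schemas

history = ChatHistory(project_id="proj-123", messages=[])
print(history.id)          # auto-generated uuid4 string
print(history.timestamp)   # defaulted via datetime.utcnow
print(Role.user.value)     # -> "user"
```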