Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions backend/aci/server/agent/prompt.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,11 @@

logger = get_logger(__name__)

# Maximum number of tool definitions forwarded to the OpenAI chat completions API
# in a single request. Checked by openai_chat_stream (raises ValueError) and by
# the /agent chat route (returns HTTP 400) before any API call is made.
# OpenAI API has limits on tool count and total token size of tool definitions.
# Empirically, requests fail around 128+ tools or when tool definitions exceed ~200k tokens.
# Using a conservative limit (half the observed ~128-tool ceiling) to surface a clear,
# actionable error instead of a cryptic upstream API failure.
MAX_TOOLS = 64


def convert_to_openai_messages(messages: list[ClientMessage]) -> list[ChatCompletionMessageParam]:
"""
Expand Down Expand Up @@ -69,7 +74,19 @@ async def openai_chat_stream(
Args:
messages: List of chat messages
tools: List of tools to use

Raises:
ValueError: If too many tools are selected, exceeding API limits.
"""
if len(tools) > MAX_TOOLS:
logger.warning(
f"Tool limit exceeded: {len(tools)} tools requested, max is {MAX_TOOLS}"
)
raise ValueError(
f"Too many tools selected ({len(tools)}). Maximum allowed is {MAX_TOOLS}. "
"Please reduce the number of selected functions or apps."
)

client = OpenAI(api_key=config.OPENAI_API_KEY)

# TODO: support different meta function mode ACI_META_FUNCTIONS_SCHEMA_LIST
Expand Down
10 changes: 9 additions & 1 deletion backend/aci/server/routes/agent.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from typing import Annotated

from fastapi import APIRouter, Depends
from fastapi import APIRouter, Depends, HTTPException
from fastapi.responses import StreamingResponse
from openai import OpenAI
from pydantic import BaseModel
Expand All @@ -12,6 +12,7 @@
from aci.server import dependencies as deps
from aci.server.agent.prompt import (
ClientMessage,
MAX_TOOLS,
convert_to_openai_messages,
openai_chat_stream,
)
Expand Down Expand Up @@ -62,6 +63,13 @@ async def handle_chat(
func for func in selected_functions if isinstance(func, OpenAIResponsesFunctionDefinition)
]

if len(tools) > MAX_TOOLS:
raise HTTPException(
status_code=400,
detail=f"Too many tools selected ({len(tools)}). Maximum allowed is {MAX_TOOLS}. "
"Please reduce the number of selected functions or apps.",
)

response = StreamingResponse(openai_chat_stream(openai_messages, tools=tools))
response.headers["x-vercel-ai-data-stream"] = "v1"

Expand Down
Loading