Skip to content

Commit f501ef4

Browse files
committed
Route to sql_generator service if MCP is disabled
1 parent 614f976 commit f501ef4

File tree

3 files changed

+28
-2
lines changed

3 files changed

+28
-2
lines changed

src/datu/app_config.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -99,7 +99,7 @@ class DatuConfig(BaseSettings):
9999
schema_categorical_detection: bool = True
100100
schema_sample_limit: int = 1000
101101
schema_categorical_threshold: int = 10
102-
enable_mcp: bool = True
102+
enable_mcp: bool = False
103103
mcp: MCPConfig | None = Field(
104104
default_factory=MCPConfig,
105105
description="Configuration settings for MCP integration.",

src/datu/llm_clients/openai_client.py

Lines changed: 23 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -66,6 +66,7 @@ class OpenAIClient(BaseLLMClient):
6666
"""
6767

6868
def __init__(self):
69+
"""Initializes the OpenAIClient with the configured model and API key."""
6970
super().__init__()
7071
self.model = getattr(settings, "openai_model", "gpt-4o-mini")
7172
self.client = ChatOpenAI(
@@ -74,6 +75,7 @@ def __init__(self):
7475
temperature=settings.llm_temperature,
7576
)
7677
self.history = ChatMessageHistory()
78+
self.agent = None
7779
if settings.enable_mcp:
7880
if not self.mcp_client:
7981
raise RuntimeError("MCP is enabled but mcp_client was not initialized. ")
@@ -90,13 +92,28 @@ def __init__(self):
9092
raise
9193

9294
async def chat(self, input_text: str) -> str:
95+
"""Sends a chat message to the MCP agent and returns the response.
96+
Args:
97+
input_text (str): The input text to send to the agent.
98+
Returns:
99+
str: The response from the agent."""
100+
101+
if not settings.enable_mcp or self.agent is None:
102+
raise RuntimeError("chat() requires MCP enabled and an initialized agent.")
93103
response = await self.agent.run(
94104
input_text,
95105
max_steps=30,
96106
)
97107
return response
98108

99109
async def chat_completion(self, messages: list[BaseMessage], system_prompt: str | None = None) -> str:
110+
"""Generates a chat completion response based on the provided messages and system prompt.
111+
Args:
112+
messages (list[BaseMessage]): A list of messages to send to the LLM.
113+
system_prompt (str | None): An optional system prompt to guide the LLM's response.
114+
Returns:
115+
str: The generated response from the LLM.
116+
"""
100117
if settings.simulate_llm_response:
101118
return create_simulated_llm_response()
102119
if not messages:
@@ -141,7 +158,12 @@ async def chat_completion(self, messages: list[BaseMessage], system_prompt: str
141158
# Adjust this if your llm_with_tools expects different format
142159
input_text = "\n".join(msg.content for msg in self.history.messages if hasattr(msg, "content"))
143160

144-
response = await self.chat(input_text)
161+
if settings.enable_mcp:
162+
# uses MCP agent
163+
response = await self.chat(input_text)
164+
else:
165+
# direct LLM call without MCP
166+
response = await self.client.ainvoke(self.history.messages)
145167

146168
# Assuming response is a BaseMessage or similar with 'content'
147169
if hasattr(response, "content"):

src/datu/routers/chat.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
QueryDetails,
1515
estimate_query_complexity,
1616
extract_sql_blocks,
17+
generate_sql_core,
1718
get_query_execution_time_estimate,
1819
)
1920
from datu.services.sql_generator.normalizer import normalize_for_preview
@@ -38,6 +39,9 @@ async def chat_with_llm(request: ChatRequest):
3839
}
3940
"""
4041
try:
42+
if not settings.enable_mcp:
43+
return await generate_sql_core(request)
44+
4145
if not request.system_prompt:
4246
system_prompt = """
4347
You have access to MCP tools (e.g., SQL generation, web browsing, file operations, data retrieval).

0 commit comments

Comments (0)