
Commit 3a5c389

TOOL-519: Implement mock LLM client prototype and conversation state
1 parent e80f2b4 commit 3a5c389

File tree

6 files changed: +212 additions, −38 deletions
application/apps/indexer/mcp/src/client/conversation.rs

Lines changed: 47 additions & 0 deletions
@@ -0,0 +1,47 @@
use rmcp::{
    model::Content,
    serde_json::{Map, Value},
};

pub enum ChatMessage {
    SystemPrompt {
        content: String,
    },
    Prompt {
        content: String,
    },
    ToolResult {
        content: Vec<Content>,
    },
    ToolCall {
        tool_name: String,
        arguments: Option<Map<String, Value>>,
    },
    FinalResponse {
        content: String,
    },
}

pub struct Conversation {
    chat_messages: Vec<ChatMessage>,
    // TODO: keep track of steps?
    // TODO: conversation ID=?
}

impl Conversation {
    pub fn new(system_prompt: String) -> Self {
        Self {
            chat_messages: vec![ChatMessage::SystemPrompt {
                content: system_prompt,
            }],
        }
    }

    pub fn chat_messages(&self) -> &[ChatMessage] {
        &self.chat_messages
    }

    pub fn add_chat_message(&mut self, message: ChatMessage) {
        self.chat_messages.push(message);
    }
}
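
For orientation, a minimal sketch (not part of the diff) of how the Conversation API above is driven; the prompt text is illustrative:

use crate::client::conversation::{ChatMessage, Conversation};

fn conversation_demo() {
    // new() seeds the history with the system prompt as the first message.
    let mut conversation = Conversation::new("You are a helpful assistant.".into());

    // Subsequent user/LLM/tool traffic is appended in arrival order.
    conversation.add_chat_message(ChatMessage::Prompt {
        content: "Remove the high-frequency noise from this log".into(),
    });

    // chat_messages() exposes the full history as a read-only slice.
    assert_eq!(conversation.chat_messages().len(), 2);
}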

application/apps/indexer/mcp/src/client/llm.rs

Lines changed: 0 additions & 9 deletions
This file was deleted.
application/apps/indexer/mcp/src/client/llm/mock.rs

Lines changed: 45 additions & 0 deletions
@@ -0,0 +1,45 @@
use std::time::Duration;

use rmcp::serde_json::{self, Map, Value};
use tokio::time::sleep;

use super::{ClientToLlm, LlmToClient};
use crate::{
    client::conversation::{self, ChatMessage, Conversation},
    types::McpError,
};

pub struct MockLlmClient;

impl MockLlmClient {
    pub async fn process(&self, conversation: &Conversation) -> Result<LlmToClient, McpError> {
        match conversation.chat_messages().last() {
            Some(ChatMessage::Prompt { content }) => {
                // Simulate LLM latency
                sleep(Duration::from_secs(5)).await;

                Ok(LlmToClient::ToolCall {
                    tool_name: "apply_filter".into(),
                    arguments: serde_json::json!({
                        "filter_type": "low_pass",
                        "cutoff_frequency": 300,
                    })
                    .as_object()
                    .cloned(),
                })
            }

            Some(ChatMessage::ToolResult { content }) => {
                // Simulate follow-up reasoning
                sleep(Duration::from_secs(3)).await;

                Ok(LlmToClient::FinalResponse {
                    content: format!("Final LLM Answer: Tool result {:?}", content),
                })
            }

            _ => Err(McpError::Generic {
                message: "Mock LLM client received unsupported request".into(),
            }),
        }
    }
}
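
A sketch (again not part of the diff) of the mock's two-phase contract, assuming the types above: a trailing Prompt yields the canned apply_filter tool call after a simulated 5-second delay, a trailing ToolResult yields a FinalResponse after 3 seconds, and any other trailing message is an error:

use crate::{
    client::conversation::{ChatMessage, Conversation},
    client::llm::{mock::MockLlmClient, LlmToClient},
    types::McpError,
};

async fn mock_demo() -> Result<(), McpError> {
    let client = MockLlmClient;
    let mut conversation = Conversation::new("You are a helpful assistant.".into());

    // A conversation ending in a Prompt always produces the canned tool call.
    conversation.add_chat_message(ChatMessage::Prompt {
        content: "Apply a low-pass filter".into(),
    });
    match client.process(&conversation).await? {
        LlmToClient::ToolCall { tool_name, .. } => assert_eq!(tool_name, "apply_filter"),
        LlmToClient::FinalResponse { .. } => unreachable!("Prompt never yields a final answer"),
    }
    Ok(())
}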
application/apps/indexer/mcp/src/client/llm/mod.rs

Lines changed: 52 additions & 0 deletions
@@ -0,0 +1,52 @@
use crate::{
    client::conversation::{self, Conversation},
    types::McpError,
};
use rmcp::serde_json::{Map, Value};

pub mod mock;

// Client messages sent to the LLM
pub enum ClientToLlm {
    SystemPrompt { content: String },
    UserPrompt { content: String },
    ToolResult { content: String },
}

// LLM messages sent to the client
pub enum LlmToClient {
    FinalResponse {
        content: String,
    },
    ToolCall {
        tool_name: String,
        arguments: Option<Map<String, Value>>,
    },
}

/// Configuration for creating an LLM client
#[derive(Debug, Clone)]
pub enum LlmConfig {
    Mock,
    // OpenAi { api_key: String, model: String },
}

/// The actual LLM client implementation - this is what you use
pub enum Llm {
    Mock(mock::MockLlmClient),
    // OpenAi(openai::OpenAiClient),
}

impl Llm {
    pub fn from_config(config: LlmConfig) -> Self {
        match config {
            LlmConfig::Mock => Llm::Mock(mock::MockLlmClient),
        }
    }

    pub async fn process(&self, conversation: &Conversation) -> Result<LlmToClient, McpError> {
        match self {
            Llm::Mock(client) => client.process(conversation).await,
        }
    }
}
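
The enum facade keeps backend dispatch static, with the commented-out OpenAi variants marking where a real backend would slot in. A sketch (not part of the diff) of the intended call path, noting that a conversation holding only the system prompt falls through to the mock's error arm:

use crate::{
    client::conversation::Conversation,
    client::llm::{Llm, LlmConfig},
    types::McpError,
};

async fn llm_facade_demo() -> Result<(), McpError> {
    // from_config is the only constructor; today Mock is the sole backend.
    let llm = Llm::from_config(LlmConfig::Mock);

    // With only the system prompt in the history, MockLlmClient::process
    // hits its fallback arm and returns McpError::Generic.
    let conversation = Conversation::new("You are a helpful assistant.".into());
    assert!(llm.process(&conversation).await.is_err());
    Ok(())
}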

application/apps/indexer/mcp/src/client/mod.rs

Lines changed: 66 additions & 24 deletions
@@ -4,19 +4,22 @@ use tokio::{select, sync::mpsc};
 
 use rmcp::{
     RoleClient,
-    model::{ClientInfo, Implementation, InitializeRequestParam},
+    model::{CallToolRequestParam, ClientInfo, Implementation, InitializeRequestParam},
+    serde_json::{Map, Value},
     service::{RunningService, ServiceExt},
     transport::StreamableHttpClientTransport,
 };
 
 use crate::{
     client::{
-        llm::LlmConfig,
+        conversation::{ChatMessage, Conversation},
+        llm::{ClientToLlm, Llm, LlmConfig, LlmToClient},
         messages::{McpChipmunkToClient, McpClientToChipmunk},
     },
     types::McpError,
 };
 
+pub mod conversation;
 pub mod llm;
 pub mod messages;
 
@@ -56,6 +59,7 @@ impl MCPClient {
     }
 
     pub async fn start(self) -> Result<(), McpError> {
+        // Setup MCP connection
         let transport =
             StreamableHttpClientTransport::from_uri("http:://localhost:8080/chipmunk_mcp");
 
@@ -71,7 +75,7 @@
             },
         };
 
-        let mcp_service = client_info
+        let mcp_service: RunningService<RoleClient, InitializeRequestParam> = client_info
             .serve(transport)
             .await
             .map_err(|e| McpError::Generic {
@@ -80,15 +84,22 @@
 
         warn!("Connected to MCP server: {:?}", mcp_service.peer_info());
 
-        let chipmunk_to_client_rx = self.chipmunk_to_client_rx;
-
         let tools = mcp_service.list_tools(Default::default()).await?;
+        warn!("Available tools: {:?}", tools);
 
+        let chipmunk_to_client_rx = self.chipmunk_to_client_rx;
         let chipmunk_response_tx = self.client_to_chipmunk_tx.clone();
 
+        let llm = Llm::from_config(self.llm_config);
+
         tokio::spawn(async move {
-            if let Err(e) =
-                MCPClient::run(chipmunk_to_client_rx, chipmunk_response_tx, mcp_service).await
+            if let Err(e) = MCPClient::run(
+                chipmunk_to_client_rx,
+                chipmunk_response_tx,
+                mcp_service,
+                llm,
+            )
+            .await
             {
                 error!("MCP client event loop ended: {:?}", e);
             }
@@ -99,29 +110,60 @@
 
     pub async fn run(
         mut chipmunk_request_rx: mpsc::Receiver<McpChipmunkToClient>,
-        _chipmunk_response_tx: mpsc::Sender<McpClientToChipmunk>,
-        mcp_service: RunningService<RoleClient, InitializeRequestParam>,
+        chipmunk_response_tx: mpsc::Sender<McpClientToChipmunk>,
+        mut mcp_service: RunningService<RoleClient, InitializeRequestParam>,
+        llm: Llm,
     ) -> Result<(), McpError> {
-        let tools_result = mcp_service.list_tools(Default::default()).await;
-
-        loop {
-            select! {
-                Some(chipmunk_request) = chipmunk_request_rx.recv() => {
-                    match chipmunk_request {
-                        McpChipmunkToClient::Prompt { .. } => {
-                            // Send prompt to LLM via HTTP Client
-                            // Wait for response and handle:
-                            // match response {
-                            //     LlmMessage::ToolCall => {
-                            //         mcp_service.tool_call(...)
-                            //     }
-                            // }
+        let mut conversation = Conversation::new("You are a helpful assistant.".into());
+
+        select! {
+            Some(chipmunk_request) = chipmunk_request_rx.recv() => {
+                match chipmunk_request {
+                    McpChipmunkToClient::Prompt { prompt } => {
+                        conversation.add_chat_message(ChatMessage::Prompt { content: prompt.clone() });
+
+                        loop {
+                            let llm_response = llm.process(&conversation).await?;
+
+                            match llm_response {
+                                LlmToClient::ToolCall { tool_name, arguments } => {
+                                    let tool_call_param = CallToolRequestParam {
+                                        name: tool_name.clone().into(),
+                                        arguments: arguments.clone(),
+                                    };
+
+                                    let tool_result = mcp_service
+                                        .call_tool(tool_call_param)
+                                        .await
+                                        .map_err(|e| McpError::Generic {
+                                            message: e.to_string(),
+                                        })?;
+
+                                    conversation.add_chat_message(ChatMessage::ToolResult {
+                                        content: tool_result.content,
+                                    });
+
+                                    let _llm_response = llm.process(&conversation).await?;
+                                }
+
+                                LlmToClient::FinalResponse { content } => {
+                                    warn!("LLM final response: {:?}", content);
+                                    // 5. Send final response back to Chipmunk
+                                    // chipmunk_response_tx
+                                    //     .send(McpClientToChipmunk::FinalAnswer { content })
+                                    //     .await
+                                    //     .map_err(|_| McpError::ChannelClosed)?;
+
+                                    break;
+                                }
+                            }
                         }
                     }
                 }
-                else => break
             }
+            else => {}
         }
+
         Ok(())
     }
 }

application/apps/indexer/mcp/src/lib.rs

Lines changed: 2 additions & 5 deletions
@@ -20,12 +20,9 @@ pub struct McpChannelEndpoints {
 }
 
 pub fn new() -> (McpServer, MCPClient, McpChannelEndpoints) {
-    let llm_config = LlmConfig::Dummy {
-        api_key: String::default(),
-        model: None,
-    };
+    let llm_config = LlmConfig::Mock {};
     let mcp_config = McpConfig {
-        url: String::from("http//:localhost:8080/chipmunk_mcp"),
+        url: String::from("http//:localhost:8181"),
     };
     let (mcp_client, chipmunk_to_client_tx, client_to_chipmunk_rx) =
         MCPClient::new(mcp_config, llm_config);
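
For completeness, a sketch (not part of the diff) of consuming this wiring function; the crate name mcp is inferred from the path application/apps/indexer/mcp and the McpChannelEndpoints fields are not shown in this diff, so they are kept opaque here:

async fn wiring_demo() {
    // Hypothetical caller: new() builds the server, the client, and the
    // channel endpoints that the Chipmunk side uses for prompt traffic.
    let (server, client, endpoints) = mcp::new();

    // client.start() connects the transport and spawns the event loop
    // rewritten in client/mod.rs above.
    let _ = client.start().await;
    let _ = (server, endpoints);
}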
