Skip to content

Commit d29c6e8

Browse files
Native: Add MCP implementation
1 parent cb6b226 commit d29c6e8

File tree

11 files changed

+1195
-0
lines changed

11 files changed

+1195
-0
lines changed

application/apps/indexer/Cargo.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@ members = [
1414
"addons/text_grep",
1515
"addons/bufread",
1616
"indexer_base",
17+
"mcp",
1718
"merging",
1819
"parsers",
1920
"plugins_host",
Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
1+
[package]
2+
name = "mcp"
3+
edition.workspace = true
4+
version.workspace = true
5+
authors.workspace = true
6+
7+
[dependencies]
8+
axum = { version = "0.7", features = ["macros"] }
9+
ollama-rs = { version = "0.3", features = ["stream"] }
10+
rmcp = { version = "0.11", features = [
11+
"client",
12+
"macros",
13+
"reqwest",
14+
"schemars",
15+
"server",
16+
"transport-io",
17+
"transport-streamable-http-client-reqwest",
18+
"transport-streamable-http-server",
19+
"transport-streamable-http-server-session",
20+
] }
21+
anyhow.workspace = true
22+
log.workspace = true
23+
reqwest = { version = "0.12.25", features = ["json"] }
24+
schemars = "1.1"
25+
serde_json.workspace = true
26+
serde.workspace = true
27+
thiserror.workspace = true
28+
tokio-util.workspace = true
29+
tokio.workspace = true
30+
rand = "0.9"
31+
url = { version = "2.5", features = ["serde"] }
32+
33+
[lints]
34+
workspace = true
Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
use ollama_rs::generation::chat::{ChatMessage, ChatMessageResponse};
2+
use rmcp::model::ListToolsResult;
3+
use serde::{Deserialize, Serialize};
4+
5+
use crate::errors::McpError;
6+
7+
// TODO(review): Claude agent not implemented yet — placeholder kept for the
// planned `LlmProvider::Claude` variant below.
// pub mod claude;
pub mod ollama;
pub mod open_ai;

/// Name of the environment variable that holds the LLM API key
/// (read by `LlmAgent::api_key`).
pub const LLM_API_KEY: &str = "LLM_KEY";
12+
13+
/// Common interface implemented by every LLM backend (Ollama, OpenAI, ...).
pub trait LlmAgent {
    /// Creates an agent for the given model name and server URL.
    fn new(model: &str, url: &str) -> Self;

    /// Sends `prompt` to the model, advertising the MCP `tools` for
    /// tool-calling. `history` is handed to the implementation mutably so
    /// the backend can record the exchange (see the Ollama implementation,
    /// which forwards it to `send_chat_messages_with_history`).
    async fn send_chat_message(
        &self,
        prompt: String,
        history: &mut Vec<ChatMessage>,
        tools: ListToolsResult,
    ) -> Result<ChatMessageResponse, McpError>;

    /// Reads the API key from the `LLM_KEY` environment variable,
    /// returning `None` when it is unset or not valid Unicode.
    fn api_key() -> Option<String> {
        std::env::var(LLM_API_KEY).ok()
    }
}
27+
28+
/// Marker trait for provider-specific chat responses.
/// NOTE(review): currently has no methods and no implementors in this file —
/// appears to be a placeholder for future abstraction; confirm before relying on it.
pub trait ChatResponse {}
29+
30+
#[derive(Debug, Default, Serialize, Deserialize)]
31+
pub enum LlmProvider {
32+
#[default]
33+
Ollama,
34+
OpenAI,
35+
Claude,
36+
}
Lines changed: 80 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,80 @@
1+
use ollama_rs::generation::{
2+
chat::{ChatMessage, ChatMessageResponse, request::ChatMessageRequest},
3+
tools::{ToolFunctionInfo, ToolInfo, ToolType},
4+
};
5+
use rmcp::model::ListToolsResult;
6+
use serde::{Deserialize, Serialize};
7+
use serde_json::Value;
8+
use url::Url;
9+
10+
use super::LlmAgent;
11+
use crate::{agents, errors::McpError};
12+
13+
#[derive(Debug, Clone, Deserialize, Serialize)]
14+
pub struct Ollama {
15+
pub model: String,
16+
pub url: String,
17+
pub api_key: Option<String>,
18+
}
19+
20+
impl Default for Ollama {
21+
fn default() -> Self {
22+
Self {
23+
model: String::from("llama3.2"),
24+
url: String::from("http://localhost:11434"),
25+
api_key: None,
26+
}
27+
}
28+
}
29+
30+
impl LlmAgent for Ollama {
31+
fn new(model: &str, url: &str) -> Self {
32+
Self {
33+
model: String::from(model),
34+
url: String::from(url),
35+
api_key: None,
36+
}
37+
}
38+
39+
async fn send_chat_message(
40+
&self,
41+
prompt: String,
42+
history: &mut Vec<ChatMessage>,
43+
tools: ListToolsResult,
44+
) -> Result<ChatMessageResponse, McpError> {
45+
let url = Url::parse(self.url.as_str()).map_err(|err| McpError::Generic {
46+
message: format!("Error while parsing URL for the Ollama Agent; {err:?}"),
47+
})?;
48+
49+
let client = ollama_rs::Ollama::from_url(url);
50+
51+
let mcp_tools = tools
52+
.tools
53+
.iter()
54+
.map(|tool| ToolInfo {
55+
tool_type: ToolType::Function,
56+
function: ToolFunctionInfo {
57+
name: tool.name.to_string(),
58+
description: tool
59+
.description
60+
.as_ref()
61+
.map(|x| x.to_string())
62+
.unwrap_or("No description".to_string()),
63+
parameters: serde_json::from_value(Value::Object((*tool.input_schema).clone()))
64+
.unwrap_or(schemars::json_schema!({"type": ["object", "null"]})),
65+
},
66+
})
67+
.collect::<Vec<ToolInfo>>();
68+
69+
let chat_message_request =
70+
ChatMessageRequest::new(self.model.clone(), vec![ChatMessage::user(prompt)])
71+
.tools(mcp_tools);
72+
73+
client
74+
.send_chat_messages_with_history(history, chat_message_request)
75+
.await
76+
.map_err(|e| McpError::Generic {
77+
message: e.to_string(),
78+
})
79+
}
80+
}

0 commit comments

Comments (0)