Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions application/apps/indexer/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ members = [
"addons/text_grep",
"addons/bufread",
"indexer_base",
"mcp",
"merging",
"parsers",
"plugins_host",
Expand All @@ -18,6 +19,12 @@ members = [
"stypes"
]

[workspace.package]
edition = "2024"
version = "4.0.0"
authors = ["esrlabs.com"]


[workspace.dependencies]
log = "0.4"
serde = { version = "1.0", features = ["derive"] }
Expand Down
34 changes: 34 additions & 0 deletions application/apps/indexer/mcp/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
[package]
name = "mcp"
edition.workspace = true
version.workspace = true
authors.workspace = true

[dependencies]
axum = { version = "0.7", features = ["macros"] }
ollama-rs = { version = "0.3", features = ["stream"] }
rmcp = { version = "0.11", features = [
"client",
"macros",
"reqwest",
"schemars",
"server",
"transport-io",
"transport-streamable-http-client-reqwest",
"transport-streamable-http-server",
"transport-streamable-http-server-session",
] }
anyhow.workspace = true
log.workspace = true
reqwest = { version = "0.12.25", features = ["json"] }
schemars = "1.1"
serde_json.workspace = true
serde.workspace = true
thiserror.workspace = true
tokio-util.workspace = true
tokio.workspace = true
rand = "0.9"
url = { version = "2.5", features = ["serde"] }

[lints]
workspace = true
36 changes: 36 additions & 0 deletions application/apps/indexer/mcp/src/agents/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
use ollama_rs::generation::chat::{ChatMessage, ChatMessageResponse};
use rmcp::model::ListToolsResult;
use serde::{Deserialize, Serialize};

use crate::errors::McpError;

// pub mod claude;
pub mod ollama;
pub mod open_ai;

/// Name of the environment variable (`LLM_KEY`) that is read by
/// [`LlmAgent::api_key`] to obtain the provider API key.
pub const LLM_API_KEY: &str = "LLM_KEY";

/// Common interface implemented by every LLM backend (Ollama, OpenAI, ...).
///
/// NOTE(review): `async fn` in a public trait auto-captures lifetimes and adds
/// no `Send` bound on the returned future — confirm callers never need to
/// spawn these futures onto a multithreaded executor.
pub trait LlmAgent {
    /// Creates an agent for `model` served at the base `url`.
    fn new(model: &str, url: &str) -> Self;

    /// Sends `prompt` to the backend and returns the model's reply.
    ///
    /// `history` is the mutable conversation so far; `tools` is the list of
    /// MCP tools the model is allowed to request.
    async fn send_chat_message(
        &self,
        prompt: String,
        history: &mut Vec<ChatMessage>,
        tools: ListToolsResult,
    ) -> Result<ChatMessageResponse, McpError>;

    /// Reads the provider API key from the [`LLM_API_KEY`] environment
    /// variable, returning `None` when the variable is unset or not valid
    /// Unicode.
    fn api_key() -> Option<String> {
        std::env::var(LLM_API_KEY).ok()
    }
}

/// Marker trait for provider-specific chat responses.
/// NOTE(review): currently empty and unimplemented in this module — confirm it
/// is still needed, or remove it.
pub trait ChatResponse {}

/// Identifies which LLM backend should serve a request.
///
/// A fieldless enum, so it is freely `Copy`; the extra derives make it usable
/// as a comparison/key type without cloning.
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum LlmProvider {
    /// Local Ollama server — the default provider.
    #[default]
    Ollama,
    OpenAI,
    Claude,
}
80 changes: 80 additions & 0 deletions application/apps/indexer/mcp/src/agents/ollama.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
use ollama_rs::generation::{
chat::{ChatMessage, ChatMessageResponse, request::ChatMessageRequest},
tools::{ToolFunctionInfo, ToolInfo, ToolType},
};
use rmcp::model::ListToolsResult;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use url::Url;

use super::LlmAgent;
use crate::{agents, errors::McpError};

/// Connection settings for an Ollama server.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct Ollama {
    /// Model name, e.g. `"llama3.2"`.
    pub model: String,
    /// Base URL of the Ollama server, e.g. `"http://localhost:11434"`.
    pub url: String,
    /// Optional API key. NOTE(review): never read in this module — presumably
    /// reserved for remote/authenticated deployments; confirm before relying on it.
    pub api_key: Option<String>,
}

impl Default for Ollama {
    /// Defaults to the `llama3.2` model on a locally running Ollama instance,
    /// with no API key configured.
    fn default() -> Self {
        let model = "llama3.2".to_string();
        let url = "http://localhost:11434".to_string();
        Self {
            model,
            url,
            api_key: None,
        }
    }
}

impl LlmAgent for Ollama {
    /// Creates an agent for `model` served at `url`; no API key is set.
    fn new(model: &str, url: &str) -> Self {
        Self {
            model: String::from(model),
            url: String::from(url),
            api_key: None,
        }
    }

    /// Sends `prompt` to the Ollama server, appending the exchange to `history`.
    ///
    /// The MCP tool list is translated into Ollama tool definitions so the
    /// model may request tool calls. Returns `McpError::Generic` when the
    /// configured URL fails to parse or the chat request fails.
    async fn send_chat_message(
        &self,
        prompt: String,
        history: &mut Vec<ChatMessage>,
        tools: ListToolsResult,
    ) -> Result<ChatMessageResponse, McpError> {
        let url = Url::parse(self.url.as_str()).map_err(|err| McpError::Generic {
            message: format!("Error while parsing URL for the Ollama Agent; {err:?}"),
        })?;

        // A fresh client per call; ollama_rs clients are cheap to construct.
        let client = ollama_rs::Ollama::from_url(url);

        // Translate MCP tool descriptors into Ollama's tool schema.
        let mcp_tools = tools
            .tools
            .iter()
            .map(|tool| ToolInfo {
                tool_type: ToolType::Function,
                function: ToolFunctionInfo {
                    name: tool.name.to_string(),
                    description: tool
                        .description
                        .as_ref()
                        .map(|x| x.to_string())
                        // Lazy fallback: avoids allocating the default string
                        // when a description exists (clippy::or_fun_call).
                        .unwrap_or_else(|| "No description".to_string()),
                    // If the MCP input schema cannot be interpreted, fall back
                    // to a permissive "object or null" schema — built lazily so
                    // the macro only runs on the error path.
                    parameters: serde_json::from_value(Value::Object((*tool.input_schema).clone()))
                        .unwrap_or_else(|_| schemars::json_schema!({"type": ["object", "null"]})),
                },
            })
            .collect::<Vec<ToolInfo>>();

        let chat_message_request =
            ChatMessageRequest::new(self.model.clone(), vec![ChatMessage::user(prompt)])
                .tools(mcp_tools);

        client
            .send_chat_messages_with_history(history, chat_message_request)
            .await
            .map_err(|e| McpError::Generic {
                message: e.to_string(),
            })
    }
}
Loading