Skip to content

Commit f111615

Browse files
committed
Feature: Enable programmatically passing in api_key besides reading from env
1 parent ccb1b0f commit f111615

File tree

8 files changed

+11
-59
lines changed

8 files changed

+11
-59
lines changed

src/llm/anthropic.rs

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -14,11 +14,7 @@ pub struct Client {
1414
}
1515

1616
impl Client {
17-
pub async fn new(
18-
address: Option<String>,
19-
api_key: Option<String>,
20-
_api_config: Option<super::LlmApiConfig>,
21-
) -> Result<Self> {
17+
pub async fn new(address: Option<String>, api_key: Option<String>) -> Result<Self> {
2218
if address.is_some() {
2319
api_bail!("Anthropic doesn't support custom API address");
2420
}

src/llm/gemini.rs

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -34,11 +34,7 @@ pub struct AiStudioClient {
3434
}
3535

3636
impl AiStudioClient {
37-
pub fn new(
38-
address: Option<String>,
39-
api_key: Option<String>,
40-
_api_config: Option<super::LlmApiConfig>,
41-
) -> Result<Self> {
37+
pub fn new(address: Option<String>, api_key: Option<String>) -> Result<Self> {
4238
if address.is_some() {
4339
api_bail!("Gemini doesn't support custom API address");
4440
}

src/llm/litellm.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@ impl Client {
77
pub async fn new_litellm(
88
address: Option<String>,
99
api_key: Option<String>,
10-
_api_config: Option<super::LlmApiConfig>,
1110
) -> anyhow::Result<Self> {
1211
let address = address.unwrap_or_else(|| "http://127.0.0.1:4000".to_string());
1312

src/llm/mod.rs

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -127,28 +127,28 @@ pub async fn new_llm_generation_client(
127127
}
128128
LlmApiType::OpenAi => Box::new(openai::Client::new(address, api_key, api_config)?)
129129
as Box<dyn LlmGenerationClient>,
130-
LlmApiType::Gemini => Box::new(gemini::AiStudioClient::new(address, api_key, api_config)?)
130+
LlmApiType::Gemini => Box::new(gemini::AiStudioClient::new(address, api_key)?)
131131
as Box<dyn LlmGenerationClient>,
132132
LlmApiType::VertexAi => {
133133
Box::new(gemini::VertexAiClient::new(address, api_key, api_config).await?)
134134
as Box<dyn LlmGenerationClient>
135135
}
136136
LlmApiType::Anthropic => {
137-
Box::new(anthropic::Client::new(address, api_key, api_config).await?)
137+
Box::new(anthropic::Client::new(address, api_key).await?)
138138
as Box<dyn LlmGenerationClient>
139139
}
140140
LlmApiType::LiteLlm => {
141-
Box::new(litellm::Client::new_litellm(address, api_key, api_config).await?)
141+
Box::new(litellm::Client::new_litellm(address, api_key).await?)
142142
as Box<dyn LlmGenerationClient>
143143
}
144144
LlmApiType::OpenRouter => {
145-
Box::new(openrouter::Client::new_openrouter(address, api_key, api_config).await?)
145+
Box::new(openrouter::Client::new_openrouter(address, api_key).await?)
146146
as Box<dyn LlmGenerationClient>
147147
}
148148
LlmApiType::Voyage => {
149149
api_bail!("Voyage is not supported for generation")
150150
}
151-
LlmApiType::Vllm => Box::new(vllm::Client::new_vllm(address, api_key, api_config).await?)
151+
LlmApiType::Vllm => Box::new(vllm::Client::new_vllm(address, api_key).await?)
152152
as Box<dyn LlmGenerationClient>,
153153
};
154154
Ok(client)
@@ -164,11 +164,11 @@ pub async fn new_llm_embedding_client(
164164
LlmApiType::Ollama => {
165165
Box::new(ollama::Client::new(address).await?) as Box<dyn LlmEmbeddingClient>
166166
}
167-
LlmApiType::Gemini => Box::new(gemini::AiStudioClient::new(address, api_key, api_config)?)
167+
LlmApiType::Gemini => Box::new(gemini::AiStudioClient::new(address, api_key)?)
168168
as Box<dyn LlmEmbeddingClient>,
169169
LlmApiType::OpenAi => Box::new(openai::Client::new(address, api_key, api_config)?)
170170
as Box<dyn LlmEmbeddingClient>,
171-
LlmApiType::Voyage => Box::new(voyage::Client::new(address, api_key, api_config)?)
171+
LlmApiType::Voyage => Box::new(voyage::Client::new(address, api_key)?)
172172
as Box<dyn LlmEmbeddingClient>,
173173
LlmApiType::VertexAi => {
174174
Box::new(gemini::VertexAiClient::new(address, api_key, api_config).await?)

src/llm/openrouter.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@ impl Client {
77
pub async fn new_openrouter(
88
address: Option<String>,
99
api_key: Option<String>,
10-
_api_config: Option<super::LlmApiConfig>,
1110
) -> anyhow::Result<Self> {
1211
let address = address.unwrap_or_else(|| "https://openrouter.ai/api/v1".to_string());
1312

src/llm/vllm.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@ impl Client {
77
pub async fn new_vllm(
88
address: Option<String>,
99
api_key: Option<String>,
10-
_api_config: Option<super::LlmApiConfig>,
1110
) -> anyhow::Result<Self> {
1211
let address = address.unwrap_or_else(|| "http://127.0.0.1:8000/v1".to_string());
1312

src/llm/voyage.rs

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -33,11 +33,7 @@ pub struct Client {
3333
}
3434

3535
impl Client {
36-
pub fn new(
37-
address: Option<String>,
38-
api_key: Option<String>,
39-
_api_config: Option<super::LlmApiConfig>,
40-
) -> Result<Self> {
36+
pub fn new(address: Option<String>, api_key: Option<String>) -> Result<Self> {
4137
if address.is_some() {
4238
api_bail!("Voyage AI doesn't support custom API address");
4339
}

src/ops/functions/embed_text.rs

Lines changed: 1 addition & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -93,44 +93,11 @@ impl SimpleFunctionFactoryBase for Factory {
9393
.expect_type(&ValueType::Basic(BasicValueType::Str))?
9494
.required()?;
9595

96-
// Create API config based on api_key parameter if provided
97-
let api_config = if let Some(_api_key) = &spec.api_key {
98-
match spec.api_type {
99-
LlmApiType::OpenAi => Some(LlmApiConfig::OpenAi(
100-
super::super::super::llm::OpenAiConfig {
101-
org_id: None,
102-
project_id: None,
103-
},
104-
)),
105-
LlmApiType::Anthropic
106-
| LlmApiType::Gemini
107-
| LlmApiType::Voyage
108-
| LlmApiType::LiteLlm
109-
| LlmApiType::OpenRouter
110-
| LlmApiType::Vllm => {
111-
// These API types don't require a config, just an API key
112-
None
113-
}
114-
_ => {
115-
if let Some(config) = spec.api_config.clone() {
116-
Some(config)
117-
} else {
118-
api_bail!(
119-
"API key parameter is not supported for API type {:?}",
120-
spec.api_type
121-
)
122-
}
123-
}
124-
}
125-
} else {
126-
spec.api_config.clone()
127-
};
128-
12996
let client = new_llm_embedding_client(
13097
spec.api_type,
13198
spec.address.clone(),
13299
spec.api_key.clone(),
133-
api_config,
100+
spec.api_config.clone(),
134101
)
135102
.await?;
136103
let output_dimension = match spec.output_dimension {

0 commit comments

Comments (0)