
Commit 0059fd8

Moved api_key to Common LlmSpec Layer
1 parent afb5a96 commit 0059fd8

11 files changed: +72 additions, −121 deletions


python/cocoindex/llm.py

Lines changed: 1 addition & 13 deletions
@@ -34,7 +34,6 @@ class OpenAiConfig:
 
     org_id: str | None = None
     project_id: str | None = None
-    api_key: str | None = None
 
 
 @dataclass
@@ -43,53 +42,41 @@ class AnthropicConfig:
 
     kind = "Anthropic"
 
-    api_key: str | None = None
-
 
 @dataclass
 class GeminiConfig:
     """A specification for a Gemini LLM."""
 
     kind = "Gemini"
 
-    api_key: str | None = None
-
 
 @dataclass
 class VoyageConfig:
     """A specification for a Voyage LLM."""
 
     kind = "Voyage"
 
-    api_key: str | None = None
-
 
 @dataclass
 class LiteLlmConfig:
     """A specification for a LiteLLM LLM."""
 
     kind = "LiteLlm"
 
-    api_key: str | None = None
-
 
 @dataclass
 class OpenRouterConfig:
     """A specification for an OpenRouter LLM."""
 
     kind = "OpenRouter"
 
-    api_key: str | None = None
-
 
 @dataclass
 class VllmConfig:
     """A specification for a VLLM LLM."""
 
     kind = "Vllm"
 
-    api_key: str | None = None
-
 
 @dataclass
 class LlmSpec:
@@ -98,4 +85,5 @@ class LlmSpec:
     api_type: LlmApiType
     model: str
     address: str | None = None
+    api_key: str | None = None
     api_config: VertexAiConfig | OpenAiConfig | AnthropicConfig | GeminiConfig | VoyageConfig | LiteLlmConfig | OpenRouterConfig | VllmConfig | None = None
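
With this change, the API key is configured once on LlmSpec instead of on each provider config. A minimal usage sketch, assuming the cocoindex.llm import path of the file above; the LlmApiType member name and the model string are illustrative placeholders, not taken from this commit:

from cocoindex.llm import LlmSpec, LlmApiType, OpenAiConfig

# api_key now lives on the common spec; OpenAiConfig keeps only the
# provider-specific fields (org_id, project_id).
spec = LlmSpec(
    api_type=LlmApiType.OPENAI,   # enum member name assumed for illustration
    model="gpt-4o",               # placeholder model name
    api_key="sk-...",             # optional; omit to fall back to OPENAI_API_KEY
    api_config=OpenAiConfig(org_id=None, project_id=None),
)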

src/llm/anthropic.rs

Lines changed: 12 additions & 12 deletions
@@ -14,23 +14,23 @@ pub struct Client {
 }
 
 impl Client {
-    pub async fn new(address: Option<String>, api_config: Option<super::LlmApiConfig>) -> Result<Self> {
+    pub async fn new(
+        address: Option<String>,
+        api_key: Option<String>,
+        _api_config: Option<super::LlmApiConfig>,
+    ) -> Result<Self> {
         if address.is_some() {
             api_bail!("Anthropic doesn't support custom API address");
         }
-
-        let api_key = if let Some(super::LlmApiConfig::Anthropic(config)) = api_config {
-            if let Some(key) = config.api_key {
-                key
-            } else {
-                std::env::var("ANTHROPIC_API_KEY")
-                    .map_err(|_| anyhow::anyhow!("ANTHROPIC_API_KEY environment variable must be set"))?
-            }
+
+        let api_key = if let Some(key) = api_key {
+            key
         } else {
-            std::env::var("ANTHROPIC_API_KEY")
-                .map_err(|_| anyhow::anyhow!("ANTHROPIC_API_KEY environment variable must be set"))?
+            std::env::var("ANTHROPIC_API_KEY").map_err(|_| {
+                anyhow::anyhow!("ANTHROPIC_API_KEY environment variable must be set")
+            })?
         };
-
+
         Ok(Self {
             api_key,
             client: reqwest::Client::new(),

src/llm/gemini.rs

Lines changed: 4 additions & 8 deletions
@@ -30,18 +30,13 @@ pub struct AiStudioClient {
 }
 
 impl AiStudioClient {
-    pub fn new(address: Option<String>, api_config: Option<super::LlmApiConfig>) -> Result<Self> {
+    pub fn new(address: Option<String>, api_key: Option<String>, _api_config: Option<super::LlmApiConfig>) -> Result<Self> {
         if address.is_some() {
             api_bail!("Gemini doesn't support custom API address");
         }
 
-        let api_key = if let Some(super::LlmApiConfig::Gemini(config)) = api_config {
-            if let Some(key) = config.api_key {
-                key
-            } else {
-                std::env::var("GEMINI_API_KEY")
-                    .map_err(|_| anyhow::anyhow!("GEMINI_API_KEY environment variable must be set"))?
-            }
+        let api_key = if let Some(key) = api_key {
+            key
         } else {
             std::env::var("GEMINI_API_KEY")
                 .map_err(|_| anyhow::anyhow!("GEMINI_API_KEY environment variable must be set"))?
@@ -249,6 +244,7 @@ pub struct VertexAiClient {
 impl VertexAiClient {
     pub async fn new(
         address: Option<String>,
+        _api_key: Option<String>,
         api_config: Option<super::LlmApiConfig>,
     ) -> Result<Self> {
         if address.is_some() {

src/llm/litellm.rs

Lines changed: 2 additions & 6 deletions
@@ -4,14 +4,10 @@ use async_openai::config::OpenAIConfig;
 pub use super::openai::Client;
 
 impl Client {
-    pub async fn new_litellm(address: Option<String>, api_config: Option<super::LlmApiConfig>) -> anyhow::Result<Self> {
+    pub async fn new_litellm(address: Option<String>, api_key: Option<String>, _api_config: Option<super::LlmApiConfig>) -> anyhow::Result<Self> {
         let address = address.unwrap_or_else(|| "http://127.0.0.1:4000".to_string());
 
-        let api_key = if let Some(super::LlmApiConfig::LiteLlm(config)) = api_config {
-            config.api_key.or_else(|| std::env::var("LITELLM_API_KEY").ok())
-        } else {
-            std::env::var("LITELLM_API_KEY").ok()
-        };
+        let api_key = api_key.or_else(|| std::env::var("LITELLM_API_KEY").ok());
 
         let mut config = OpenAIConfig::new().with_api_base(address);
         if let Some(api_key) = api_key {

src/llm/mod.rs

Lines changed: 33 additions & 38 deletions
@@ -30,38 +30,25 @@ pub struct VertexAiConfig {
 pub struct OpenAiConfig {
     pub org_id: Option<String>,
     pub project_id: Option<String>,
-    pub api_key: Option<String>,
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct AnthropicConfig {
-    pub api_key: Option<String>,
-}
+pub struct AnthropicConfig {}
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct GeminiConfig {
-    pub api_key: Option<String>,
-}
+pub struct GeminiConfig {}
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct VoyageConfig {
-    pub api_key: Option<String>,
-}
+pub struct VoyageConfig {}
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct LiteLlmConfig {
-    pub api_key: Option<String>,
-}
+pub struct LiteLlmConfig {}
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct OpenRouterConfig {
-    pub api_key: Option<String>,
-}
+pub struct OpenRouterConfig {}
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct VllmConfig {
-    pub api_key: Option<String>,
-}
+pub struct VllmConfig {}
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
 #[serde(tag = "kind")]
@@ -81,6 +68,7 @@ pub struct LlmSpec {
     pub api_type: LlmApiType,
     pub address: Option<String>,
     pub model: String,
+    pub api_key: Option<String>,
     pub api_config: Option<LlmApiConfig>,
 }
 
@@ -154,31 +142,37 @@ mod voyage;
 pub async fn new_llm_generation_client(
     api_type: LlmApiType,
     address: Option<String>,
+    api_key: Option<String>,
     api_config: Option<LlmApiConfig>,
 ) -> Result<Box<dyn LlmGenerationClient>> {
     let client = match api_type {
         LlmApiType::Ollama => {
             Box::new(ollama::Client::new(address).await?) as Box<dyn LlmGenerationClient>
         }
-        LlmApiType::OpenAi => {
-            Box::new(openai::Client::new(address, api_config)?) as Box<dyn LlmGenerationClient>
-        }
-        LlmApiType::Gemini => Box::new(gemini::AiStudioClient::new(address, api_config)?)
-            as Box<dyn LlmGenerationClient>,
-        LlmApiType::VertexAi => Box::new(gemini::VertexAiClient::new(address, api_config).await?)
+        LlmApiType::OpenAi => Box::new(openai::Client::new(address, api_key, api_config)?)
            as Box<dyn LlmGenerationClient>,
-        LlmApiType::Anthropic => Box::new(anthropic::Client::new(address, api_config).await?)
-            as Box<dyn LlmGenerationClient>,
-        LlmApiType::LiteLlm => Box::new(litellm::Client::new_litellm(address, api_config).await?)
+        LlmApiType::Gemini => Box::new(gemini::AiStudioClient::new(address, api_key, api_config)?)
            as Box<dyn LlmGenerationClient>,
+        LlmApiType::VertexAi => {
+            Box::new(gemini::VertexAiClient::new(address, api_key, api_config).await?)
+                as Box<dyn LlmGenerationClient>
+        }
+        LlmApiType::Anthropic => {
+            Box::new(anthropic::Client::new(address, api_key, api_config).await?)
+                as Box<dyn LlmGenerationClient>
+        }
+        LlmApiType::LiteLlm => {
+            Box::new(litellm::Client::new_litellm(address, api_key, api_config).await?)
+                as Box<dyn LlmGenerationClient>
+        }
         LlmApiType::OpenRouter => {
-            Box::new(openrouter::Client::new_openrouter(address, api_config).await?)
+            Box::new(openrouter::Client::new_openrouter(address, api_key, api_config).await?)
                 as Box<dyn LlmGenerationClient>
        }
         LlmApiType::Voyage => {
             api_bail!("Voyage is not supported for generation")
         }
-        LlmApiType::Vllm => Box::new(vllm::Client::new_vllm(address, api_config).await?)
+        LlmApiType::Vllm => Box::new(vllm::Client::new_vllm(address, api_key, api_config).await?)
            as Box<dyn LlmGenerationClient>,
     };
     Ok(client)
@@ -187,22 +181,23 @@ pub async fn new_llm_generation_client(
 pub async fn new_llm_embedding_client(
     api_type: LlmApiType,
     address: Option<String>,
+    api_key: Option<String>,
     api_config: Option<LlmApiConfig>,
 ) -> Result<Box<dyn LlmEmbeddingClient>> {
     let client = match api_type {
         LlmApiType::Ollama => {
             Box::new(ollama::Client::new(address).await?) as Box<dyn LlmEmbeddingClient>
         }
-        LlmApiType::Gemini => Box::new(gemini::AiStudioClient::new(address, api_config)?)
+        LlmApiType::Gemini => Box::new(gemini::AiStudioClient::new(address, api_key, api_config)?)
            as Box<dyn LlmEmbeddingClient>,
-        LlmApiType::OpenAi => {
-            Box::new(openai::Client::new(address, api_config)?) as Box<dyn LlmEmbeddingClient>
-        }
-        LlmApiType::Voyage => {
-            Box::new(voyage::Client::new(address, api_config)?) as Box<dyn LlmEmbeddingClient>
-        }
-        LlmApiType::VertexAi => Box::new(gemini::VertexAiClient::new(address, api_config).await?)
+        LlmApiType::OpenAi => Box::new(openai::Client::new(address, api_key, api_config)?)
+            as Box<dyn LlmEmbeddingClient>,
+        LlmApiType::Voyage => Box::new(voyage::Client::new(address, api_key, api_config)?)
            as Box<dyn LlmEmbeddingClient>,
+        LlmApiType::VertexAi => {
+            Box::new(gemini::VertexAiClient::new(address, api_key, api_config).await?)
+                as Box<dyn LlmEmbeddingClient>
+        }
         LlmApiType::OpenRouter | LlmApiType::LiteLlm | LlmApiType::Vllm | LlmApiType::Anthropic => {
             api_bail!("Embedding is not supported for API type {:?}", api_type)
         }
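
Because the provider config structs above are now empty (AnthropicConfig {}, GeminiConfig {}, and so on), a spec that only needs a key can omit api_config entirely; the key rides on LlmSpec and is threaded into the client constructors as the new api_key argument. A sketch on the Python side, with the enum member and model name assumed for illustration:

from cocoindex.llm import LlmSpec, LlmApiType

# No api_config needed: the spec-level api_key (or the ANTHROPIC_API_KEY
# environment variable, if api_key is left unset) is all the client requires.
spec = LlmSpec(
    api_type=LlmApiType.ANTHROPIC,   # enum member name assumed for illustration
    model="claude-3-5-sonnet",       # placeholder model name
    api_key=None,                    # defer to ANTHROPIC_API_KEY
)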

src/llm/openai.rs

Lines changed: 3 additions & 3 deletions
@@ -33,7 +33,7 @@ impl Client {
         Self { client }
     }
 
-    pub fn new(address: Option<String>, api_config: Option<super::LlmApiConfig>) -> Result<Self> {
+    pub fn new(address: Option<String>, api_key: Option<String>, api_config: Option<super::LlmApiConfig>) -> Result<Self> {
         let config = match api_config {
             Some(super::LlmApiConfig::OpenAi(config)) => config,
             Some(_) => api_bail!("unexpected config type, expected OpenAiConfig"),
@@ -50,8 +50,8 @@ impl Client {
         if let Some(project_id) = config.project_id {
             openai_config = openai_config.with_project_id(project_id);
         }
-        if let Some(api_key) = config.api_key {
-            openai_config = openai_config.with_api_key(api_key);
+        if let Some(key) = api_key {
+            openai_config = openai_config.with_api_key(key);
         } else {
             // Verify API key is set in environment if not provided in config
             if std::env::var("OPENAI_API_KEY").is_err() {

src/llm/openrouter.rs

Lines changed: 2 additions & 6 deletions
@@ -4,14 +4,10 @@ use async_openai::config::OpenAIConfig;
 pub use super::openai::Client;
 
 impl Client {
-    pub async fn new_openrouter(address: Option<String>, api_config: Option<super::LlmApiConfig>) -> anyhow::Result<Self> {
+    pub async fn new_openrouter(address: Option<String>, api_key: Option<String>, _api_config: Option<super::LlmApiConfig>) -> anyhow::Result<Self> {
         let address = address.unwrap_or_else(|| "https://openrouter.ai/api/v1".to_string());
 
-        let api_key = if let Some(super::LlmApiConfig::OpenRouter(config)) = api_config {
-            config.api_key.or_else(|| std::env::var("OPENROUTER_API_KEY").ok())
-        } else {
-            std::env::var("OPENROUTER_API_KEY").ok()
-        };
+        let api_key = api_key.or_else(|| std::env::var("OPENROUTER_API_KEY").ok());
 
         let mut config = OpenAIConfig::new().with_api_base(address);
         if let Some(api_key) = api_key {

src/llm/vllm.rs

Lines changed: 2 additions & 6 deletions
@@ -4,14 +4,10 @@ use async_openai::config::OpenAIConfig;
 pub use super::openai::Client;
 
 impl Client {
-    pub async fn new_vllm(address: Option<String>, api_config: Option<super::LlmApiConfig>) -> anyhow::Result<Self> {
+    pub async fn new_vllm(address: Option<String>, api_key: Option<String>, _api_config: Option<super::LlmApiConfig>) -> anyhow::Result<Self> {
         let address = address.unwrap_or_else(|| "http://127.0.0.1:8000/v1".to_string());
 
-        let api_key = if let Some(super::LlmApiConfig::Vllm(config)) = api_config {
-            config.api_key.or_else(|| std::env::var("VLLM_API_KEY").ok())
-        } else {
-            std::env::var("VLLM_API_KEY").ok()
-        };
+        let api_key = api_key.or_else(|| std::env::var("VLLM_API_KEY").ok());
 
         let mut config = OpenAIConfig::new().with_api_base(address);
         if let Some(api_key) = api_key {

src/llm/voyage.rs

Lines changed: 3 additions & 9 deletions
@@ -33,19 +33,13 @@ pub struct Client {
 }
 
 impl Client {
-    pub fn new(address: Option<String>, api_config: Option<super::LlmApiConfig>) -> Result<Self> {
+    pub fn new(address: Option<String>, api_key: Option<String>, _api_config: Option<super::LlmApiConfig>) -> Result<Self> {
         if address.is_some() {
             api_bail!("Voyage AI doesn't support custom API address");
         }
 
-        let api_key = if let Some(super::LlmApiConfig::Voyage(config)) = api_config {
-            if let Some(key) = config.api_key {
-                key
-            } else {
-                std::env::var("VOYAGE_API_KEY").map_err(|_| {
-                    anyhow::anyhow!("VOYAGE_API_KEY environment variable must be set")
-                })?
-            }
+        let api_key = if let Some(key) = api_key {
+            key
         } else {
             std::env::var("VOYAGE_API_KEY")
                 .map_err(|_| anyhow::anyhow!("VOYAGE_API_KEY environment variable must be set"))?
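
After this commit every provider resolves the key the same way: an explicit spec-level api_key wins, otherwise the client falls back to its environment variable (required for OpenAI, Anthropic, Gemini, and Voyage; optional for LiteLLM, OpenRouter, and vLLM, which may run without auth). A small sketch of the fallback path, with the same hedged names as above:

import os
from cocoindex.llm import LlmSpec, LlmApiType

# Leaving api_key unset defers to VOYAGE_API_KEY on the Rust side.
os.environ.setdefault("VOYAGE_API_KEY", "...")   # placeholder value
spec = LlmSpec(
    api_type=LlmApiType.VOYAGE,   # enum member name assumed for illustration
    model="voyage-3",             # placeholder model name
)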
