Skip to content

Commit 6b4aaaa

Browse files
authored
Feature: Enable passing in api_key programmatically, in addition to reading it from env (#1134)
* Feature: Add tests for json_schema.rs * Feature: Enable passing in api_key programmatically, in addition to reading it from env * Moved api_key to the common LlmSpec layer * Fix formatting issues for GitHub Actions - Fixed a Python line-length issue in llm.py by breaking a long type annotation - Fixed Rust function-signature formatting in all LLM client files - Fixed long function-call formatting in embed_text.rs - All formatting now complies with project standards * Fix type-mismatch error in embed_text.rs - Fixed api_bail! usage in a context expecting an LlmApiConfig return type - Replaced unwrap_or_else with proper if-let pattern matching - Resolves a compilation error in the GitHub Actions build test * Fix trailing-whitespace formatting issues - Removed trailing whitespace from all LLM client files - Fixed formatting issues in gemini.rs, litellm.rs, openai.rs, openrouter.rs, vllm.rs - Fixed trailing whitespace in embed_text.rs - All files now comply with cargo fmt standards * Feature: Enable passing in api_key programmatically, in addition to reading it from env * Feature: Enable passing in api_key programmatically, in addition to reading it from env * Feature: Enable passing in api_key programmatically, in addition to reading it from env * Feature: Enable passing in api_key programmatically, in addition to reading it from env * Feature: Enable passing in api_key programmatically, in addition to reading it from env * Feature: Enable passing in api_key programmatically, in addition to reading it from env
1 parent 0d675ea commit 6b4aaaa

File tree

13 files changed

+132
-53
lines changed

13 files changed

+132
-53
lines changed

docs/docs/ai/llm.mdx

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -234,6 +234,12 @@ To use the Vertex AI API:
234234
gcloud auth application-default login
235235
```
236236

237+
:::note API Key Not Supported
238+
239+
Vertex AI does **not** support the `api_key` parameter. Vertex AI uses Application Default Credentials (ADC) for authentication instead of API keys. If you provide an `api_key` parameter when using `LlmApiType.VERTEX_AI`, an error will be raised.
240+
241+
:::
242+
237243
Spec for Vertex AI takes additional `api_config` field, in type `cocoindex.llm.VertexAiConfig` with the following fields:
238244
- `project` (type: `str`, required): The project ID of the Google Cloud project.
239245
- `region` (type: `str`, optional): The region of the Google Cloud project. Use `global` if not specified.

python/cocoindex/functions/_engine_builtin_specs.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
from typing import Literal
55

66
from .. import llm, op
7+
from ..auth_registry import TransientAuthEntryReference
78

89

910
class ParseJson(op.FunctionSpec):
@@ -56,6 +57,7 @@ class EmbedText(op.FunctionSpec):
5657
output_dimension: int | None = None
5758
task_type: str | None = None
5859
api_config: llm.VertexAiConfig | None = None
60+
api_key: TransientAuthEntryReference[str] | None = None
5961

6062

6163
class ExtractByLlm(op.FunctionSpec):

python/cocoindex/llm.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
from dataclasses import dataclass
22
from enum import Enum
33

4+
from .auth_registry import TransientAuthEntryReference
5+
46

57
class LlmApiType(Enum):
68
"""The type of LLM API to use."""
@@ -44,4 +46,5 @@ class LlmSpec:
4446
api_type: LlmApiType
4547
model: str
4648
address: str | None = None
49+
api_key: TransientAuthEntryReference[str] | None = None
4750
api_config: VertexAiConfig | OpenAiConfig | None = None

rust/cocoindex/src/llm/anthropic.rs

Lines changed: 9 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -14,14 +14,19 @@ pub struct Client {
1414
}
1515

1616
impl Client {
17-
pub async fn new(address: Option<String>) -> Result<Self> {
17+
pub async fn new(address: Option<String>, api_key: Option<String>) -> Result<Self> {
1818
if address.is_some() {
1919
api_bail!("Anthropic doesn't support custom API address");
2020
}
21-
let api_key = match std::env::var("ANTHROPIC_API_KEY") {
22-
Ok(val) => val,
23-
Err(_) => api_bail!("ANTHROPIC_API_KEY environment variable must be set"),
21+
22+
let api_key = if let Some(key) = api_key {
23+
key
24+
} else {
25+
std::env::var("ANTHROPIC_API_KEY").map_err(|_| {
26+
anyhow::anyhow!("ANTHROPIC_API_KEY environment variable must be set")
27+
})?
2428
};
29+
2530
Ok(Self {
2631
api_key,
2732
client: reqwest::Client::new(),

rust/cocoindex/src/llm/gemini.rs

Lines changed: 14 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -34,14 +34,18 @@ pub struct AiStudioClient {
3434
}
3535

3636
impl AiStudioClient {
37-
pub fn new(address: Option<String>) -> Result<Self> {
37+
pub fn new(address: Option<String>, api_key: Option<String>) -> Result<Self> {
3838
if address.is_some() {
3939
api_bail!("Gemini doesn't support custom API address");
4040
}
41-
let api_key = match std::env::var("GEMINI_API_KEY") {
42-
Ok(val) => val,
43-
Err(_) => api_bail!("GEMINI_API_KEY environment variable must be set"),
41+
42+
let api_key = if let Some(key) = api_key {
43+
key
44+
} else {
45+
std::env::var("GEMINI_API_KEY")
46+
.map_err(|_| anyhow::anyhow!("GEMINI_API_KEY environment variable must be set"))?
4447
};
48+
4549
Ok(Self {
4650
api_key,
4751
client: reqwest::Client::new(),
@@ -253,11 +257,17 @@ static SHARED_RETRY_THROTTLER: LazyLock<SharedRetryThrottler> =
253257
impl VertexAiClient {
254258
pub async fn new(
255259
address: Option<String>,
260+
api_key: Option<String>,
256261
api_config: Option<super::LlmApiConfig>,
257262
) -> Result<Self> {
258263
if address.is_some() {
259264
api_bail!("VertexAi API address is not supported for VertexAi API type");
260265
}
266+
if api_key.is_some() {
267+
api_bail!(
268+
"VertexAi API key is not supported for VertexAi API type. Vertex AI uses Application Default Credentials (ADC) for authentication. Please set up ADC using 'gcloud auth application-default login' instead."
269+
);
270+
}
261271
let Some(super::LlmApiConfig::VertexAi(config)) = api_config else {
262272
api_bail!("VertexAi API config is required for VertexAi API type");
263273
};

rust/cocoindex/src/llm/litellm.rs

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,14 @@ use async_openai::config::OpenAIConfig;
44
pub use super::openai::Client;
55

66
impl Client {
7-
pub async fn new_litellm(address: Option<String>) -> anyhow::Result<Self> {
7+
pub async fn new_litellm(
8+
address: Option<String>,
9+
api_key: Option<String>,
10+
) -> anyhow::Result<Self> {
811
let address = address.unwrap_or_else(|| "http://127.0.0.1:4000".to_string());
9-
let api_key = std::env::var("LITELLM_API_KEY").ok();
12+
13+
let api_key = api_key.or_else(|| std::env::var("LITELLM_API_KEY").ok());
14+
1015
let mut config = OpenAIConfig::new().with_api_base(address);
1116
if let Some(api_key) = api_key {
1217
config = config.with_api_key(api_key);

rust/cocoindex/src/llm/mod.rs

Lines changed: 26 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,7 @@ pub struct LlmSpec {
4545
pub api_type: LlmApiType,
4646
pub address: Option<String>,
4747
pub model: String,
48+
pub api_key: Option<spec::AuthEntryReference<String>>,
4849
pub api_config: Option<LlmApiConfig>,
4950
}
5051

@@ -119,61 +120,64 @@ mod voyage;
119120
pub async fn new_llm_generation_client(
120121
api_type: LlmApiType,
121122
address: Option<String>,
123+
api_key: Option<String>,
122124
api_config: Option<LlmApiConfig>,
123125
) -> Result<Box<dyn LlmGenerationClient>> {
124126
let client = match api_type {
125127
LlmApiType::Ollama => {
126128
Box::new(ollama::Client::new(address).await?) as Box<dyn LlmGenerationClient>
127129
}
128-
LlmApiType::OpenAi => {
129-
Box::new(openai::Client::new(address, api_config)?) as Box<dyn LlmGenerationClient>
130-
}
130+
LlmApiType::OpenAi => Box::new(openai::Client::new(address, api_key, api_config)?)
131+
as Box<dyn LlmGenerationClient>,
131132
LlmApiType::Gemini => {
132-
Box::new(gemini::AiStudioClient::new(address)?) as Box<dyn LlmGenerationClient>
133+
Box::new(gemini::AiStudioClient::new(address, api_key)?) as Box<dyn LlmGenerationClient>
133134
}
134-
LlmApiType::VertexAi => Box::new(gemini::VertexAiClient::new(address, api_config).await?)
135-
as Box<dyn LlmGenerationClient>,
136-
LlmApiType::Anthropic => {
137-
Box::new(anthropic::Client::new(address).await?) as Box<dyn LlmGenerationClient>
135+
LlmApiType::VertexAi => {
136+
Box::new(gemini::VertexAiClient::new(address, api_key, api_config).await?)
137+
as Box<dyn LlmGenerationClient>
138138
}
139+
LlmApiType::Anthropic => Box::new(anthropic::Client::new(address, api_key).await?)
140+
as Box<dyn LlmGenerationClient>,
139141
LlmApiType::Bedrock => {
140142
Box::new(bedrock::Client::new(address).await?) as Box<dyn LlmGenerationClient>
141143
}
142-
LlmApiType::LiteLlm => {
143-
Box::new(litellm::Client::new_litellm(address).await?) as Box<dyn LlmGenerationClient>
144-
}
145-
LlmApiType::OpenRouter => Box::new(openrouter::Client::new_openrouter(address).await?)
144+
LlmApiType::LiteLlm => Box::new(litellm::Client::new_litellm(address, api_key).await?)
146145
as Box<dyn LlmGenerationClient>,
146+
LlmApiType::OpenRouter => {
147+
Box::new(openrouter::Client::new_openrouter(address, api_key).await?)
148+
as Box<dyn LlmGenerationClient>
149+
}
147150
LlmApiType::Voyage => {
148151
api_bail!("Voyage is not supported for generation")
149152
}
150-
LlmApiType::Vllm => {
151-
Box::new(vllm::Client::new_vllm(address).await?) as Box<dyn LlmGenerationClient>
152-
}
153+
LlmApiType::Vllm => Box::new(vllm::Client::new_vllm(address, api_key).await?)
154+
as Box<dyn LlmGenerationClient>,
153155
};
154156
Ok(client)
155157
}
156158

157159
pub async fn new_llm_embedding_client(
158160
api_type: LlmApiType,
159161
address: Option<String>,
162+
api_key: Option<String>,
160163
api_config: Option<LlmApiConfig>,
161164
) -> Result<Box<dyn LlmEmbeddingClient>> {
162165
let client = match api_type {
163166
LlmApiType::Ollama => {
164167
Box::new(ollama::Client::new(address).await?) as Box<dyn LlmEmbeddingClient>
165168
}
166169
LlmApiType::Gemini => {
167-
Box::new(gemini::AiStudioClient::new(address)?) as Box<dyn LlmEmbeddingClient>
168-
}
169-
LlmApiType::OpenAi => {
170-
Box::new(openai::Client::new(address, api_config)?) as Box<dyn LlmEmbeddingClient>
170+
Box::new(gemini::AiStudioClient::new(address, api_key)?) as Box<dyn LlmEmbeddingClient>
171171
}
172+
LlmApiType::OpenAi => Box::new(openai::Client::new(address, api_key, api_config)?)
173+
as Box<dyn LlmEmbeddingClient>,
172174
LlmApiType::Voyage => {
173-
Box::new(voyage::Client::new(address)?) as Box<dyn LlmEmbeddingClient>
175+
Box::new(voyage::Client::new(address, api_key)?) as Box<dyn LlmEmbeddingClient>
176+
}
177+
LlmApiType::VertexAi => {
178+
Box::new(gemini::VertexAiClient::new(address, api_key, api_config).await?)
179+
as Box<dyn LlmEmbeddingClient>
174180
}
175-
LlmApiType::VertexAi => Box::new(gemini::VertexAiClient::new(address, api_config).await?)
176-
as Box<dyn LlmEmbeddingClient>,
177181
LlmApiType::OpenRouter
178182
| LlmApiType::LiteLlm
179183
| LlmApiType::Vllm

rust/cocoindex/src/llm/openai.rs

Lines changed: 13 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,11 @@ impl Client {
3131
Self { client }
3232
}
3333

34-
pub fn new(address: Option<String>, api_config: Option<super::LlmApiConfig>) -> Result<Self> {
34+
pub fn new(
35+
address: Option<String>,
36+
api_key: Option<String>,
37+
api_config: Option<super::LlmApiConfig>,
38+
) -> Result<Self> {
3539
let config = match api_config {
3640
Some(super::LlmApiConfig::OpenAi(config)) => config,
3741
Some(_) => api_bail!("unexpected config type, expected OpenAiConfig"),
@@ -48,13 +52,16 @@ impl Client {
4852
if let Some(project_id) = config.project_id {
4953
openai_config = openai_config.with_project_id(project_id);
5054
}
51-
52-
// Verify API key is set
53-
if std::env::var("OPENAI_API_KEY").is_err() {
54-
api_bail!("OPENAI_API_KEY environment variable must be set");
55+
if let Some(key) = api_key {
56+
openai_config = openai_config.with_api_key(key);
57+
} else {
58+
// Verify API key is set in environment if not provided in config
59+
if std::env::var("OPENAI_API_KEY").is_err() {
60+
api_bail!("OPENAI_API_KEY environment variable must be set");
61+
}
5562
}
63+
5664
Ok(Self {
57-
// OpenAI client will use OPENAI_API_KEY and OPENAI_API_BASE env variables by default
5865
client: OpenAIClient::with_config(openai_config),
5966
})
6067
}

rust/cocoindex/src/llm/openrouter.rs

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,14 @@ use async_openai::config::OpenAIConfig;
44
pub use super::openai::Client;
55

66
impl Client {
7-
pub async fn new_openrouter(address: Option<String>) -> anyhow::Result<Self> {
7+
pub async fn new_openrouter(
8+
address: Option<String>,
9+
api_key: Option<String>,
10+
) -> anyhow::Result<Self> {
811
let address = address.unwrap_or_else(|| "https://openrouter.ai/api/v1".to_string());
9-
let api_key = std::env::var("OPENROUTER_API_KEY").ok();
12+
13+
let api_key = api_key.or_else(|| std::env::var("OPENROUTER_API_KEY").ok());
14+
1015
let mut config = OpenAIConfig::new().with_api_base(address);
1116
if let Some(api_key) = api_key {
1217
config = config.with_api_key(api_key);

rust/cocoindex/src/llm/vllm.rs

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,14 @@ use async_openai::config::OpenAIConfig;
44
pub use super::openai::Client;
55

66
impl Client {
7-
pub async fn new_vllm(address: Option<String>) -> anyhow::Result<Self> {
7+
pub async fn new_vllm(
8+
address: Option<String>,
9+
api_key: Option<String>,
10+
) -> anyhow::Result<Self> {
811
let address = address.unwrap_or_else(|| "http://127.0.0.1:8000/v1".to_string());
9-
let api_key = std::env::var("VLLM_API_KEY").ok();
12+
13+
let api_key = api_key.or_else(|| std::env::var("VLLM_API_KEY").ok());
14+
1015
let mut config = OpenAIConfig::new().with_api_base(address);
1116
if let Some(api_key) = api_key {
1217
config = config.with_api_key(api_key);

0 commit comments

Comments
 (0)