Skip to content

Commit f7b4dcb

Browse files
committed
Fix formatting issues for GitHub Actions
- Fixed Python line length issue in llm.py by breaking long type annotation
- Fixed Rust function signature formatting in all LLM client files
- Fixed long function call formatting in embed_text.rs
- All formatting now complies with project standards
1 parent 0059fd8 commit f7b4dcb

File tree

8 files changed

+48
-9
lines changed

8 files changed

+48
-9
lines changed

python/cocoindex/llm.py

Lines changed: 11 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -86,4 +86,14 @@ class LlmSpec:
8686
model: str
8787
address: str | None = None
8888
api_key: str | None = None
89-
api_config: VertexAiConfig | OpenAiConfig | AnthropicConfig | GeminiConfig | VoyageConfig | LiteLlmConfig | OpenRouterConfig | VllmConfig | None = None
89+
api_config: (
90+
VertexAiConfig
91+
| OpenAiConfig
92+
| AnthropicConfig
93+
| GeminiConfig
94+
| VoyageConfig
95+
| LiteLlmConfig
96+
| OpenRouterConfig
97+
| VllmConfig
98+
| None
99+
) = None

src/llm/gemini.rs

Lines changed: 5 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -30,7 +30,11 @@ pub struct AiStudioClient {
3030
}
3131

3232
impl AiStudioClient {
33-
pub fn new(address: Option<String>, api_key: Option<String>, _api_config: Option<super::LlmApiConfig>) -> Result<Self> {
33+
pub fn new(
34+
address: Option<String>,
35+
api_key: Option<String>,
36+
_api_config: Option<super::LlmApiConfig>,
37+
) -> Result<Self> {
3438
if address.is_some() {
3539
api_bail!("Gemini doesn't support custom API address");
3640
}

src/llm/litellm.rs

Lines changed: 5 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -4,7 +4,11 @@ use async_openai::config::OpenAIConfig;
44
pub use super::openai::Client;
55

66
impl Client {
7-
pub async fn new_litellm(address: Option<String>, api_key: Option<String>, _api_config: Option<super::LlmApiConfig>) -> anyhow::Result<Self> {
7+
pub async fn new_litellm(
8+
address: Option<String>,
9+
api_key: Option<String>,
10+
_api_config: Option<super::LlmApiConfig>,
11+
) -> anyhow::Result<Self> {
812
let address = address.unwrap_or_else(|| "http://127.0.0.1:4000".to_string());
913

1014
let api_key = api_key.or_else(|| std::env::var("LITELLM_API_KEY").ok());

src/llm/openai.rs

Lines changed: 5 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -33,7 +33,11 @@ impl Client {
3333
Self { client }
3434
}
3535

36-
pub fn new(address: Option<String>, api_key: Option<String>, api_config: Option<super::LlmApiConfig>) -> Result<Self> {
36+
pub fn new(
37+
address: Option<String>,
38+
api_key: Option<String>,
39+
api_config: Option<super::LlmApiConfig>,
40+
) -> Result<Self> {
3741
let config = match api_config {
3842
Some(super::LlmApiConfig::OpenAi(config)) => config,
3943
Some(_) => api_bail!("unexpected config type, expected OpenAiConfig"),

src/llm/openrouter.rs

Lines changed: 5 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -4,7 +4,11 @@ use async_openai::config::OpenAIConfig;
44
pub use super::openai::Client;
55

66
impl Client {
7-
pub async fn new_openrouter(address: Option<String>, api_key: Option<String>, _api_config: Option<super::LlmApiConfig>) -> anyhow::Result<Self> {
7+
pub async fn new_openrouter(
8+
address: Option<String>,
9+
api_key: Option<String>,
10+
_api_config: Option<super::LlmApiConfig>,
11+
) -> anyhow::Result<Self> {
812
let address = address.unwrap_or_else(|| "https://openrouter.ai/api/v1".to_string());
913

1014
let api_key = api_key.or_else(|| std::env::var("OPENROUTER_API_KEY").ok());

src/llm/vllm.rs

Lines changed: 5 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -4,7 +4,11 @@ use async_openai::config::OpenAIConfig;
44
pub use super::openai::Client;
55

66
impl Client {
7-
pub async fn new_vllm(address: Option<String>, api_key: Option<String>, _api_config: Option<super::LlmApiConfig>) -> anyhow::Result<Self> {
7+
pub async fn new_vllm(
8+
address: Option<String>,
9+
api_key: Option<String>,
10+
_api_config: Option<super::LlmApiConfig>,
11+
) -> anyhow::Result<Self> {
812
let address = address.unwrap_or_else(|| "http://127.0.0.1:8000/v1".to_string());
913

1014
let api_key = api_key.or_else(|| std::env::var("VLLM_API_KEY").ok());

src/llm/voyage.rs

Lines changed: 5 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -33,7 +33,11 @@ pub struct Client {
3333
}
3434

3535
impl Client {
36-
pub fn new(address: Option<String>, api_key: Option<String>, _api_config: Option<super::LlmApiConfig>) -> Result<Self> {
36+
pub fn new(
37+
address: Option<String>,
38+
api_key: Option<String>,
39+
_api_config: Option<super::LlmApiConfig>,
40+
) -> Result<Self> {
3741
if address.is_some() {
3842
api_bail!("Voyage AI doesn't support custom API address");
3943
}

src/ops/functions/embed_text.rs

Lines changed: 7 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -129,8 +129,13 @@ impl SimpleFunctionFactoryBase for Factory {
129129
spec.api_config.clone()
130130
};
131131

132-
let client =
133-
new_llm_embedding_client(spec.api_type, spec.address.clone(), spec.api_key.clone(), api_config).await?;
132+
let client = new_llm_embedding_client(
133+
spec.api_type,
134+
spec.address.clone(),
135+
spec.api_key.clone(),
136+
api_config,
137+
)
138+
.await?;
134139
let output_dimension = match spec.output_dimension {
135140
Some(output_dimension) => output_dimension,
136141
None => {

0 commit comments

Comments (0)