File tree Expand file tree Collapse file tree 4 files changed +47
-2
lines changed Expand file tree Collapse file tree 4 files changed +47
-2
lines changed Original file line number Diff line number Diff line change @@ -175,7 +175,7 @@ litellm --config config.yml
175175
176176``` python
177177cocoindex.LlmSpec(
178- api_type = cocoindex.LlmApiType.LITELLM ,
178+ api_type = cocoindex.LlmApiType.LITE_LLM ,
179179 model = " deepseek-r1" ,
180180 address = " http://127.0.0.1:4000" , # default url of LiteLLM
181181)
@@ -186,3 +186,24 @@ cocoindex.LlmSpec(
186186
187187You can find the full list of models supported by LiteLLM [ here] ( https://docs.litellm.ai/docs/providers ) .
188188
189+ ### OpenRouter
190+
191+ To use the OpenRouter API, you need to set the environment variable `OPENROUTER_API_KEY`.
192+ You can generate the API key from [here](https://openrouter.ai/settings/keys).
193+
194+ A spec for OpenRouter looks like this:
195+
196+ <Tabs >
197+ <TabItem value = " python" label = " Python" default >
198+
199+ ``` python
200+ cocoindex.LlmSpec(
201+ api_type = cocoindex.LlmApiType.OPEN_ROUTER ,
202+ model = " deepseek/deepseek-r1:free" ,
203+ )
204+ ```
205+
206+ </TabItem >
207+ </Tabs >
208+
209+ You can find the full list of models supported by OpenRouter [here](https://openrouter.ai/models).
Original file line number Diff line number Diff line change @@ -9,7 +9,8 @@ class LlmApiType(Enum):
99 OLLAMA = "Ollama"
1010 GEMINI = "Gemini"
1111 ANTHROPIC = "Anthropic"
12- LITELLM = "LiteLlm"
12+ LITE_LLM = "LiteLlm"
13+ OPEN_ROUTER = "OpenRouter"
1314
1415
1516@dataclass
Original file line number Diff line number Diff line change @@ -14,6 +14,7 @@ pub enum LlmApiType {
1414 Gemini ,
1515 Anthropic ,
1616 LiteLlm ,
17+ OpenRouter ,
1718}
1819
1920#[ derive( Debug , Clone , Serialize , Deserialize ) ]
@@ -58,6 +59,7 @@ mod gemini;
5859mod ollama;
5960mod openai;
6061mod litellm;
62+ mod openrouter;
6163
6264pub async fn new_llm_generation_client ( spec : LlmSpec ) -> Result < Box < dyn LlmGenerationClient > > {
6365 let client = match spec. api_type {
@@ -76,6 +78,11 @@ pub async fn new_llm_generation_client(spec: LlmSpec) -> Result<Box<dyn LlmGener
7678 LlmApiType :: LiteLlm => {
7779 Box :: new ( litellm:: Client :: new_litellm ( spec) . await ?) as Box < dyn LlmGenerationClient >
7880 }
81+ LlmApiType :: OpenRouter => {
82+ Box :: new ( openrouter:: Client :: new_openrouter ( spec) . await ?) as Box < dyn LlmGenerationClient >
83+ }
84+
85+
7986 } ;
8087 Ok ( client)
8188}
Original file line number Diff line number Diff line change 1+ use async_openai:: config:: OpenAIConfig ;
2+ use async_openai:: Client as OpenAIClient ;
3+
4+ pub use super :: openai:: Client ;
5+
6+ impl Client {
7+ pub async fn new_openrouter ( spec : super :: LlmSpec ) -> anyhow:: Result < Self > {
8+ let address = spec. address . clone ( ) . unwrap_or_else ( || "https://openrouter.ai/api/v1" . to_string ( ) ) ;
9+ let api_key = std:: env:: var ( "OPENROUTER_API_KEY" ) . ok ( ) ;
10+ let mut config = OpenAIConfig :: new ( ) . with_api_base ( address) ;
11+ if let Some ( api_key) = api_key {
12+ config = config. with_api_key ( api_key) ;
13+ }
14+ Ok ( Client :: from_parts ( OpenAIClient :: with_config ( config) , spec. model ) )
15+ }
16+ }
You can’t perform that action at this time.
0 commit comments