@@ -120,7 +120,7 @@ It's possible to configure ECA to be aware of custom LLM providers if they follo
 
 Example:
 
-`~/.config/eca/config.javascript`
+`~/.config/eca/config.json`
 ```javascript
 {
   "customProviders": {
@@ -150,72 +150,75 @@ Example:
 
 _* Either the `url` or `urlEnv` option is required, and either the `key` or `keyEnv` option is required._
 
-### Example: Custom LiteLLM server
 
-```javascript
-{
-  "customProviders": {
-    "litellm": {
-      "api": "openai-responses",
-      "url": "https://litellm.my-company.com",
-      "key": "your-api-key",
-      "models": ["gpt-5", "claude-3-sonnet-20240229", "llama-3-70b"],
-      "defaultModel": "gpt-5"
-    }
-  }
-}
-```
-
-### Example: Using environment variables
-
-```javascript
-{
-  "customProviders": {
-    "enterprise": {
-      "api": "anthropic",
-      "urlEnv": "ENTERPRISE_LLM_URL",
-      "keyEnv": "ENTERPRISE_LLM_KEY",
-      "models": ["claude-3-opus-20240229", "claude-3-sonnet-20240229"],
-      "defaultModel": "claude-3-sonnet-20240229"
-    }
-  }
-}
-```
+After configuring custom providers, the models will be available as `provider/model` (e.g., `openrouter/anthropic/claude-3.5-sonnet`, `deepseek/deepseek-chat`).
 
-### Example: OpenRouter
+### Examples
 
-[OpenRouter](https://openrouter.ai) provides access to many models through a unified API:
+=== "LiteLLM"
 
-```javascript
-{
-  "customProviders": {
-    "openrouter": {
-      "api": "openai-chat",
-      "url": "https://openrouter.ai/api/v1",
-      "keyEnv": "OPENROUTER_API_KEY",
-      "models": ["anthropic/claude-3.5-sonnet", "openai/gpt-4-turbo", "meta-llama/llama-3.1-405b"],
-      "defaultModel": "anthropic/claude-3.5-sonnet"
+    ```javascript
+    {
+      "customProviders": {
+        "litellm": {
+          "api": "openai-responses",
+          "url": "https://litellm.my-company.com",
+          "key": "your-api-key",
+          "models": ["gpt-5", "claude-3-sonnet-20240229", "llama-3-70b"],
+          "defaultModel": "gpt-5"
+        }
+      }
     }
-  }
-}
-```
-
-### Example: DeepSeek
-
-[DeepSeek](https://deepseek.com) offers powerful reasoning and coding models:
-
-```javascript
-{
-  "customProviders": {
-    "deepseek": {
-      "api": "openai-chat",
-      "url": "https://api.deepseek.com",
-      "keyEnv": "DEEPSEEK_API_KEY",
-      "models": ["deepseek-chat", "deepseek-coder", "deepseek-reasoner"],
-      "defaultModel": "deepseek-chat"
+    ```
+
+=== "Environment variables"
+
+    ```javascript
+    {
+      "customProviders": {
+        "enterprise": {
+          "api": "anthropic",
+          "urlEnv": "ENTERPRISE_LLM_URL",
+          "keyEnv": "ENTERPRISE_LLM_KEY",
+          "models": ["claude-3-opus-20240229", "claude-3-sonnet-20240229"],
+          "defaultModel": "claude-3-sonnet-20240229"
+        }
+      }
     }
-  }
-}
-```
-
-After configuring custom providers, the models will be available as `provider/model` (e.g., `openrouter/anthropic/claude-3.5-sonnet`, `deepseek/deepseek-chat`).
+    ```
+
+=== "OpenRouter"
+
+    [OpenRouter](https://openrouter.ai) provides access to many models through a unified API:
+
+    ```javascript
+    {
+      "customProviders": {
+        "openrouter": {
+          "api": "openai-chat",
+          "url": "https://openrouter.ai/api/v1",
+          "keyEnv": "OPENROUTER_API_KEY",
+          "models": ["anthropic/claude-3.5-sonnet", "openai/gpt-4-turbo", "meta-llama/llama-3.1-405b"],
+          "defaultModel": "anthropic/claude-3.5-sonnet"
+        }
+      }
+    }
+    ```
+
+=== "DeepSeek"
+
+    [DeepSeek](https://deepseek.com) offers powerful reasoning and coding models:
+
+    ```javascript
+    {
+      "customProviders": {
+        "deepseek": {
+          "api": "openai-chat",
+          "url": "https://api.deepseek.com",
+          "keyEnv": "DEEPSEEK_API_KEY",
+          "models": ["deepseek-chat", "deepseek-coder", "deepseek-reasoner"],
+          "defaultModel": "deepseek-chat"
+        }
+      }
+    }
+    ```
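The tabbed examples added here cover hosted providers, but the same `customProviders` schema shown in this diff also fits a locally hosted OpenAI-compatible server. The sketch below is illustrative only and not part of the ECA docs: the provider name `local`, the `http://localhost:11434/v1` URL, the placeholder key, and the model names are assumptions to adapt to your own setup, and the comments are for explanation only and must be stripped from a real JSON config file.

```javascript
// Hypothetical ~/.config/eca/config.json entry (sketch, not from the ECA docs):
// a custom provider named "local" pointing at an assumed OpenAI-compatible
// endpoint running on localhost. Adjust url, key, and models to your server.
{
  "customProviders": {
    "local": {
      "api": "openai-chat",               // one of the api values shown in the examples above
      "url": "http://localhost:11434/v1", // assumed local OpenAI-compatible endpoint
      "key": "placeholder",               // `key` or `keyEnv` is required, even if the server ignores it
      "models": ["llama3.1", "qwen2.5-coder"],
      "defaultModel": "llama3.1"
    }
  }
}
```

With such an entry, the models would be selectable as `local/llama3.1` and `local/qwen2.5-coder`, following the `provider/model` naming described above.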