@@ -130,7 +130,112 @@ export const MODEL_LIST = {
     'mistral-moderation-2411',
     'mistral-moderation-latest'
   ],
-  deepseek: ['deepseek-chat', 'deepseek-reasoner']
+  deepseek: ['deepseek-chat', 'deepseek-reasoner'],
+
+  // AI/ML API available chat-completion models
+  // https://api.aimlapi.com/v1/models
+  aimlapi: [
+    'openai/gpt-4o',
+    'gpt-4o-2024-08-06',
+    'gpt-4o-2024-05-13',
+    'gpt-4o-mini',
+    'gpt-4o-mini-2024-07-18',
+    'chatgpt-4o-latest',
+    'gpt-4-turbo',
+    'gpt-4-turbo-2024-04-09',
+    'gpt-4',
+    'gpt-4-0125-preview',
+    'gpt-4-1106-preview',
+    'gpt-3.5-turbo',
+    'gpt-3.5-turbo-0125',
+    'gpt-3.5-turbo-1106',
+    'o1-preview',
+    'o1-preview-2024-09-12',
+    'o1-mini',
+    'o1-mini-2024-09-12',
+    'o3-mini',
+    'gpt-4o-audio-preview',
+    'gpt-4o-mini-audio-preview',
+    'gpt-4o-search-preview',
+    'gpt-4o-mini-search-preview',
+    'openai/gpt-4.1-2025-04-14',
+    'openai/gpt-4.1-mini-2025-04-14',
+    'openai/gpt-4.1-nano-2025-04-14',
+    'openai/o4-mini-2025-04-16',
+    'openai/o3-2025-04-16',
+    'o1',
+    'openai/o3-pro',
+    'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo',
+    'google/gemma-2-27b-it',
+    'meta-llama/Llama-Vision-Free',
+    'Qwen/Qwen2-72B-Instruct',
+    'mistralai/Mixtral-8x7B-Instruct-v0.1',
+    'nvidia/Llama-3.1-Nemotron-70B-Instruct-HF',
+    'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
+    'meta-llama/Llama-3.3-70B-Instruct-Turbo',
+    'meta-llama/Llama-3.2-3B-Instruct-Turbo',
+    'meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo',
+    'meta-llama/Llama-Guard-3-11B-Vision-Turbo',
+    'Qwen/Qwen2.5-7B-Instruct-Turbo',
+    'Qwen/Qwen2.5-Coder-32B-Instruct',
+    'meta-llama/Meta-Llama-3-8B-Instruct-Lite',
+    'meta-llama/Llama-3-8b-chat-hf',
+    'meta-llama/Llama-3-70b-chat-hf',
+    'Qwen/Qwen2.5-72B-Instruct-Turbo',
+    'Qwen/QwQ-32B',
+    'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo',
+    'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo',
+    'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo',
+    'mistralai/Mistral-7B-Instruct-v0.2',
+    'meta-llama/LlamaGuard-2-8b',
+    'mistralai/Mistral-7B-Instruct-v0.1',
+    'mistralai/Mistral-7B-Instruct-v0.3',
+    'meta-llama/Meta-Llama-Guard-3-8B',
+    'meta-llama/llama-4-scout',
+    'meta-llama/llama-4-maverick',
+    'Qwen/Qwen3-235B-A22B-fp8-tput',
+    'claude-3-opus-20240229',
+    'claude-3-haiku-20240307',
+    'claude-3-5-sonnet-20240620',
+    'claude-3-5-sonnet-20241022',
+    'claude-3-5-haiku-20241022',
+    'claude-3-7-sonnet-20250219',
+    'claude-sonnet-4-20250514',
+    'claude-opus-4-20250514',
+    'google/gemini-2.0-flash-exp',
+    'google/gemini-2.0-flash',
+    'google/gemini-2.5-pro',
+    'google/gemini-2.5-flash',
+    'deepseek-chat',
+    'deepseek-reasoner',
+    'qwen-max',
+    'qwen-plus',
+    'qwen-turbo',
+    'qwen-max-2025-01-25',
+    'mistralai/mistral-tiny',
+    'mistralai/mistral-nemo',
+    'anthracite-org/magnum-v4-72b',
+    'nvidia/llama-3.1-nemotron-70b-instruct',
+    'cohere/command-r-plus',
+    'mistralai/codestral-2501',
+    'google/gemma-3-4b-it',
+    'google/gemma-3-12b-it',
+    'google/gemma-3-27b-it',
+    'google/gemini-2.5-flash-lite-preview',
+    'deepseek/deepseek-prover-v2',
+    'google/gemma-3n-e4b-it',
+    'cohere/command-a',
+    'MiniMax-Text-01',
+    'abab6.5s-chat',
+    'minimax/m1',
+    'bagoodex/bagoodex-search-v1',
+    'moonshot/kimi-k2-preview',
+    'perplexity/sonar',
+    'perplexity/sonar-pro',
+    'x-ai/grok-4-07-09',
+    'x-ai/grok-3-beta',
+    'x-ai/grok-3-mini-beta',
+  ],
 };
 
 const getDefaultModel = (provider: string | undefined): string => {
@@ -149,6 +254,8 @@ const getDefaultModel = (provider: string | undefined): string => {
       return MODEL_LIST.mistral[0];
     case 'deepseek':
       return MODEL_LIST.deepseek[0];
+    case 'aimlapi':
+      return MODEL_LIST.aimlapi[0];
     default:
       return MODEL_LIST.openai[0];
   }
@@ -322,9 +429,10 @@ export const configValidators = {
         'test',
         'flowise',
         'groq',
-        'deepseek'
+        'deepseek',
+        'aimlapi',
       ].includes(value) || value.startsWith('ollama'),
-      `${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek' or 'openai' (default)`
+      `${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek', 'aimlapi' or 'openai' (default)`
     );
 
     return value;
@@ -372,7 +480,8 @@ export enum OCO_AI_PROVIDER_ENUM {
   GROQ = 'groq',
   MISTRAL = 'mistral',
   MLX = 'mlx',
-  DEEPSEEK = 'deepseek'
+  DEEPSEEK = 'deepseek',
+  AIMLAPI = 'aimlapi',
 }
 
 export type ConfigType = {
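
For reference, a minimal sketch of how the new provider resolves its default model once this patch is applied. The `MODEL_LIST` and `getDefaultModel` re-declarations below are trimmed placeholders standing in for the real definitions in the patched file, not part of the diff itself:

```ts
// Sketch only: trimmed stand-ins for the real MODEL_LIST / getDefaultModel
// from the patched config, illustrating the new 'aimlapi' case.
const MODEL_LIST = {
  openai: ['gpt-4o-mini'], // placeholder for the full openai list
  deepseek: ['deepseek-chat', 'deepseek-reasoner'],
  aimlapi: ['openai/gpt-4o' /* , ...the ~100 models added above */]
} as const;

const getDefaultModel = (provider: string | undefined): string => {
  switch (provider) {
    case 'deepseek':
      return MODEL_LIST.deepseek[0];
    case 'aimlapi':
      return MODEL_LIST.aimlapi[0]; // first entry wins: 'openai/gpt-4o'
    default:
      return MODEL_LIST.openai[0]; // unknown/unset providers fall back to openai
  }
};

console.log(getDefaultModel('aimlapi'));  // 'openai/gpt-4o'
console.log(getDefaultModel(undefined));  // openai fallback
```

As with the other providers, the first entry of the `aimlapi` array acts as the default model, so its ordering is significant.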