@@ -133,9 +133,113 @@ export const MODEL_LIST = {
    'mistral-moderation-2411',
    'mistral-moderation-latest'
  ],
-
  deepseek: ['deepseek-chat', 'deepseek-reasoner'],

+  // AI/ML API available chat-completion models
+  // https://api.aimlapi.com/v1/models
+  aimlapi: [
+    'openai/gpt-4o',
+    'gpt-4o-2024-08-06',
+    'gpt-4o-2024-05-13',
+    'gpt-4o-mini',
+    'gpt-4o-mini-2024-07-18',
+    'chatgpt-4o-latest',
+    'gpt-4-turbo',
+    'gpt-4-turbo-2024-04-09',
+    'gpt-4',
+    'gpt-4-0125-preview',
+    'gpt-4-1106-preview',
+    'gpt-3.5-turbo',
+    'gpt-3.5-turbo-0125',
+    'gpt-3.5-turbo-1106',
+    'o1-preview',
+    'o1-preview-2024-09-12',
+    'o1-mini',
+    'o1-mini-2024-09-12',
+    'o3-mini',
+    'gpt-4o-audio-preview',
+    'gpt-4o-mini-audio-preview',
+    'gpt-4o-search-preview',
+    'gpt-4o-mini-search-preview',
+    'openai/gpt-4.1-2025-04-14',
+    'openai/gpt-4.1-mini-2025-04-14',
+    'openai/gpt-4.1-nano-2025-04-14',
+    'openai/o4-mini-2025-04-16',
+    'openai/o3-2025-04-16',
+    'o1',
+    'openai/o3-pro',
+    'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo',
+    'google/gemma-2-27b-it',
+    'meta-llama/Llama-Vision-Free',
+    'Qwen/Qwen2-72B-Instruct',
+    'mistralai/Mixtral-8x7B-Instruct-v0.1',
+    'nvidia/Llama-3.1-Nemotron-70B-Instruct-HF',
+    'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
+    'meta-llama/Llama-3.3-70B-Instruct-Turbo',
+    'meta-llama/Llama-3.2-3B-Instruct-Turbo',
+    'meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo',
+    'meta-llama/Llama-Guard-3-11B-Vision-Turbo',
+    'Qwen/Qwen2.5-7B-Instruct-Turbo',
+    'Qwen/Qwen2.5-Coder-32B-Instruct',
+    'meta-llama/Meta-Llama-3-8B-Instruct-Lite',
+    'meta-llama/Llama-3-8b-chat-hf',
+    'meta-llama/Llama-3-70b-chat-hf',
+    'Qwen/Qwen2.5-72B-Instruct-Turbo',
+    'Qwen/QwQ-32B',
+    'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo',
+    'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo',
+    'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo',
+    'mistralai/Mistral-7B-Instruct-v0.2',
+    'meta-llama/LlamaGuard-2-8b',
+    'mistralai/Mistral-7B-Instruct-v0.1',
+    'mistralai/Mistral-7B-Instruct-v0.3',
+    'meta-llama/Meta-Llama-Guard-3-8B',
+    'meta-llama/llama-4-scout',
+    'meta-llama/llama-4-maverick',
+    'Qwen/Qwen3-235B-A22B-fp8-tput',
+    'claude-3-opus-20240229',
+    'claude-3-haiku-20240307',
+    'claude-3-5-sonnet-20240620',
+    'claude-3-5-sonnet-20241022',
+    'claude-3-5-haiku-20241022',
+    'claude-3-7-sonnet-20250219',
+    'claude-sonnet-4-20250514',
+    'claude-opus-4-20250514',
+    'google/gemini-2.0-flash-exp',
+    'google/gemini-2.0-flash',
+    'google/gemini-2.5-pro',
+    'google/gemini-2.5-flash',
+    'deepseek-chat',
+    'deepseek-reasoner',
+    'qwen-max',
+    'qwen-plus',
+    'qwen-turbo',
+    'qwen-max-2025-01-25',
+    'mistralai/mistral-tiny',
+    'mistralai/mistral-nemo',
+    'anthracite-org/magnum-v4-72b',
+    'nvidia/llama-3.1-nemotron-70b-instruct',
+    'cohere/command-r-plus',
+    'mistralai/codestral-2501',
+    'google/gemma-3-4b-it',
+    'google/gemma-3-12b-it',
+    'google/gemma-3-27b-it',
+    'google/gemini-2.5-flash-lite-preview',
+    'deepseek/deepseek-prover-v2',
+    'google/gemma-3n-e4b-it',
+    'cohere/command-a',
+    'MiniMax-Text-01',
+    'abab6.5s-chat',
+    'minimax/m1',
+    'bagoodex/bagoodex-search-v1',
+    'moonshot/kimi-k2-preview',
+    'perplexity/sonar',
+    'perplexity/sonar-pro',
+    'x-ai/grok-4-07-09',
+    'x-ai/grok-3-beta',
+    'x-ai/grok-3-mini-beta'
+  ],
+
  // OpenRouter available models
  // input_modalities: 'text'
  // output_modalities: 'text'
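Note: the new aimlapi block follows the same shape as the other providers in MODEL_LIST: a flat array of model IDs, with the source endpoint recorded in the comment above it. If the list ever needs refreshing, a sketch along these lines could cross-check it against that endpoint (the OpenAI-style response shape and the AIMLAPI_API_KEY variable are assumptions, not part of this change):

// Hypothetical helper to pull current model IDs from the endpoint noted above.
// Assumes Node 18+ (global fetch), an OpenAI-style { data: [{ id }] } response,
// and an AIMLAPI_API_KEY env var; none of these are asserted by this PR.
const listAimlapiModels = async (): Promise<string[]> => {
  const res = await fetch('https://api.aimlapi.com/v1/models', {
    headers: { Authorization: `Bearer ${process.env.AIMLAPI_API_KEY}` }
  });
  const body = (await res.json()) as { data?: Array<{ id: string }> };
  return (body.data ?? []).map((m) => m.id);
};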
@@ -484,6 +588,8 @@ const getDefaultModel = (provider: string | undefined): string => {
      return MODEL_LIST.mistral[0];
    case 'deepseek':
      return MODEL_LIST.deepseek[0];
+    case 'aimlapi':
+      return MODEL_LIST.aimlapi[0];
    case 'openrouter':
      return MODEL_LIST.openrouter[0];
    default:
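With the new case in place, getDefaultModel('aimlapi') resolves to the first entry of the aimlapi array, exactly like the existing branches. A minimal illustration, restating behaviour visible in this diff rather than adding any:

// The default model for a provider is element 0 of its MODEL_LIST array,
// so for 'aimlapi' that is 'openai/gpt-4o' (the first entry added above).
const defaultAimlapiModel = getDefaultModel('aimlapi');
console.log(defaultAimlapiModel === MODEL_LIST.aimlapi[0]); // true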
@@ -676,9 +782,10 @@ export const configValidators = {
        'flowise',
        'groq',
        'deepseek',
+        'aimlapi',
        'openrouter'
      ].includes(value) || value.startsWith('ollama'),
-      `${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek' or 'openai' (default)`
+      `${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek', 'aimlapi' or 'openai' (default)`
    );

    return value;
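The provider check is a simple whitelist plus a startsWith('ollama') escape hatch for values that begin with 'ollama', so adding 'aimlapi' to the array (and to the error message) is all the validator needs to accept the new provider. A standalone restatement of the rule, trimmed to the entries visible in this hunk (the real array contains more providers above the context shown):

// Illustrative only; mirrors the includes/startsWith rule used by the validator.
const isSupportedProvider = (value: string): boolean =>
  ['flowise', 'groq', 'deepseek', 'aimlapi', 'openrouter'].includes(value) ||
  value.startsWith('ollama');

isSupportedProvider('aimlapi');       // true after this change
isSupportedProvider('ollama/llama3'); // true via the startsWith branch
isSupportedProvider('grok');          // false, so the error message fires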
@@ -735,6 +842,7 @@ export enum OCO_AI_PROVIDER_ENUM {
  MISTRAL = 'mistral',
  MLX = 'mlx',
  DEEPSEEK = 'deepseek',
+  AIMLAPI = 'aimlapi',
  OPENROUTER = 'openrouter'
}
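Together with the MODEL_LIST entry, the getDefaultModel case and the validator change, the new enum member completes the wiring for the provider. A small sketch of how the pieces fit, using only names introduced or shown in this diff:

// Selecting the provider through the enum yields the string 'aimlapi',
// which getDefaultModel maps to the first model in MODEL_LIST.aimlapi.
const provider = OCO_AI_PROVIDER_ENUM.AIMLAPI; // 'aimlapi'
const model = getDefaultModel(provider);       // 'openai/gpt-4o'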