
Commit af0f2c1

Merge pull request #505 from D1m7asis/dev-aimlapi
feat: add AIML API provider support
2 parents 7945f44 + c5ce50a

File tree: 4 files changed (+165, −5 lines)

README.md

Lines changed: 1 addition & 1 deletion
@@ -106,7 +106,7 @@ Create a `.env` file and add OpenCommit config variables there like this:
 
 ```env
 ...
-OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise, deepseek>
+OCO_AI_PROVIDER=<openai (default), anthropic, azure, ollama, gemini, flowise, deepseek, aimlapi>
 OCO_API_KEY=<your OpenAI API token> // or other LLM provider API token
 OCO_API_URL=<may be used to set proxy path to OpenAI api>
 OCO_TOKENS_MAX_INPUT=<max model token limit (default: 4096)>
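
With this change, pointing OpenCommit at AI/ML API comes down to setting the provider, the key, and optionally a model. A minimal sketch of such a `.env`, using the standard OpenCommit keys shown above (the model value is just one example from the list added in `src/commands/config.ts`):

```env
OCO_AI_PROVIDER=aimlapi
OCO_API_KEY=<your AI/ML API key>
OCO_MODEL=openai/gpt-4o
```

If `OCO_MODEL` is left unset, the default for this provider is the first entry of `MODEL_LIST.aimlapi` (see the `getDefaultModel` change below).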

src/commands/config.ts

Lines changed: 113 additions & 4 deletions
@@ -130,7 +130,112 @@ export const MODEL_LIST = {
     'mistral-moderation-2411',
     'mistral-moderation-latest'
   ],
-  deepseek: ['deepseek-chat', 'deepseek-reasoner']
+  deepseek: ['deepseek-chat', 'deepseek-reasoner'],
+
+  // AI/ML API available chat-completion models
+  // https://api.aimlapi.com/v1/models
+  aimlapi: [
+    'openai/gpt-4o',
+    'gpt-4o-2024-08-06',
+    'gpt-4o-2024-05-13',
+    'gpt-4o-mini',
+    'gpt-4o-mini-2024-07-18',
+    'chatgpt-4o-latest',
+    'gpt-4-turbo',
+    'gpt-4-turbo-2024-04-09',
+    'gpt-4',
+    'gpt-4-0125-preview',
+    'gpt-4-1106-preview',
+    'gpt-3.5-turbo',
+    'gpt-3.5-turbo-0125',
+    'gpt-3.5-turbo-1106',
+    'o1-preview',
+    'o1-preview-2024-09-12',
+    'o1-mini',
+    'o1-mini-2024-09-12',
+    'o3-mini',
+    'gpt-4o-audio-preview',
+    'gpt-4o-mini-audio-preview',
+    'gpt-4o-search-preview',
+    'gpt-4o-mini-search-preview',
+    'openai/gpt-4.1-2025-04-14',
+    'openai/gpt-4.1-mini-2025-04-14',
+    'openai/gpt-4.1-nano-2025-04-14',
+    'openai/o4-mini-2025-04-16',
+    'openai/o3-2025-04-16',
+    'o1',
+    'openai/o3-pro',
+    'meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo',
+    'google/gemma-2-27b-it',
+    'meta-llama/Llama-Vision-Free',
+    'Qwen/Qwen2-72B-Instruct',
+    'mistralai/Mixtral-8x7B-Instruct-v0.1',
+    'nvidia/Llama-3.1-Nemotron-70B-Instruct-HF',
+    'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
+    'meta-llama/Llama-3.3-70B-Instruct-Turbo',
+    'meta-llama/Llama-3.2-3B-Instruct-Turbo',
+    'meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo',
+    'meta-llama/Llama-Guard-3-11B-Vision-Turbo',
+    'Qwen/Qwen2.5-7B-Instruct-Turbo',
+    'Qwen/Qwen2.5-Coder-32B-Instruct',
+    'meta-llama/Meta-Llama-3-8B-Instruct-Lite',
+    'meta-llama/Llama-3-8b-chat-hf',
+    'meta-llama/Llama-3-70b-chat-hf',
+    'Qwen/Qwen2.5-72B-Instruct-Turbo',
+    'Qwen/QwQ-32B',
+    'meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo',
+    'meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo',
+    'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo',
+    'mistralai/Mistral-7B-Instruct-v0.2',
+    'meta-llama/LlamaGuard-2-8b',
+    'mistralai/Mistral-7B-Instruct-v0.1',
+    'mistralai/Mistral-7B-Instruct-v0.3',
+    'meta-llama/Meta-Llama-Guard-3-8B',
+    'meta-llama/llama-4-scout',
+    'meta-llama/llama-4-maverick',
+    'Qwen/Qwen3-235B-A22B-fp8-tput',
+    'claude-3-opus-20240229',
+    'claude-3-haiku-20240307',
+    'claude-3-5-sonnet-20240620',
+    'claude-3-5-sonnet-20241022',
+    'claude-3-5-haiku-20241022',
+    'claude-3-7-sonnet-20250219',
+    'claude-sonnet-4-20250514',
+    'claude-opus-4-20250514',
+    'google/gemini-2.0-flash-exp',
+    'google/gemini-2.0-flash',
+    'google/gemini-2.5-pro',
+    'google/gemini-2.5-flash',
+    'deepseek-chat',
+    'deepseek-reasoner',
+    'qwen-max',
+    'qwen-plus',
+    'qwen-turbo',
+    'qwen-max-2025-01-25',
+    'mistralai/mistral-tiny',
+    'mistralai/mistral-nemo',
+    'anthracite-org/magnum-v4-72b',
+    'nvidia/llama-3.1-nemotron-70b-instruct',
+    'cohere/command-r-plus',
+    'mistralai/codestral-2501',
+    'google/gemma-3-4b-it',
+    'google/gemma-3-12b-it',
+    'google/gemma-3-27b-it',
+    'google/gemini-2.5-flash-lite-preview',
+    'deepseek/deepseek-prover-v2',
+    'google/gemma-3n-e4b-it',
+    'cohere/command-a',
+    'MiniMax-Text-01',
+    'abab6.5s-chat',
+    'minimax/m1',
+    'bagoodex/bagoodex-search-v1',
+    'moonshot/kimi-k2-preview',
+    'perplexity/sonar',
+    'perplexity/sonar-pro',
+    'x-ai/grok-4-07-09',
+    'x-ai/grok-3-beta',
+    'x-ai/grok-3-mini-beta',
+  ],
 };
 
 const getDefaultModel = (provider: string | undefined): string => {
@@ -149,6 +254,8 @@ const getDefaultModel = (provider: string | undefined): string => {
       return MODEL_LIST.mistral[0];
     case 'deepseek':
       return MODEL_LIST.deepseek[0];
+    case 'aimlapi':
+      return MODEL_LIST.aimlapi[0];
     default:
       return MODEL_LIST.openai[0];
   }
@@ -322,9 +429,10 @@ export const configValidators = {
         'test',
         'flowise',
         'groq',
-        'deepseek'
+        'deepseek',
+        'aimlapi',
       ].includes(value) || value.startsWith('ollama'),
-      `${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek' or 'openai' (default)`
+      `${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral', 'deepseek', 'aimlapi' or 'openai' (default)`
     );
 
     return value;
@@ -372,7 +480,8 @@ export enum OCO_AI_PROVIDER_ENUM {
   GROQ = 'groq',
   MISTRAL = 'mistral',
   MLX = 'mlx',
-  DEEPSEEK = 'deepseek'
+  DEEPSEEK = 'deepseek',
+  AIMLAPI = 'aimlapi',
 }
 
 export type ConfigType = {
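
Two behaviors fall out of these hunks: the provider validator now accepts `aimlapi` (alongside any value starting with `ollama`), and an unset model resolves to `MODEL_LIST.aimlapi[0]`. A condensed, standalone sketch of that logic for illustration only (`SUPPORTED_TAIL`, `isSupportedProvider`, and `AIMLAPI_MODELS` are hypothetical names, not exports of `config.ts`; the real allow-list is longer than the tail visible in the hunk):

```ts
// Hypothetical distillation of the checks added above.
const SUPPORTED_TAIL = ['test', 'flowise', 'groq', 'deepseek', 'aimlapi'];

const isSupportedProvider = (value: string): boolean =>
  SUPPORTED_TAIL.includes(value) || value.startsWith('ollama');

// Default-model resolution for the new provider, mirroring getDefaultModel:
const AIMLAPI_MODELS = ['openai/gpt-4o' /* ...rest of MODEL_LIST.aimlapi */];
const defaultModel = AIMLAPI_MODELS[0]; // used when OCO_MODEL is not set

console.log(isSupportedProvider('aimlapi')); // true
console.log(defaultModel);                   // 'openai/gpt-4o'
```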

src/engine/aimlapi.ts

Lines changed: 47 additions & 0 deletions
@@ -0,0 +1,47 @@
+import OpenAI from 'openai';
+import axios, { AxiosInstance } from 'axios';
+import { AiEngine, AiEngineConfig } from './Engine';
+
+interface AimlApiConfig extends AiEngineConfig {}
+
+export class AimlApiEngine implements AiEngine {
+  client: AxiosInstance;
+
+  constructor(public config: AimlApiConfig) {
+    this.client = axios.create({
+      baseURL: config.baseURL || 'https://api.aimlapi.com/v1/chat/completions',
+      headers: {
+        Authorization: `Bearer ${config.apiKey}`,
+        'HTTP-Referer': 'https://github.com/di-sukharev/opencommit',
+        'X-Title': 'opencommit',
+        'Content-Type': 'application/json',
+        ...config.customHeaders
+      }
+    });
+  }
+
+  public generateCommitMessage = async (
+    messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
+  ): Promise<string | null> => {
+    try {
+      const response = await this.client.post('', {
+        model: this.config.model,
+        messages
+      });
+
+      const message = response.data.choices?.[0]?.message;
+      return message?.content ?? null;
+    } catch (error) {
+      const err = error as Error;
+      if (
+        axios.isAxiosError<{ error?: { message: string } }>(error) &&
+        error.response?.status === 401
+      ) {
+        const apiError = error.response.data.error;
+        if (apiError) throw new Error(apiError.message);
+      }
+
+      throw err;
+    }
+  };
+}
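
For a sense of how the engine behaves in isolation, here is a hypothetical standalone usage sketch (the key, model, and diff text are placeholders; inside OpenCommit the engine is constructed by `getEngine()` with `DEFAULT_CONFIG`, and `AiEngineConfig` may require more fields than shown, hence the cast):

```ts
import { AimlApiEngine } from './src/engine/aimlapi';

async function demo() {
  const engine = new AimlApiEngine({
    apiKey: process.env.OCO_API_KEY ?? '<your AI/ML API key>',
    model: 'openai/gpt-4o' // any entry from MODEL_LIST.aimlapi
  } as any); // remaining AiEngineConfig fields omitted in this sketch

  const message = await engine.generateCommitMessage([
    { role: 'system', content: 'Write a conventional commit message for this diff.' },
    { role: 'user', content: 'diff --git a/index.ts b/index.ts\n+console.log("hi");' }
  ]);

  console.log(message);
}

demo().catch(console.error);
```

Note that the engine posts to an empty path (`this.client.post('', ...)`), so the configured `baseURL` must already point at the chat-completions endpoint, as the default above does.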

src/utils/engine.ts

Lines changed: 4 additions & 0 deletions
@@ -11,6 +11,7 @@ import { TestAi, TestMockType } from '../engine/testAi';
 import { GroqEngine } from '../engine/groq';
 import { MLXEngine } from '../engine/mlx';
 import { DeepseekEngine } from '../engine/deepseek';
+import { AimlApiEngine } from '../engine/aimlapi';
 
 export function getEngine(): AiEngine {
   const config = getConfig();
@@ -55,6 +56,9 @@ export function getEngine(): AiEngine {
     case OCO_AI_PROVIDER_ENUM.DEEPSEEK:
       return new DeepseekEngine(DEFAULT_CONFIG);
 
+    case OCO_AI_PROVIDER_ENUM.AIMLAPI:
+      return new AimlApiEngine(DEFAULT_CONFIG);
+
     default:
       return new OpenAiEngine(DEFAULT_CONFIG);
   }
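
With the provider set to `aimlapi` in the OpenCommit config, `getEngine()` should now hand back the new engine. A quick, hypothetical sanity check (assuming `OCO_AI_PROVIDER=aimlapi` and a valid `OCO_API_KEY` are already present in the resolved config):

```ts
import { getEngine } from './src/utils/engine';
import { AimlApiEngine } from './src/engine/aimlapi';

// getEngine() reads the resolved config and dispatches on OCO_AI_PROVIDER.
const engine = getEngine();
console.log(engine instanceof AimlApiEngine); // expected: true
```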
