Skip to content

Commit 2068876

Browse files
feat(config, engine): add support for Mistral AI provider and engine (#436)
* docs(CONTRIBUTING.md): update `TODO.md` reference (#435) Signed-off-by: Emmanuel Ferdman <[email protected]> * feat(config, engine): add support for Mistral AI provider and engine * ``` feat(package): add mistralai and zod dependencies ``` * fix: recreate package-lock.json with node20 * fix: recreate package-lock.json with node v20.18.1 based on branch dev --------- Signed-off-by: Emmanuel Ferdman <[email protected]> Co-authored-by: Emmanuel Ferdman <[email protected]> Co-authored-by: pedro-valentim <>
1 parent 26ebfb4 commit 2068876

File tree

6 files changed

+213
-7
lines changed

6 files changed

+213
-7
lines changed

package-lock.json

Lines changed: 73 additions & 3 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -88,6 +88,7 @@
8888
"@clack/prompts": "^0.6.1",
8989
"@dqbd/tiktoken": "^1.0.2",
9090
"@google/generative-ai": "^0.11.4",
91+
"@mistralai/mistralai": "^1.3.5",
9192
"@octokit/webhooks-schemas": "^6.11.0",
9293
"@octokit/webhooks-types": "^6.11.0",
9394
"axios": "^1.3.4",
@@ -99,6 +100,7 @@
99100
"ini": "^3.0.1",
100101
"inquirer": "^9.1.4",
101102
"openai": "^4.57.0",
102-
"punycode": "^2.3.1"
103+
"punycode": "^2.3.1",
104+
"zod": "^3.23.8"
103105
}
104106
}

src/commands/config.ts

Lines changed: 48 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -86,6 +86,48 @@ export const MODEL_LIST = {
8686
'llama-3.1-70b-versatile', // Llama 3.1 70B (Preview)
8787
'gemma-7b-it', // Gemma 7B
8888
'gemma2-9b-it' // Gemma 2 9B
89+
],
90+
91+
mistral: [
92+
'ministral-3b-2410',
93+
'ministral-3b-latest',
94+
'ministral-8b-2410',
95+
'ministral-8b-latest',
96+
'open-mistral-7b',
97+
'mistral-tiny',
98+
'mistral-tiny-2312',
99+
'open-mistral-nemo',
100+
'open-mistral-nemo-2407',
101+
'mistral-tiny-2407',
102+
'mistral-tiny-latest',
103+
'open-mixtral-8x7b',
104+
'mistral-small',
105+
'mistral-small-2312',
106+
'open-mixtral-8x22b',
107+
'open-mixtral-8x22b-2404',
108+
'mistral-small-2402',
109+
'mistral-small-2409',
110+
'mistral-small-latest',
111+
'mistral-medium-2312',
112+
'mistral-medium',
113+
'mistral-medium-latest',
114+
'mistral-large-2402',
115+
'mistral-large-2407',
116+
'mistral-large-2411',
117+
'mistral-large-latest',
118+
'pixtral-large-2411',
119+
'pixtral-large-latest',
120+
'codestral-2405',
121+
'codestral-latest',
122+
'codestral-mamba-2407',
123+
'open-codestral-mamba',
124+
'codestral-mamba-latest',
125+
'pixtral-12b-2409',
126+
'pixtral-12b',
127+
'pixtral-12b-latest',
128+
'mistral-embed',
129+
'mistral-moderation-2411',
130+
'mistral-moderation-latest',
89131
]
90132
};
91133

@@ -101,6 +143,8 @@ const getDefaultModel = (provider: string | undefined): string => {
101143
return MODEL_LIST.gemini[0];
102144
case 'groq':
103145
return MODEL_LIST.groq[0];
146+
case 'mistral':
147+
return MODEL_LIST.mistral[0];
104148
default:
105149
return MODEL_LIST.openai[0];
106150
}
@@ -257,14 +301,15 @@ export const configValidators = {
257301
CONFIG_KEYS.OCO_AI_PROVIDER,
258302
[
259303
'openai',
304+
'mistral',
260305
'anthropic',
261306
'gemini',
262307
'azure',
263308
'test',
264309
'flowise',
265310
'groq'
266-
].includes(value) || value.startsWith('ollama') || value.startsWith('mlx'),
267-
`${value} is not supported yet, use 'ollama', 'mlx', anthropic', 'azure', 'gemini', 'flowise' or 'openai' (default)`
311+
].includes(value) || value.startsWith('ollama'),
312+
`${value} is not supported yet, use 'ollama', 'mlx', 'anthropic', 'azure', 'gemini', 'flowise', 'mistral' or 'openai' (default)`
268313
);
269314

270315
return value;
@@ -310,6 +355,7 @@ export enum OCO_AI_PROVIDER_ENUM {
310355
TEST = 'test',
311356
FLOWISE = 'flowise',
312357
GROQ = 'groq',
358+
MISTRAL = 'mistral',
313359
MLX = 'mlx'
314360
}
315361

src/engine/Engine.ts

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ import { OpenAIClient as AzureOpenAIClient } from '@azure/openai';
33
import { GoogleGenerativeAI as GeminiClient } from '@google/generative-ai';
44
import { AxiosInstance as RawAxiosClient } from 'axios';
55
import { OpenAI as OpenAIClient } from 'openai';
6+
import { Mistral as MistralClient } from '@mistralai/mistralai';
67

78
export interface AiEngineConfig {
89
apiKey: string;
@@ -17,7 +18,8 @@ type Client =
1718
| AzureOpenAIClient
1819
| AnthropicClient
1920
| RawAxiosClient
20-
| GeminiClient;
21+
| GeminiClient
22+
| MistralClient;
2123

2224
export interface AiEngine {
2325
config: AiEngineConfig;

src/engine/mistral.ts

Lines changed: 82 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,82 @@
1+
import axios from 'axios';
2+
import { Mistral } from '@mistralai/mistralai';
3+
import { OpenAI } from 'openai';
4+
import { GenerateCommitMessageErrorEnum } from '../generateCommitMessageFromGitDiff';
5+
import { tokenCount } from '../utils/tokenCount';
6+
import { AiEngine, AiEngineConfig } from './Engine';
7+
import {
8+
AssistantMessage as MistralAssistantMessage,
9+
SystemMessage as MistralSystemMessage,
10+
ToolMessage as MistralToolMessage,
11+
UserMessage as MistralUserMessage
12+
} from '@mistralai/mistralai/models/components';
13+
14+
// Mistral needs no provider-specific settings beyond the shared engine config
// (apiKey, baseURL, model, token limits); the alias exists so the engine's
// constructor signature names its provider explicitly.
export interface MistralAiConfig extends AiEngineConfig {}
15+
export type MistralCompletionMessageParam = Array<
16+
| (MistralSystemMessage & { role: "system" })
17+
| (MistralUserMessage & { role: "user" })
18+
| (MistralAssistantMessage & { role: "assistant" })
19+
| (MistralToolMessage & { role: "tool" })
20+
>
21+
22+
export class MistralAiEngine implements AiEngine {
23+
config: MistralAiConfig;
24+
client: Mistral;
25+
26+
constructor(config: MistralAiConfig) {
27+
this.config = config;
28+
29+
if (!config.baseURL) {
30+
this.client = new Mistral({ apiKey: config.apiKey });
31+
} else {
32+
this.client = new Mistral({ apiKey: config.apiKey, serverURL: config.baseURL });
33+
}
34+
}
35+
36+
public generateCommitMessage = async (
37+
messages: Array<OpenAI.Chat.Completions.ChatCompletionMessageParam>
38+
): Promise<string | null> => {
39+
const params = {
40+
model: this.config.model,
41+
messages: messages as MistralCompletionMessageParam,
42+
topP: 0.1,
43+
maxTokens: this.config.maxTokensOutput
44+
};
45+
46+
try {
47+
const REQUEST_TOKENS = messages
48+
.map((msg) => tokenCount(msg.content as string) + 4)
49+
.reduce((a, b) => a + b, 0);
50+
51+
if (
52+
REQUEST_TOKENS >
53+
this.config.maxTokensInput - this.config.maxTokensOutput
54+
)
55+
throw new Error(GenerateCommitMessageErrorEnum.tooMuchTokens);
56+
57+
const completion = await this.client.chat.complete(params);
58+
59+
if (!completion.choices)
60+
throw Error('No completion choice available.')
61+
62+
const message = completion.choices[0].message;
63+
64+
if (!message || !message.content)
65+
throw Error('No completion choice available.')
66+
67+
return message.content as string;
68+
} catch (error) {
69+
const err = error as Error;
70+
if (
71+
axios.isAxiosError<{ error?: { message: string } }>(error) &&
72+
error.response?.status === 401
73+
) {
74+
const mistralError = error.response.data.error;
75+
76+
if (mistralError) throw new Error(mistralError.message);
77+
}
78+
79+
throw err;
80+
}
81+
};
82+
}

src/utils/engine.ts

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@ import { FlowiseEngine } from '../engine/flowise';
66
import { GeminiEngine } from '../engine/gemini';
77
import { OllamaEngine } from '../engine/ollama';
88
import { OpenAiEngine } from '../engine/openAi';
9+
import { MistralAiEngine } from '../engine/mistral';
910
import { TestAi, TestMockType } from '../engine/testAi';
1011
import { GroqEngine } from '../engine/groq';
1112
import { MLXEngine } from '../engine/mlx';
@@ -44,6 +45,9 @@ export function getEngine(): AiEngine {
4445
case OCO_AI_PROVIDER_ENUM.GROQ:
4546
return new GroqEngine(DEFAULT_CONFIG);
4647

48+
case OCO_AI_PROVIDER_ENUM.MISTRAL:
49+
return new MistralAiEngine(DEFAULT_CONFIG);
50+
4751
case OCO_AI_PROVIDER_ENUM.MLX:
4852
return new MLXEngine(DEFAULT_CONFIG);
4953

0 commit comments

Comments
 (0)