Skip to content

Commit 7d4c843

Browse files
committed
Add xAI/Grok and Mistral AI support via Vercel AI SDK
1 parent a70dacb commit 7d4c843

File tree

7 files changed

+133
-5
lines changed

7 files changed

+133
-5
lines changed

packages/agent/package.json

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,9 @@
4545
"license": "MIT",
4646
"dependencies": {
4747
"@ai-sdk/anthropic": "^1.1.13",
48+
"@ai-sdk/mistral": "^1.1.13",
4849
"@ai-sdk/openai": "^1.2.0",
50+
"@ai-sdk/xai": "^1.1.12",
4951
"@mozilla/readability": "^0.5.0",
5052
"@playwright/test": "^1.50.1",
5153
"@vitest/browser": "^3.0.5",
Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
import { describe, expect, it } from 'vitest';
2+
3+
import { getModel } from './config';
4+
5+
describe('getModel', () => {
6+
it('should return the correct model for anthropic', () => {
7+
const model = getModel('anthropic', 'claude-3-7-sonnet-20250219');
8+
expect(model).toBeDefined();
9+
expect(model.provider).toBe('anthropic.messages');
10+
});
11+
12+
it('should return the correct model for openai', () => {
13+
const model = getModel('openai', 'gpt-4o-2024-05-13');
14+
expect(model).toBeDefined();
15+
expect(model.provider).toBe('openai.chat');
16+
});
17+
18+
it('should return the correct model for ollama', () => {
19+
const model = getModel('ollama', 'llama3');
20+
expect(model).toBeDefined();
21+
expect(model.provider).toBe('ollama.chat');
22+
});
23+
24+
it('should return the correct model for xai', () => {
25+
const model = getModel('xai', 'grok-1');
26+
expect(model).toBeDefined();
27+
expect(model.provider).toBe('xai.chat');
28+
});
29+
30+
it('should return the correct model for mistral', () => {
31+
const model = getModel('mistral', 'mistral-large-latest');
32+
expect(model).toBeDefined();
33+
expect(model.provider).toBe('mistral.chat');
34+
});
35+
36+
it('should throw an error for unknown provider', () => {
37+
expect(() => {
38+
// @ts-expect-error Testing invalid provider
39+
getModel('unknown', 'model');
40+
}).toThrow('Unknown model provider: unknown');
41+
});
42+
});

packages/agent/src/core/toolAgent/config.ts

Lines changed: 12 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,20 @@
11
import { execSync } from 'child_process';
22

33
import { anthropic } from '@ai-sdk/anthropic';
4+
import { mistral } from '@ai-sdk/mistral';
45
import { openai } from '@ai-sdk/openai';
6+
import { xai } from '@ai-sdk/xai';
57
import { createOllama, ollama } from 'ollama-ai-provider';
68

79
/**
810
* Available model providers
911
*/
10-
export type ModelProvider = 'anthropic' | 'openai' | 'ollama';
12+
export type ModelProvider =
13+
| 'anthropic'
14+
| 'openai'
15+
| 'ollama'
16+
| 'xai'
17+
| 'mistral';
1118

1219
/**
1320
* Get the model instance based on provider and model name
@@ -29,6 +36,10 @@ export function getModel(
2936
})(modelName);
3037
}
3138
return ollama(modelName);
39+
case 'xai':
40+
return xai(modelName);
41+
case 'mistral':
42+
return mistral(modelName);
3243
default:
3344
throw new Error(`Unknown model provider: ${provider}`);
3445
}

packages/cli/README.md

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ Command-line interface for AI-powered coding tasks.
44

55
## Features
66

7-
- 🤖 **AI-Powered**: Leverages Anthropic's Claude, OpenAI models, and Ollama for intelligent coding assistance
7+
- 🤖 **AI-Powered**: Leverages Anthropic's Claude, OpenAI models, xAI/Grok, Mistral AI, and Ollama for intelligent coding assistance
88
- 🛠️ **Extensible Tool System**: Modular architecture with various tool categories
99
- 🔄 **Parallel Execution**: Ability to spawn sub-agents for concurrent task processing
1010
- 📝 **Self-Modification**: Can modify code, it was built and tested by writing itself
@@ -82,7 +82,7 @@ mycoder config set modelName gpt-4o-2024-05-13
8282

8383
### Model Selection
8484

85-
MyCoder supports Anthropic, OpenAI, and Ollama models. You can configure which model provider and model name to use with the following commands:
85+
MyCoder supports Anthropic, OpenAI, xAI/Grok, Mistral AI, and Ollama models. You can configure which model provider and model name to use with the following commands:
8686

8787
```bash
8888
# Use OpenAI models
@@ -93,6 +93,14 @@ mycoder config set modelName gpt-4o-2024-05-13 # or any other OpenAI model
9393
mycoder config set modelProvider anthropic
9494
mycoder config set modelName claude-3-7-sonnet-20250219 # or any other Anthropic model
9595

96+
# Use xAI/Grok models
97+
mycoder config set modelProvider xai
98+
mycoder config set modelName grok-1 # or any other xAI model
99+
100+
# Use Mistral AI models
101+
mycoder config set modelProvider mistral
102+
mycoder config set modelName mistral-large-latest # or any other Mistral model
103+
96104
# Use Ollama models (local)
97105
mycoder config set modelProvider ollama
98106
mycoder config set modelName llama3-groq-tool-use # or any other model available in your Ollama instance
@@ -135,6 +143,8 @@ mycoder config set ollamaBaseUrl http://your-ollama-server:11434/api
135143

136144
- `ANTHROPIC_API_KEY`: Your Anthropic API key (required when using Anthropic models)
137145
- `OPENAI_API_KEY`: Your OpenAI API key (required when using OpenAI models)
146+
- `XAI_API_KEY`: Your xAI API key (required when using xAI/Grok models)
147+
- `MISTRAL_API_KEY`: Your Mistral AI API key (required when using Mistral models)
138148

139149
Note: Ollama models do not require an API key as they run locally or on a specified server.
140150

packages/cli/src/commands/$default.ts

Lines changed: 21 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -107,6 +107,21 @@ export const command: CommandModule<SharedOptions, DefaultArgs> = {
107107
'You can get an API key from https://platform.openai.com/api-keys',
108108
);
109109
throw new Error('OpenAI API key not found');
110+
} else if (userModelProvider === 'xai' && !process.env.XAI_API_KEY) {
111+
logger.error(
112+
'No xAI API key found. Please set the XAI_API_KEY environment variable.',
113+
'You can get an API key from https://platform.xai.com',
114+
);
115+
throw new Error('xAI API key not found');
116+
} else if (
117+
userModelProvider === 'mistral' &&
118+
!process.env.MISTRAL_API_KEY
119+
) {
120+
logger.error(
121+
'No Mistral API key found. Please set the MISTRAL_API_KEY environment variable.',
122+
'You can get an API key from https://console.mistral.ai/api-keys/',
123+
);
124+
throw new Error('Mistral API key not found');
110125
}
111126
// No API key check needed for Ollama as it uses a local server
112127

@@ -157,7 +172,12 @@ export const command: CommandModule<SharedOptions, DefaultArgs> = {
157172
const agentConfig = {
158173
...DEFAULT_CONFIG,
159174
model: getModel(
160-
userModelProvider as 'anthropic' | 'openai' | 'ollama',
175+
userModelProvider as
176+
| 'anthropic'
177+
| 'openai'
178+
| 'ollama'
179+
| 'xai'
180+
| 'mistral',
161181
userModelName,
162182
{ ollamaBaseUrl: config.ollamaBaseUrl },
163183
),

packages/cli/src/options.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ export const sharedOptions = {
2222
modelProvider: {
2323
type: 'string',
2424
description: 'AI model provider to use',
25-
choices: ['anthropic', 'openai'],
25+
choices: ['anthropic', 'openai', 'ollama', 'xai', 'mistral'],
2626
} as const,
2727
modelName: {
2828
type: 'string',

pnpm-lock.yaml

Lines changed: 43 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments (0)