2 changes: 2 additions & 0 deletions packages/agent/package.json
@@ -45,7 +45,9 @@
   "license": "MIT",
   "dependencies": {
     "@ai-sdk/anthropic": "^1.1.13",
+    "@ai-sdk/mistral": "^1.1.13",
     "@ai-sdk/openai": "^1.2.0",
+    "@ai-sdk/xai": "^1.1.12",
     "@mozilla/readability": "^0.5.0",
     "@playwright/test": "^1.50.1",
     "@vitest/browser": "^3.0.5",
42 changes: 42 additions & 0 deletions packages/agent/src/core/toolAgent/config.test.ts
@@ -0,0 +1,42 @@
+import { describe, expect, it } from 'vitest';
+
+import { getModel } from './config';
+
+describe('getModel', () => {
+  it('should return the correct model for anthropic', () => {
+    const model = getModel('anthropic', 'claude-3-7-sonnet-20250219');
+    expect(model).toBeDefined();
+    expect(model.provider).toBe('anthropic.messages');
+  });
+
+  it('should return the correct model for openai', () => {
+    const model = getModel('openai', 'gpt-4o-2024-05-13');
+    expect(model).toBeDefined();
+    expect(model.provider).toBe('openai.chat');
+  });
+
+  it('should return the correct model for ollama', () => {
+    const model = getModel('ollama', 'llama3');
+    expect(model).toBeDefined();
+    expect(model.provider).toBe('ollama.chat');
+  });
+
+  it('should return the correct model for xai', () => {
+    const model = getModel('xai', 'grok-1');
+    expect(model).toBeDefined();
+    expect(model.provider).toBe('xai.chat');
+  });
+
+  it('should return the correct model for mistral', () => {
+    const model = getModel('mistral', 'mistral-large-latest');
+    expect(model).toBeDefined();
+    expect(model.provider).toBe('mistral.chat');
+  });
+
+  it('should throw an error for unknown provider', () => {
+    expect(() => {
+      // @ts-expect-error Testing invalid provider
+      getModel('unknown', 'model');
+    }).toThrow('Unknown model provider: unknown');
+  });
+});
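The suite checks each provider's happy path through the AI SDK's `provider` metadata string, plus the unknown-provider error. One path it leaves uncovered is the `createOllama` branch that honors a custom base URL. A possible extension is sketched below; it assumes `getModel` accepts the same `{ ollamaBaseUrl }` options object that the CLI caller in `$default.ts` (shown later in this diff) passes:

```ts
import { describe, expect, it } from 'vitest';

import { getModel } from './config';

describe('getModel with an ollama base URL', () => {
  it('should build an ollama model against a custom server', () => {
    // Assumption: the third parameter matches the caller in $default.ts,
    // i.e. an options object shaped like { ollamaBaseUrl?: string }.
    const model = getModel('ollama', 'llama3', {
      ollamaBaseUrl: 'http://localhost:11434/api',
    });
    expect(model).toBeDefined();
    expect(model.provider).toBe('ollama.chat');
  });
});
```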
13 changes: 12 additions & 1 deletion packages/agent/src/core/toolAgent/config.ts
@@ -1,13 +1,20 @@
 import { execSync } from 'child_process';
 
 import { anthropic } from '@ai-sdk/anthropic';
+import { mistral } from '@ai-sdk/mistral';
 import { openai } from '@ai-sdk/openai';
+import { xai } from '@ai-sdk/xai';
 import { createOllama, ollama } from 'ollama-ai-provider';
 
 /**
  * Available model providers
  */
-export type ModelProvider = 'anthropic' | 'openai' | 'ollama';
+export type ModelProvider =
+  | 'anthropic'
+  | 'openai'
+  | 'ollama'
+  | 'xai'
+  | 'mistral';
 
 /**
  * Get the model instance based on provider and model name
@@ -29,6 +36,10 @@ export function getModel(
         })(modelName);
       }
       return ollama(modelName);
+    case 'xai':
+      return xai(modelName);
+    case 'mistral':
+      return mistral(modelName);
     default:
       throw new Error(`Unknown model provider: ${provider}`);
   }
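Pieced together, the two hunks above leave `getModel` reading roughly like the sketch below. The function signature and the `baseURL` option passed to `createOllama` are inferred from the ollama branch's context lines and from the caller in `$default.ts`, so treat them as assumptions rather than the exact merged source:

```ts
import { anthropic } from '@ai-sdk/anthropic';
import { mistral } from '@ai-sdk/mistral';
import { openai } from '@ai-sdk/openai';
import { xai } from '@ai-sdk/xai';
import { createOllama, ollama } from 'ollama-ai-provider';

export type ModelProvider =
  | 'anthropic'
  | 'openai'
  | 'ollama'
  | 'xai'
  | 'mistral';

export function getModel(
  provider: ModelProvider,
  modelName: string,
  options?: { ollamaBaseUrl?: string }, // assumed shape, matching the CLI caller
) {
  switch (provider) {
    case 'anthropic':
      return anthropic(modelName);
    case 'openai':
      return openai(modelName);
    case 'ollama':
      if (options?.ollamaBaseUrl) {
        // Point the provider at a remote Ollama server instead of the default.
        return createOllama({
          baseURL: options.ollamaBaseUrl,
        })(modelName);
      }
      return ollama(modelName);
    case 'xai':
      return xai(modelName);
    case 'mistral':
      return mistral(modelName);
    default:
      throw new Error(`Unknown model provider: ${provider}`);
  }
}
```

The two new cases mirror the existing one-liner providers, which keeps this switch the single place where provider strings map to SDK factories.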
14 changes: 12 additions & 2 deletions packages/cli/README.md
@@ -4,7 +4,7 @@ Command-line interface for AI-powered coding tasks.
 
 ## Features
 
-- 🤖 **AI-Powered**: Leverages Anthropic's Claude, OpenAI models, and Ollama for intelligent coding assistance
+- 🤖 **AI-Powered**: Leverages Anthropic's Claude, OpenAI models, xAI/Grok, Mistral AI, and Ollama for intelligent coding assistance
 - 🛠️ **Extensible Tool System**: Modular architecture with various tool categories
 - 🔄 **Parallel Execution**: Ability to spawn sub-agents for concurrent task processing
 - 📝 **Self-Modification**: Can modify code; it was built and tested by writing itself
@@ -82,7 +82,7 @@ mycoder config set modelName gpt-4o-2024-05-13
 
 ### Model Selection
 
-MyCoder supports Anthropic, OpenAI, and Ollama models. You can configure which model provider and model name to use with the following commands:
+MyCoder supports Anthropic, OpenAI, xAI/Grok, Mistral AI, and Ollama models. You can configure which model provider and model name to use with the following commands:
 
 ```bash
 # Use OpenAI models
@@ -93,6 +93,14 @@ mycoder config set modelName gpt-4o-2024-05-13 # or any other OpenAI model
 mycoder config set modelProvider anthropic
 mycoder config set modelName claude-3-7-sonnet-20250219 # or any other Anthropic model
 
+# Use xAI/Grok models
+mycoder config set modelProvider xai
+mycoder config set modelName grok-1 # or any other xAI model
+
+# Use Mistral AI models
+mycoder config set modelProvider mistral
+mycoder config set modelName mistral-large-latest # or any other Mistral model
+
 # Use Ollama models (local)
 mycoder config set modelProvider ollama
 mycoder config set modelName llama3-groq-tool-use # or any other model available in your Ollama instance
@@ -135,6 +143,8 @@ mycoder config set ollamaBaseUrl http://your-ollama-server:11434/api
 
 - `ANTHROPIC_API_KEY`: Your Anthropic API key (required when using Anthropic models)
 - `OPENAI_API_KEY`: Your OpenAI API key (required when using OpenAI models)
+- `XAI_API_KEY`: Your xAI API key (required when using xAI/Grok models)
+- `MISTRAL_API_KEY`: Your Mistral AI API key (required when using Mistral models)
 
 Note: Ollama models do not require an API key as they run locally or on a specified server.

22 changes: 21 additions & 1 deletion packages/cli/src/commands/$default.ts
@@ -107,6 +107,21 @@ export const command: CommandModule<SharedOptions, DefaultArgs> = {
       'You can get an API key from https://platform.openai.com/api-keys',
     );
     throw new Error('OpenAI API key not found');
+  } else if (userModelProvider === 'xai' && !process.env.XAI_API_KEY) {
+    logger.error(
+      'No xAI API key found. Please set the XAI_API_KEY environment variable.',
+      'You can get an API key from https://platform.xai.com',
+    );
+    throw new Error('xAI API key not found');
+  } else if (
+    userModelProvider === 'mistral' &&
+    !process.env.MISTRAL_API_KEY
+  ) {
+    logger.error(
+      'No Mistral API key found. Please set the MISTRAL_API_KEY environment variable.',
+      'You can get an API key from https://console.mistral.ai/api-keys/',
+    );
+    throw new Error('Mistral API key not found');
   }
   // No API key check needed for Ollama as it uses a local server
 
@@ -157,7 +172,12 @@ export const command: CommandModule<SharedOptions, DefaultArgs> = {
   const agentConfig = {
     ...DEFAULT_CONFIG,
     model: getModel(
-      userModelProvider as 'anthropic' | 'openai' | 'ollama',
+      userModelProvider as
+        | 'anthropic'
+        | 'openai'
+        | 'ollama'
+        | 'xai'
+        | 'mistral',
       userModelName,
       { ollamaBaseUrl: config.ollamaBaseUrl },
     ),
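One follow-up the cast above invites: the inline five-member union duplicates the `ModelProvider` type that `config.ts` now exports. If the agent package re-exports that type, the call site could collapse to something like the sketch below; the `mycoder-agent` import path is a guess, and the `declare` lines stand in for values the real handler already has in scope:

```ts
import { getModel, type ModelProvider } from 'mycoder-agent'; // hypothetical import path

// Stand-ins for values the command handler derives from config and CLI flags.
declare const userModelProvider: string;
declare const userModelName: string;
declare const config: { ollamaBaseUrl?: string };

const model = getModel(
  userModelProvider as ModelProvider,
  userModelName,
  { ollamaBaseUrl: config.ollamaBaseUrl },
);
```

With that, adding a sixth provider would touch only the union in `config.ts` and the `choices` array in `options.ts`.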
2 changes: 1 addition & 1 deletion packages/cli/src/options.ts
@@ -22,7 +22,7 @@ export const sharedOptions = {
   modelProvider: {
     type: 'string',
     description: 'AI model provider to use',
-    choices: ['anthropic', 'openai'],
+    choices: ['anthropic', 'openai', 'ollama', 'xai', 'mistral'],
   } as const,
   modelName: {
     type: 'string',
43 changes: 43 additions & 0 deletions pnpm-lock.yaml

Some generated files are not rendered by default.