diff --git a/packages/agent/package.json b/packages/agent/package.json
index 4afa424..920434a 100644
--- a/packages/agent/package.json
+++ b/packages/agent/package.json
@@ -45,7 +45,9 @@
   "license": "MIT",
   "dependencies": {
     "@ai-sdk/anthropic": "^1.1.13",
+    "@ai-sdk/mistral": "^1.1.13",
     "@ai-sdk/openai": "^1.2.0",
+    "@ai-sdk/xai": "^1.1.12",
     "@mozilla/readability": "^0.5.0",
     "@playwright/test": "^1.50.1",
     "@vitest/browser": "^3.0.5",
diff --git a/packages/agent/src/core/toolAgent/config.test.ts b/packages/agent/src/core/toolAgent/config.test.ts
new file mode 100644
index 0000000..933a20b
--- /dev/null
+++ b/packages/agent/src/core/toolAgent/config.test.ts
@@ -0,0 +1,42 @@
+import { describe, expect, it } from 'vitest';
+
+import { getModel } from './config';
+
+describe('getModel', () => {
+  it('should return the correct model for anthropic', () => {
+    const model = getModel('anthropic', 'claude-3-7-sonnet-20250219');
+    expect(model).toBeDefined();
+    expect(model.provider).toBe('anthropic.messages');
+  });
+
+  it('should return the correct model for openai', () => {
+    const model = getModel('openai', 'gpt-4o-2024-05-13');
+    expect(model).toBeDefined();
+    expect(model.provider).toBe('openai.chat');
+  });
+
+  it('should return the correct model for ollama', () => {
+    const model = getModel('ollama', 'llama3');
+    expect(model).toBeDefined();
+    expect(model.provider).toBe('ollama.chat');
+  });
+
+  it('should return the correct model for xai', () => {
+    const model = getModel('xai', 'grok-1');
+    expect(model).toBeDefined();
+    expect(model.provider).toBe('xai.chat');
+  });
+
+  it('should return the correct model for mistral', () => {
+    const model = getModel('mistral', 'mistral-large-latest');
+    expect(model).toBeDefined();
+    expect(model.provider).toBe('mistral.chat');
+  });
+
+  it('should throw an error for unknown provider', () => {
+    expect(() => {
+      // @ts-expect-error Testing invalid provider
+      getModel('unknown', 'model');
+    }).toThrow('Unknown model provider: unknown');
+  });
+});
diff --git a/packages/agent/src/core/toolAgent/config.ts b/packages/agent/src/core/toolAgent/config.ts
index e21a8db..d1fc8db 100644
--- a/packages/agent/src/core/toolAgent/config.ts
+++ b/packages/agent/src/core/toolAgent/config.ts
@@ -1,13 +1,20 @@
 import { execSync } from 'child_process';
 
 import { anthropic } from '@ai-sdk/anthropic';
+import { mistral } from '@ai-sdk/mistral';
 import { openai } from '@ai-sdk/openai';
+import { xai } from '@ai-sdk/xai';
 import { createOllama, ollama } from 'ollama-ai-provider';
 
 /**
  * Available model providers
  */
-export type ModelProvider = 'anthropic' | 'openai' | 'ollama';
+export type ModelProvider =
+  | 'anthropic'
+  | 'openai'
+  | 'ollama'
+  | 'xai'
+  | 'mistral';
 
 /**
  * Get the model instance based on provider and model name
@@ -29,6 +36,10 @@ export function getModel(
         })(modelName);
       }
       return ollama(modelName);
+    case 'xai':
+      return xai(modelName);
+    case 'mistral':
+      return mistral(modelName);
     default:
       throw new Error(`Unknown model provider: ${provider}`);
   }
diff --git a/packages/cli/README.md b/packages/cli/README.md
index e3daaf7..dade377 100644
--- a/packages/cli/README.md
+++ b/packages/cli/README.md
@@ -4,7 +4,7 @@ Command-line interface for AI-powered coding tasks.
 
 ## Features
 
-- 🤖 **AI-Powered**: Leverages Anthropic's Claude, OpenAI models, and Ollama for intelligent coding assistance
+- 🤖 **AI-Powered**: Leverages Anthropic's Claude, OpenAI models, xAI/Grok, Mistral AI, and Ollama for intelligent coding assistance
 - 🛠️ **Extensible Tool System**: Modular architecture with various tool categories
 - 🔄 **Parallel Execution**: Ability to spawn sub-agents for concurrent task processing
 - 📝 **Self-Modification**: Can modify code, it was built and tested by writing itself
@@ -82,7 +82,7 @@ mycoder config set modelName gpt-4o-2024-05-13
 
 ### Model Selection
 
-MyCoder supports Anthropic, OpenAI, and Ollama models. You can configure which model provider and model name to use with the following commands:
+MyCoder supports Anthropic, OpenAI, xAI/Grok, Mistral AI, and Ollama models. You can configure which model provider and model name to use with the following commands:
 
 ```bash
 # Use OpenAI models
@@ -93,6 +93,14 @@ mycoder config set modelName gpt-4o-2024-05-13 # or any other OpenAI model
 mycoder config set modelProvider anthropic
 mycoder config set modelName claude-3-7-sonnet-20250219 # or any other Anthropic model
 
+# Use xAI/Grok models
+mycoder config set modelProvider xai
+mycoder config set modelName grok-1 # or any other xAI model
+
+# Use Mistral AI models
+mycoder config set modelProvider mistral
+mycoder config set modelName mistral-large-latest # or any other Mistral model
+
 # Use Ollama models (local)
 mycoder config set modelProvider ollama
 mycoder config set modelName llama3-groq-tool-use # or any other model available in your Ollama instance
@@ -135,6 +143,8 @@ mycoder config set ollamaBaseUrl http://your-ollama-server:11434/api
 
 - `ANTHROPIC_API_KEY`: Your Anthropic API key (required when using Anthropic models)
 - `OPENAI_API_KEY`: Your OpenAI API key (required when using OpenAI models)
+- `XAI_API_KEY`: Your xAI API key (required when using xAI/Grok models)
+- `MISTRAL_API_KEY`: Your Mistral AI API key (required when using Mistral models)
 
 Note: Ollama models do not require an API key as they run locally or on a specified server.
 
diff --git a/packages/cli/src/commands/$default.ts b/packages/cli/src/commands/$default.ts
index 6cf7740..da353e1 100644
--- a/packages/cli/src/commands/$default.ts
+++ b/packages/cli/src/commands/$default.ts
@@ -107,6 +107,21 @@
           'You can get an API key from https://platform.openai.com/api-keys',
         );
         throw new Error('OpenAI API key not found');
+      } else if (userModelProvider === 'xai' && !process.env.XAI_API_KEY) {
+        logger.error(
+          'No xAI API key found. Please set the XAI_API_KEY environment variable.',
+          'You can get an API key from https://platform.xai.com',
+        );
+        throw new Error('xAI API key not found');
+      } else if (
+        userModelProvider === 'mistral' &&
+        !process.env.MISTRAL_API_KEY
+      ) {
+        logger.error(
+          'No Mistral API key found. Please set the MISTRAL_API_KEY environment variable.',
+          'You can get an API key from https://console.mistral.ai/api-keys/',
+        );
+        throw new Error('Mistral API key not found');
       }
 
       // No API key check needed for Ollama as it uses a local server
@@ -157,7 +172,12 @@
       const agentConfig = {
         ...DEFAULT_CONFIG,
         model: getModel(
-          userModelProvider as 'anthropic' | 'openai' | 'ollama',
+          userModelProvider as
+            | 'anthropic'
+            | 'openai'
+            | 'ollama'
+            | 'xai'
+            | 'mistral',
           userModelName,
           { ollamaBaseUrl: config.ollamaBaseUrl },
         ),
diff --git a/packages/cli/src/options.ts b/packages/cli/src/options.ts
index b5a12a5..cd875a5 100644
--- a/packages/cli/src/options.ts
+++ b/packages/cli/src/options.ts
@@ -22,7 +22,7 @@
   modelProvider: {
     type: 'string',
     description: 'AI model provider to use',
-    choices: ['anthropic', 'openai'],
+    choices: ['anthropic', 'openai', 'ollama', 'xai', 'mistral'],
   } as const,
   modelName: {
     type: 'string',
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 63e6459..9afb0c3 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -63,9 +63,15 @@ importers:
       '@ai-sdk/anthropic':
         specifier: ^1.1.13
         version: 1.1.13(zod@3.24.2)
+      '@ai-sdk/mistral':
+        specifier: ^1.1.13
+        version: 1.1.13(zod@3.24.2)
       '@ai-sdk/openai':
         specifier: ^1.2.0
         version: 1.2.0(zod@3.24.2)
+      '@ai-sdk/xai':
+        specifier: ^1.1.12
+        version: 1.1.12(zod@3.24.2)
       '@mozilla/readability':
         specifier: ^0.5.0
         version: 0.5.0
@@ -188,6 +194,18 @@
     peerDependencies:
       zod: ^3.0.0
 
+  '@ai-sdk/mistral@1.1.13':
+    resolution: {integrity: sha512-yiDfwX8TaNYWEwGk0FFWJVNAU6SqFjaHBHNEwSp6FP6G4YDKo5mLDeRZw3RqWOlqHVkme4PdgqhkYFl+WNt8MA==}
+    engines: {node: '>=18'}
+    peerDependencies:
+      zod: ^3.0.0
+
+  '@ai-sdk/openai-compatible@0.1.12':
+    resolution: {integrity: sha512-2bMhAEeiRz4lbW5ixjGjbPhwyqjtujkjLVpqqtqWvvUDvtUM3cw1go9pqWFgaNKSBDaXRUfi8mkAVrn1yRuY2A==}
+    engines: {node: '>=18'}
+    peerDependencies:
+      zod: ^3.0.0
+
   '@ai-sdk/openai@1.2.0':
     resolution: {integrity: sha512-tzxH6OxKL5ffts4zJPdziQSJGGpSrQcJmuSrE92jCt7pJ4PAU5Dx4tjNNFIU8lSfwarLnywejZEt3Fz0uQZZOQ==}
     engines: {node: '>=18'}
@@ -228,6 +246,12 @@
       zod:
         optional: true
 
+  '@ai-sdk/xai@1.1.12':
+    resolution: {integrity: sha512-e60KtMDOR7vGV7hPpsar4jY6sw6sUSI6zpCVDQEkVv6B0MUzD1s5DQnCvo6+hnqVjZJHgktIFvc5QwnpVZkXPw==}
+    engines: {node: '>=18'}
+    peerDependencies:
+      zod: ^3.0.0
+
   '@asamuzakjp/css-color@2.8.3':
     resolution: {integrity: sha512-GIc76d9UI1hCvOATjZPyHFmE5qhRccp3/zGfMPapK3jBi+yocEzp6BBB0UnfRYP9NP4FANqUZYb0hnfs3TM3hw==}
@@ -3294,6 +3318,18 @@ snapshots:
       '@ai-sdk/provider-utils': 2.1.10(zod@3.24.2)
       zod: 3.24.2
 
+  '@ai-sdk/mistral@1.1.13(zod@3.24.2)':
+    dependencies:
+      '@ai-sdk/provider': 1.0.9
+      '@ai-sdk/provider-utils': 2.1.10(zod@3.24.2)
+      zod: 3.24.2
+
+  '@ai-sdk/openai-compatible@0.1.12(zod@3.24.2)':
+    dependencies:
+      '@ai-sdk/provider': 1.0.9
+      '@ai-sdk/provider-utils': 2.1.10(zod@3.24.2)
+      zod: 3.24.2
+
   '@ai-sdk/openai@1.2.0(zod@3.24.2)':
     dependencies:
       '@ai-sdk/provider': 1.0.9
@@ -3331,6 +3367,13 @@
     optionalDependencies:
       zod: 3.24.2
 
+  '@ai-sdk/xai@1.1.12(zod@3.24.2)':
+    dependencies:
+      '@ai-sdk/openai-compatible': 0.1.12(zod@3.24.2)
+      '@ai-sdk/provider': 1.0.9
+      '@ai-sdk/provider-utils': 2.1.10(zod@3.24.2)
+      zod: 3.24.2
+
   '@asamuzakjp/css-color@2.8.3':
     dependencies:
       '@csstools/css-calc': 2.1.2(@csstools/css-parser-algorithms@3.0.4(@csstools/css-tokenizer@3.0.3))(@csstools/css-tokenizer@3.0.3)
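
Reviewer note: a quick way to sanity-check the new providers outside the test suite is the sketch below. It is not part of the diff; the model names are the placeholders used in `config.test.ts`, the import path is illustrative, and it assumes `XAI_API_KEY` and `MISTRAL_API_KEY` are exported as the README now documents.

```ts
import { getModel } from './packages/agent/src/core/toolAgent/config.js';

// config.test.ts asserts that the AI SDK reports the returned model's
// provider id as '<provider>.chat' for both new providers.
const xaiModel = getModel('xai', 'grok-1');
const mistralModel = getModel('mistral', 'mistral-large-latest');

console.log(xaiModel.provider); // 'xai.chat'
console.log(mistralModel.provider); // 'mistral.chat'
```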
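One follow-up worth considering: the `$default.ts` branches now repeat the same check/log/throw pattern once per provider. A table keyed by provider could replace the chain. This is purely a sketch (`keyRequirements` and `assertApiKey` are hypothetical names, the URLs are the ones used in the diff above), and the Anthropic entry would slot in with the message the file already uses.

```ts
// Hypothetical consolidation of the per-provider API key checks.
// Ollama maps to null because it targets a local server and needs no key.
const keyRequirements: Record<string, { envVar: string; docsUrl: string } | null> = {
  openai: { envVar: 'OPENAI_API_KEY', docsUrl: 'https://platform.openai.com/api-keys' },
  xai: { envVar: 'XAI_API_KEY', docsUrl: 'https://platform.xai.com' },
  mistral: { envVar: 'MISTRAL_API_KEY', docsUrl: 'https://console.mistral.ai/api-keys/' },
  ollama: null,
};

function assertApiKey(provider: string): void {
  const requirement = keyRequirements[provider];
  if (requirement && !process.env[requirement.envVar]) {
    throw new Error(
      `No ${provider} API key found. Set ${requirement.envVar} (see ${requirement.docsUrl}).`,
    );
  }
}
```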