Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 7 additions & 7 deletions packages/agent/src/core/toolAgent/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,25 +20,25 @@ export type ModelProvider =
*/
export function getModel(
provider: ModelProvider,
model: string,
options?: { ollamaBaseUrl?: string },
): LLMProvider {
switch (provider) {
case 'anthropic':
return createProvider('anthropic', model);
case 'openai':
return createProvider('openai', model);
case 'ollama':
if (options?.ollamaBaseUrl) {
return createProvider('ollama', model, {
baseUrl: options.ollamaBaseUrl,
});
}
return createProvider('ollama', model);
case 'xai':
return createProvider('xai', model);
case 'mistral':
return createProvider('mistral', model);
default:
throw new Error(`Unknown model provider: ${provider}`);
}
Expand Down
31 changes: 2 additions & 29 deletions packages/cli/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -80,9 +80,6 @@ mycoder config set githubMode true
# Reset a configuration value to its default
mycoder config clear customPrompt

# Configure model provider and model name
mycoder config set modelProvider openai
mycoder config set modelName gpt-4o-2024-05-13
```

### Model Selection
Expand All @@ -93,33 +90,9 @@ MyCoder supports Anthropic, OpenAI, xAI/Grok, Mistral AI, and Ollama models. You

```bash
# Use Anthropic models [These work the best at this time]
mycoder config set provider anthropic
mycoder config set model claude-3-7-sonnet-20250219 # or any other Anthropic model

# Use OpenAI models
mycoder config set modelProvider openai
mycoder config set modelName gpt-4o-2024-05-13 # or any other OpenAI model

# Use xAI/Grok models
mycoder config set modelProvider xai
mycoder config set modelName grok-1 # or any other xAI model

# Use Mistral AI models
mycoder config set modelProvider mistral
mycoder config set modelName mistral-large-latest # or any other Mistral model

# Use Ollama models (local)
mycoder config set modelProvider ollama
mycoder config set modelName llama3-groq-tool-use # or any other model available in your Ollama instance

# Configure custom Ollama server URL (default is http://localhost:11434/api)
mycoder config set ollamaBaseUrl http://your-ollama-server:11434/api
```

You can also specify the model provider and name directly when running a command:

```bash
mycoder --modelProvider openai --modelName gpt-4o-2024-05-13 "Your prompt here"
```

### Available Configuration Options
Expand Down
12 changes: 2 additions & 10 deletions packages/cli/src/commands/$default.ts
Original file line number Diff line number Diff line change
Expand Up @@ -96,16 +96,8 @@ export const command: CommandModule<SharedOptions, DefaultArgs> = {
tokenTracker.tokenCache =
argv.tokenCache !== undefined ? argv.tokenCache : userConfig.tokenCache;

const userModelProvider = argv.provider || userConfig.provider;
const userModelName = argv.model || userConfig.model;
const userMaxTokens = argv.maxTokens || userConfig.maxTokens;
const userTemperature = argv.temperature || userConfig.temperature;

Expand Down
15 changes: 0 additions & 15 deletions packages/cli/src/options.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,6 @@ export type SharedOptions = {
readonly sentryDsn?: string;
readonly provider?: string;
readonly model?: string;
// Legacy options - will be removed in a future version
readonly modelProvider?: string;
readonly modelName?: string;
readonly maxTokens?: number;
readonly temperature?: number;
readonly profile?: boolean;
Expand Down Expand Up @@ -41,18 +38,6 @@ export const sharedOptions = {
type: 'string',
description: 'AI model name to use',
} as const,
// Legacy options - will be removed in a future version
modelProvider: {
type: 'string',
description: 'AI model provider to use (deprecated, use provider instead)',
choices: ['anthropic', 'openai', 'ollama', 'xai', 'mistral'],
hidden: true,
} as const,
modelName: {
type: 'string',
description: 'AI model name to use (deprecated, use model instead)',
hidden: true,
} as const,
maxTokens: {
type: 'number',
description: 'Maximum number of tokens to generate',
Expand Down
3 changes: 0 additions & 3 deletions packages/cli/src/settings/config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,9 +14,6 @@ const defaultConfig = {
pageFilter: 'none' as 'simple' | 'none' | 'readability',
provider: 'anthropic',
model: 'claude-3-7-sonnet-20250219',
// Legacy names - will be removed in a future version
modelProvider: 'anthropic',
modelName: 'claude-3-7-sonnet-20250219',
maxTokens: 4096,
temperature: 0.7,
ollamaBaseUrl: 'http://localhost:11434/api',
Expand Down
16 changes: 8 additions & 8 deletions packages/cli/tests/settings/config-defaults.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -60,8 +60,8 @@ describe('Config Defaults for CLI Options', () => {
pageFilter: 'none',
provider: 'anthropic',
model: 'claude-3-7-sonnet-20250219',
ollamaBaseUrl: 'http://localhost:11434/api',
});

Expand Down Expand Up @@ -99,8 +99,8 @@ describe('Config Defaults for CLI Options', () => {
pageFilter: 'none', // Default is none
provider: 'anthropic',
model: 'claude-3-7-sonnet-20250219',
ollamaBaseUrl: 'http://localhost:11434/api',
});

Expand Down Expand Up @@ -138,8 +138,8 @@ describe('Config Defaults for CLI Options', () => {
pageFilter: 'none',
provider: 'anthropic',
model: 'claude-3-7-sonnet-20250219',
ollamaBaseUrl: 'http://localhost:11434/api',
});

Expand Down Expand Up @@ -189,8 +189,8 @@ describe('Config Defaults for CLI Options', () => {
pageFilter: 'none', // Default is none
provider: 'anthropic',
model: 'claude-3-7-sonnet-20250219',
ollamaBaseUrl: 'http://localhost:11434/api',
});

Expand Down
8 changes: 4 additions & 4 deletions packages/cli/tests/settings/config.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,8 @@ describe('Config', () => {
pageFilter: 'none',
provider: 'anthropic',
model: 'claude-3-7-sonnet-20250219',
maxTokens: 4096,
temperature: 0.7,
ollamaBaseUrl: 'http://localhost:11434/api',
Expand Down Expand Up @@ -87,8 +87,8 @@ describe('Config', () => {
pageFilter: 'none',
provider: 'anthropic',
model: 'claude-3-7-sonnet-20250219',
maxTokens: 4096,
temperature: 0.7,
ollamaBaseUrl: 'http://localhost:11434/api',
Expand Down
16 changes: 8 additions & 8 deletions packages/cli/tests/settings/configDefaults.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -60,8 +60,8 @@ describe('Config Defaults for CLI Options', () => {
pageFilter: 'none',
provider: 'anthropic',
model: 'claude-3-7-sonnet-20250219',
ollamaBaseUrl: 'http://localhost:11434/api',
});

Expand Down Expand Up @@ -99,8 +99,8 @@ describe('Config Defaults for CLI Options', () => {
pageFilter: 'none', // Default is none
provider: 'anthropic',
model: 'claude-3-7-sonnet-20250219',
ollamaBaseUrl: 'http://localhost:11434/api',
});

Expand Down Expand Up @@ -138,8 +138,8 @@ describe('Config Defaults for CLI Options', () => {
pageFilter: 'none',
provider: 'anthropic',
model: 'claude-3-7-sonnet-20250219',
ollamaBaseUrl: 'http://localhost:11434/api',
});

Expand Down Expand Up @@ -189,8 +189,8 @@ describe('Config Defaults for CLI Options', () => {
pageFilter: 'none', // Default is none
provider: 'anthropic',
model: 'claude-3-7-sonnet-20250219',
ollamaBaseUrl: 'http://localhost:11434/api',
});

Expand Down
Loading