
Commit 5ef8859

Remove hardcoded model list and validation (#85)
1 parent b23dd28 commit 5ef8859

3 files changed: +7 additions, −34 deletions

packages/agent/src/core/toolAgent/config.ts

Lines changed: 0 additions & 9 deletions
@@ -9,15 +9,6 @@ import { createOllama, ollama } from 'ollama-ai-provider';
  */
 export type ModelProvider = 'anthropic' | 'openai' | 'ollama';
 
-/**
- * Available models by provider
- */
-export const AVAILABLE_MODELS = {
-  anthropic: ['claude-3-7-sonnet-20250219', 'claude-3-opus-20240229'],
-  openai: ['gpt-4o-2024-05-13', 'o3-mini-2024-07-18'],
-  ollama: ['llama3-groq-tool-use'],
-};
-
 /**
  * Get the model instance based on provider and model name
  */
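With the allow-list gone, `getModel` simply forwards whatever provider/model pair the user configured to the corresponding SDK factory. The snippet below is a hypothetical sketch of that shape, not the code in this commit: only the `ollama-ai-provider` import is visible in the diff, and the `@ai-sdk/anthropic` / `@ai-sdk/openai` imports, the `ollamaBaseUrl` parameter, and the factory calls are assumptions.

```typescript
// Hypothetical sketch of getModel after this change: no validation, any model
// name is passed straight to the provider factory. Imports other than
// ollama-ai-provider are assumptions about the rest of the codebase.
import { anthropic } from '@ai-sdk/anthropic';
import { openai } from '@ai-sdk/openai';
import { createOllama, ollama } from 'ollama-ai-provider';

export type ModelProvider = 'anthropic' | 'openai' | 'ollama';

export function getModel(
  provider: ModelProvider,
  modelName: string,
  ollamaBaseUrl?: string, // e.g. the ollamaBaseUrl config value from the README
) {
  switch (provider) {
    case 'anthropic':
      return anthropic(modelName);
    case 'openai':
      return openai(modelName);
    case 'ollama':
      // A custom Ollama server URL needs its own provider instance.
      return ollamaBaseUrl
        ? createOllama({ baseURL: ollamaBaseUrl })(modelName)
        : ollama(modelName);
    default:
      throw new Error(`Unknown model provider: ${provider}`);
  }
}
```

The point of the commit is simply the absence of any `AVAILABLE_MODELS` lookup before these factory calls.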

packages/cli/README.md

Lines changed: 7 additions & 15 deletions
@@ -82,28 +82,20 @@ mycoder config set modelName gpt-4o-2024-05-13
 
 ### Model Selection
 
-MyCoder supports Anthropic, OpenAI, and Ollama models. You can configure which model to use with the following commands:
+MyCoder supports Anthropic, OpenAI, and Ollama models. You can configure which model provider and model name to use with the following commands:
 
 ```bash
-# Use OpenAI's GPT-4o model
+# Use OpenAI models
 mycoder config set modelProvider openai
-mycoder config set modelName gpt-4o-2024-05-13
-
-# Use OpenAI's o3-mini model
-mycoder config set modelProvider openai
-mycoder config set modelName o3-mini-2024-07-18
-
-# Use Anthropic's Claude 3.7 Sonnet model
-mycoder config set modelProvider anthropic
-mycoder config set modelName claude-3-7-sonnet-20250219
+mycoder config set modelName gpt-4o-2024-05-13 # or any other OpenAI model
 
-# Use Anthropic's Claude 3 Opus model
+# Use Anthropic models
 mycoder config set modelProvider anthropic
-mycoder config set modelName claude-3-opus-20240229
+mycoder config set modelName claude-3-7-sonnet-20250219 # or any other Anthropic model
 
-# Use Ollama's llama3-groq-tool-use model (local)
+# Use Ollama models (local)
 mycoder config set modelProvider ollama
-mycoder config set modelName llama3-groq-tool-use
+mycoder config set modelName llama3-groq-tool-use # or any other model available in your Ollama instance
 
 # Configure custom Ollama server URL (default is http://localhost:11434/api)
 mycoder config set ollamaBaseUrl http://your-ollama-server:11434/api

packages/cli/src/commands/$default.ts

Lines changed: 0 additions & 10 deletions
@@ -12,7 +12,6 @@ import {
   subAgentTool,
   errorToString,
   getModel,
-  AVAILABLE_MODELS,
   DEFAULT_CONFIG,
 } from 'mycoder-agent';
 import { TokenTracker } from 'mycoder-agent/dist/core/tokens.js';
@@ -111,15 +110,6 @@ export const command: CommandModule<SharedOptions, DefaultArgs> = {
     }
     // No API key check needed for Ollama as it uses a local server
 
-    // Validate model name
-    if (!AVAILABLE_MODELS[userModelProvider].includes(userModelName)) {
-      logger.error(
-        `Invalid model name: ${userModelName} for provider ${userModelProvider}`,
-        `Available models for ${userModelProvider}: ${AVAILABLE_MODELS[userModelProvider].join(', ')}`,
-      );
-      throw new Error(`Invalid model name: ${userModelName}`);
-    }
-
     let prompt: string | undefined;
 
     // If promptFile is specified, read from file
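Because the CLI no longer checks the name locally, a typo or an unsupported model is only rejected when a request actually reaches the provider. Below is a hypothetical sketch of where that error now surfaces, reusing `getModel` and `errorToString` from the import block above; the `generateText` call from the Vercel AI SDK, the two-argument `getModel` signature, and the literal model name are assumptions, not part of this commit.

```typescript
// Hypothetical: the configured model name is handed to the provider unchecked.
import { generateText } from 'ai'; // assumed AI SDK dependency
import { getModel, errorToString } from 'mycoder-agent';

const model = getModel('openai', 'some-model-the-old-list-never-knew-about');

try {
  // With the allow-list check removed, an unknown model name fails here, as a
  // provider API error at request time, instead of failing CLI-side up front.
  await generateText({ model, prompt: 'Say hello.' });
} catch (error) {
  console.error(errorToString(error));
}
```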
