diff --git a/.changeset/new-nails-search.md b/.changeset/new-nails-search.md new file mode 100644 index 0000000..8b9b4ff --- /dev/null +++ b/.changeset/new-nails-search.md @@ -0,0 +1,6 @@ +--- +'mycoder-agent': patch +'mycoder': patch +--- + +Add temperature and maxTokens config values diff --git a/packages/agent/src/core/toolAgent/toolAgentCore.ts b/packages/agent/src/core/toolAgent/toolAgentCore.ts index f05fdba..3cbafeb 100644 --- a/packages/agent/src/core/toolAgent/toolAgentCore.ts +++ b/packages/agent/src/core/toolAgent/toolAgentCore.ts @@ -66,6 +66,7 @@ export const toolAgent = async ( const generateTextProps = { model: config.model, temperature: config.temperature, + maxTokens: config.maxTokens, messages: messagesWithCacheControl, tools: toolSet, }; diff --git a/packages/cli/src/commands/$default.ts b/packages/cli/src/commands/$default.ts index 0dff70b..949a9fb 100644 --- a/packages/cli/src/commands/$default.ts +++ b/packages/cli/src/commands/$default.ts @@ -94,6 +94,8 @@ export const command: CommandModule = { const userConfig = getConfig(); const userModelProvider = argv.modelProvider || userConfig.modelProvider; const userModelName = argv.modelName || userConfig.modelName; + const userMaxTokens = argv.maxTokens ?? userConfig.maxTokens; + const userTemperature = argv.temperature ?? userConfig.temperature; // Early API key check based on model provider const providerSettings = @@ -166,6 +168,8 @@ export const command: CommandModule = { userModelName, { ollamaBaseUrl: config.ollamaBaseUrl }, ), + maxTokens: userMaxTokens, + temperature: userTemperature, }; const result = await toolAgent(prompt, tools, agentConfig, { diff --git a/packages/cli/src/options.ts b/packages/cli/src/options.ts index d03f5b3..9c8e37a 100644 --- a/packages/cli/src/options.ts +++ b/packages/cli/src/options.ts @@ -9,6 +9,8 @@ export type SharedOptions = { readonly sentryDsn?: string; readonly modelProvider?: string; readonly modelName?: string; + readonly maxTokens?: number; + readonly temperature?: number; readonly profile?: boolean; }; @@ -34,6 +36,14 @@ export const sharedOptions = { type: 'string', description: 'AI model name to use', } as const, + maxTokens: { + type: 'number', + description: 'Maximum number of tokens to generate', + } as const, + temperature: { + type: 'number', + description: 'Temperature for text generation (0.0-1.0)', + } as const, interactive: { type: 'boolean', alias: 'i', diff --git a/packages/cli/src/settings/config.ts b/packages/cli/src/settings/config.ts index 796e037..99ebd4c 100644 --- a/packages/cli/src/settings/config.ts +++ b/packages/cli/src/settings/config.ts @@ -14,6 +14,8 @@ const defaultConfig = { pageFilter: 'none' as 'simple' | 'none' | 'readability', modelProvider: 'anthropic', modelName: 'claude-3-7-sonnet-20250219', + maxTokens: 4096, + temperature: 0.7, ollamaBaseUrl: 'http://localhost:11434/api', customPrompt: '', profile: false, diff --git a/packages/cli/tests/settings/config.test.ts b/packages/cli/tests/settings/config.test.ts index 9af4894..6b691cb 100644 --- a/packages/cli/tests/settings/config.test.ts +++ b/packages/cli/tests/settings/config.test.ts @@ -43,6 +43,8 @@ describe('Config', () => { pageFilter: 'none', modelProvider: 'anthropic', modelName: 'claude-3-7-sonnet-20250219', + maxTokens: 4096, + temperature: 0.7, ollamaBaseUrl: 'http://localhost:11434/api', profile: false, customPrompt: '', @@ -77,6 +79,8 @@ describe('Config', () => { pageFilter: 'none', modelProvider: 'anthropic', modelName: 'claude-3-7-sonnet-20250219', + maxTokens: 4096, + temperature: 0.7, ollamaBaseUrl: 'http://localhost:11434/api', profile: false, customPrompt: '',