diff --git a/packages/agent/src/core/toolAgent/toolAgentCore.ts b/packages/agent/src/core/toolAgent/toolAgentCore.ts
index a472f07..f05fdba 100644
--- a/packages/agent/src/core/toolAgent/toolAgentCore.ts
+++ b/packages/agent/src/core/toolAgent/toolAgentCore.ts
@@ -1,7 +1,5 @@
 import { CoreMessage, ToolSet, generateText, tool as makeTool } from 'ai';
 
-import { getAnthropicApiKeyError } from '../../utils/errors.js';
-
 import { DEFAULT_CONFIG } from './config.js';
 import {
   addCacheControlToMessages,
@@ -30,9 +28,6 @@ export const toolAgent = async (
 
   let interactions = 0;
 
-  const apiKey = process.env.ANTHROPIC_API_KEY;
-  if (!apiKey) throw new Error(getAnthropicApiKeyError());
-
   const messages: CoreMessage[] = [
     {
       role: 'user',
diff --git a/packages/agent/src/utils/errors.ts b/packages/agent/src/utils/errors.ts
index f1960ea..5751c44 100644
--- a/packages/agent/src/utils/errors.ts
+++ b/packages/agent/src/utils/errors.ts
@@ -1,10 +1,65 @@
-export const getAnthropicApiKeyError = () => `
-Error: ANTHROPIC_API_KEY environment variable is not set
+// Provider configuration map
+export const providerConfig: Record<
+  string,
+  { keyName: string; docsUrl: string } | undefined
+> = {
+  anthropic: {
+    keyName: 'ANTHROPIC_API_KEY',
+    docsUrl: 'https://mycoder.ai/docs/getting-started/anthropic',
+  },
+  openai: {
+    keyName: 'OPENAI_API_KEY',
+    docsUrl: 'https://mycoder.ai/docs/getting-started/openai',
+  },
+  xai: {
+    keyName: 'XAI_API_KEY',
+    docsUrl: 'https://mycoder.ai/docs/getting-started/xai',
+  },
+  mistral: {
+    keyName: 'MISTRAL_API_KEY',
+    docsUrl: 'https://mycoder.ai/docs/getting-started/mistral',
+  },
+  // No API key needed for ollama as it uses a local server
+  ollama: undefined,
+};
 
-Before using MyCoder, you must have an ANTHROPIC_API_KEY specified either:
+/**
+ * Generates a provider-specific API key error message
+ * @param provider The LLM provider name
+ * @returns Error message with provider-specific instructions
+ */
+export const getProviderApiKeyError = (provider: string): string => {
+  const config = providerConfig[provider];
 
-- As an environment variable, "export ANTHROPIC_API_KEY=[your-api-key]" or
+  if (!config) {
+    return `Unknown provider: ${provider}`;
+  }
+
+  const { keyName, docsUrl } = config;
+  const platform = process.platform;
+  let osSpecificInstructions = '';
+
+  if (platform === 'win32') {
+    osSpecificInstructions = `- Using the windows command prompt, "setx ${keyName}=[your-api-key]"`;
+  } else if (platform === 'darwin' || platform === 'linux') {
+    osSpecificInstructions = `- As an environment variable, "export ${keyName}=[your-api-key]"`;
+  } else {
+    osSpecificInstructions = `- As an environment variable (platform-specific command)`;
+  }
+
+  return `
+Error: ${keyName} environment variable is not set
+
+Before using MyCoder with ${provider} models, you must have a ${keyName} specified.
+
+You can set it via:
+${osSpecificInstructions}
 - In a .env file in the folder you run "mycoder" from
 
-Get an API key from https://www.anthropic.com/api
+For setup instructions, visit: ${docsUrl}
 `;
+};
+
+// Legacy function for backward compatibility
+export const getAnthropicApiKeyError = () =>
+  getProviderApiKeyError('anthropic');
diff --git a/packages/cli/src/commands/$default.ts b/packages/cli/src/commands/$default.ts
index 0530f34..0dff70b 100644
--- a/packages/cli/src/commands/$default.ts
+++ b/packages/cli/src/commands/$default.ts
@@ -6,7 +6,8 @@ import {
   toolAgent,
   Logger,
   getTools,
-  getAnthropicApiKeyError,
+  getProviderApiKeyError,
+  providerConfig,
   userPrompt,
   LogLevel,
   subAgentTool,
@@ -95,33 +96,17 @@
     const userModelName = argv.modelName || userConfig.modelName;
 
     // Early API key check based on model provider
-    if (userModelProvider === 'anthropic' && !process.env.ANTHROPIC_API_KEY) {
-      logger.error(getAnthropicApiKeyError());
-      throw new Error('Anthropic API key not found');
-    } else if (
-      userModelProvider === 'openai' &&
-      !process.env.OPENAI_API_KEY
-    ) {
-      logger.error(
-        'No OpenAI API key found. Please set the OPENAI_API_KEY environment variable.',
-        'You can get an API key from https://platform.openai.com/api-keys',
-      );
-      throw new Error('OpenAI API key not found');
-    } else if (userModelProvider === 'xai' && !process.env.XAI_API_KEY) {
-      logger.error(
-        'No xAI API key found. Please set the XAI_API_KEY environment variable.',
-        'You can get an API key from https://platform.xai.com',
-      );
-      throw new Error('xAI API key not found');
-    } else if (
-      userModelProvider === 'mistral' &&
-      !process.env.MISTRAL_API_KEY
-    ) {
-      logger.error(
-        'No Mistral API key found. Please set the MISTRAL_API_KEY environment variable.',
-        'You can get an API key from https://console.mistral.ai/api-keys/',
-      );
-      throw new Error('Mistral API key not found');
+    const providerSettings =
+      providerConfig[userModelProvider as keyof typeof providerConfig];
+
+    if (providerSettings) {
+      const { keyName } = providerSettings;
+      const apiKey = process.env[keyName];
+
+      if (!apiKey) {
+        logger.error(getProviderApiKeyError(userModelProvider));
+        throw new Error(`${userModelProvider} API key not found`);
+      }
     }
 
     // No API key check needed for Ollama as it uses a local server
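Usage sketch (not part of the patch): the $default.ts hunk above shows the intended call pattern; the standalone helper below restates it outside the yargs handler for clarity. The requireApiKey name and the 'mycoder-agent' import specifier are illustrative assumptions, while providerConfig and getProviderApiKeyError are the exports added in packages/agent/src/utils/errors.ts.

import { getProviderApiKeyError, providerConfig } from 'mycoder-agent';

// Throws with provider-specific setup instructions when the key is missing;
// returns undefined for providers (e.g. ollama) that need no API key.
export function requireApiKey(provider: string): string | undefined {
  const settings = providerConfig[provider];
  if (!settings) return undefined;

  const apiKey = process.env[settings.keyName];
  if (!apiKey) {
    throw new Error(getProviderApiKeyError(provider));
  }
  return apiKey;
}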