diff --git a/packages/types/src/global-settings.ts b/packages/types/src/global-settings.ts index f1c4b81c48..7d2feafa21 100644 --- a/packages/types/src/global-settings.ts +++ b/packages/types/src/global-settings.ts @@ -182,6 +182,7 @@ export const SECRET_STATE_KEYS = [ "geminiApiKey", "openAiNativeApiKey", "cerebrasApiKey", + "cometApiKey", "deepSeekApiKey", "doubaoApiKey", "moonshotApiKey", diff --git a/packages/types/src/provider-settings.ts b/packages/types/src/provider-settings.ts index 4dfeacbf07..c93a4f8e46 100644 --- a/packages/types/src/provider-settings.ts +++ b/packages/types/src/provider-settings.ts @@ -8,6 +8,7 @@ import { cerebrasModels, chutesModels, claudeCodeModels, + cometApiModels, deepSeekModels, doubaoModels, featherlessModels, @@ -34,6 +35,7 @@ import { export const providerNames = [ "anthropic", "claude-code", + "cometapi", "glama", "openrouter", "bedrock", @@ -132,6 +134,12 @@ const claudeCodeSchema = apiModelIdProviderModelSchema.extend({ claudeCodeMaxOutputTokens: z.number().int().min(1).max(200000).optional(), }) +const cometApiSchema = baseProviderSettingsSchema.extend({ + cometApiKey: z.string().optional(), + cometApiModelId: z.string().optional(), + cometApiBaseUrl: z.string().optional(), +}) + const glamaSchema = baseProviderSettingsSchema.extend({ glamaModelId: z.string().optional(), glamaApiKey: z.string().optional(), @@ -346,6 +354,7 @@ const defaultSchema = z.object({ export const providerSettingsSchemaDiscriminated = z.discriminatedUnion("apiProvider", [ anthropicSchema.merge(z.object({ apiProvider: z.literal("anthropic") })), claudeCodeSchema.merge(z.object({ apiProvider: z.literal("claude-code") })), + cometApiSchema.merge(z.object({ apiProvider: z.literal("cometapi") })), glamaSchema.merge(z.object({ apiProvider: z.literal("glama") })), openRouterSchema.merge(z.object({ apiProvider: z.literal("openrouter") })), bedrockSchema.merge(z.object({ apiProvider: z.literal("bedrock") })), @@ -387,6 +396,7 @@ export const 
providerSettingsSchema = z.object({ apiProvider: providerNamesSchema.optional(), ...anthropicSchema.shape, ...claudeCodeSchema.shape, + ...cometApiSchema.shape, ...glamaSchema.shape, ...openRouterSchema.shape, ...bedrockSchema.shape, @@ -503,6 +513,11 @@ export const MODELS_BY_PROVIDER: Record< models: Object.keys(chutesModels), }, "claude-code": { id: "claude-code", label: "Claude Code", models: Object.keys(claudeCodeModels) }, + cometapi: { + id: "cometapi", + label: "CometAPI", + models: Object.keys(cometApiModels), + }, deepseek: { id: "deepseek", label: "DeepSeek", @@ -577,6 +592,7 @@ export const MODELS_BY_PROVIDER: Record< } export const dynamicProviders = [ + "cometapi", "glama", "huggingface", "litellm", diff --git a/packages/types/src/providers/cometapi.ts b/packages/types/src/providers/cometapi.ts new file mode 100644 index 0000000000..ea0ddad34c --- /dev/null +++ b/packages/types/src/providers/cometapi.ts @@ -0,0 +1,51 @@ +import type { ModelInfo } from "../model.js" + +export type CometAPIModelId = string + +export const cometApiDefaultModelId: CometAPIModelId = "claude-sonnet-4-20250514" + +export const cometApiDefaultModelInfo: ModelInfo = { + maxTokens: undefined, // Let system determine based on contextWindow + contextWindow: 200000, // Reasonable default for modern models + supportsImages: false, + supportsPromptCache: false, + // Intentionally not setting inputPrice/outputPrice +} + +// Fallback models for when API is unavailable +// Small helper to create a map of id -> default info +const createModelMap = (ids: readonly CometAPIModelId[]): Record => + Object.fromEntries(ids.map((id) => [id, { ...cometApiDefaultModelInfo }])) as Record + +// Single, complete list for readability and easy maintenance +const COMET_FALLBACK_MODEL_IDS = [ + // OpenAI series + "gpt-5-chat-latest", + "gpt-5-mini", + "gpt-5-nano", + "gpt-4.1-mini", + "gpt-4o-mini", + + // Claude series + "claude-opus-4-1-20250805", + "claude-sonnet-4-20250514", + 
"claude-3-7-sonnet-latest", + "claude-3-5-haiku-latest", + + // Gemini series + "gemini-2.5-pro", + "gemini-2.5-flash", + "gemini-2.0-flash", + + // DeepSeek series + "deepseek-v3.1", + "deepseek-r1-0528", + "deepseek-reasoner", + + // Other models + "grok-4-0709", + "qwen3-30b-a3b", + "qwen3-coder-plus-2025-07-22", +] as const satisfies readonly CometAPIModelId[] + +export const cometApiModels: Record = createModelMap(COMET_FALLBACK_MODEL_IDS) diff --git a/packages/types/src/providers/index.ts b/packages/types/src/providers/index.ts index 21e43aaa99..6cdc4f4d1f 100644 --- a/packages/types/src/providers/index.ts +++ b/packages/types/src/providers/index.ts @@ -3,6 +3,7 @@ export * from "./bedrock.js" export * from "./cerebras.js" export * from "./chutes.js" export * from "./claude-code.js" +export * from "./cometapi.js" export * from "./deepseek.js" export * from "./doubao.js" export * from "./featherless.js" diff --git a/src/api/index.ts b/src/api/index.ts index ac00967676..6a05277005 100644 --- a/src/api/index.ts +++ b/src/api/index.ts @@ -40,6 +40,7 @@ import { FeatherlessHandler, VercelAiGatewayHandler, DeepInfraHandler, + CometAPIHandler, } from "./providers" import { NativeOllamaHandler } from "./providers/native-ollama" @@ -141,6 +142,8 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler { return new GroqHandler(options) case "deepinfra": return new DeepInfraHandler(options) + case "cometapi": + return new CometAPIHandler(options) case "huggingface": return new HuggingFaceHandler(options) case "chutes": diff --git a/src/api/providers/cometapi.ts b/src/api/providers/cometapi.ts new file mode 100644 index 0000000000..77a620a294 --- /dev/null +++ b/src/api/providers/cometapi.ts @@ -0,0 +1,148 @@ +import { Anthropic } from "@anthropic-ai/sdk" +import OpenAI from "openai" + +import { cometApiDefaultModelId, cometApiDefaultModelInfo, cometApiModels } from "@roo-code/types" + +import type { ApiHandlerOptions } from "../../shared/api" 
+import { calculateApiCostOpenAI } from "../../shared/cost" + +import { ApiStream, ApiStreamUsageChunk } from "../transform/stream" +import { convertToOpenAiMessages } from "../transform/openai-format" +import { getModelParams } from "../transform/model-params" + +import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index" +import { RouterProvider } from "./router-provider" + +export class CometAPIHandler extends RouterProvider implements SingleCompletionHandler { + constructor(options: ApiHandlerOptions) { + super({ + options: { + ...options, + // Add custom headers for CometAPI + openAiHeaders: { + "HTTP-Referer": "https://github.com/RooCodeInc/Roo-Code", + "X-Title": "Roo Code", + ...(options.openAiHeaders || {}), + }, + }, + name: "cometapi", + baseURL: options.cometApiBaseUrl || "https://api.cometapi.com/v1", + apiKey: options.cometApiKey || "not-provided", + modelId: options.cometApiModelId, + defaultModelId: cometApiDefaultModelId, + defaultModelInfo: cometApiDefaultModelInfo, + }) + + // Initialize with fallback models to ensure we always have models available + this.models = { ...cometApiModels } + } + + public override async fetchModel() { + // Fetch dynamic models from API, but keep fallback models if API fails + try { + const apiModels = await super.fetchModel() + // Merge API models with fallback models + this.models = { ...cometApiModels, ...this.models } + return apiModels + } catch (error) { + console.warn("CometAPI: Failed to fetch models from API, using fallback models", error) + // Return default model using fallback models + return this.getModel() + } + } + + override getModel() { + const id = this.options.cometApiModelId ?? cometApiDefaultModelId + const info = this.models[id] ?? 
cometApiDefaultModelInfo + + const params = getModelParams({ + format: "openai", + modelId: id, + model: info, + settings: this.options, + }) + + return { id, info, ...params } + } + + override async *createMessage( + systemPrompt: string, + messages: Anthropic.Messages.MessageParam[], + _metadata?: ApiHandlerCreateMessageMetadata, + ): ApiStream { + // Ensure we have up-to-date model metadata + await this.fetchModel() + const { id: modelId, info, reasoningEffort } = this.getModel() + + const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = { + model: modelId, + messages: [{ role: "system", content: systemPrompt }, ...convertToOpenAiMessages(messages)], + stream: true, + stream_options: { include_usage: true }, + reasoning_effort: reasoningEffort, + } as OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming + + if (this.supportsTemperature(modelId)) { + requestOptions.temperature = this.options.modelTemperature ?? 0 + } + + if (this.options.includeMaxTokens === true && info.maxTokens) { + ;(requestOptions as any).max_completion_tokens = this.options.modelMaxTokens || info.maxTokens + } + + const { data: stream } = await this.client.chat.completions.create(requestOptions).withResponse() + + let lastUsage: OpenAI.CompletionUsage | undefined + for await (const chunk of stream) { + const delta = chunk.choices[0]?.delta + + if (delta?.content) { + yield { type: "text", text: delta.content } + } + + if (delta && "reasoning_content" in delta && delta.reasoning_content) { + yield { type: "reasoning", text: (delta.reasoning_content as string | undefined) || "" } + } + + if (chunk.usage) { + lastUsage = chunk.usage + } + } + + if (lastUsage) { + const inputTokens = lastUsage.prompt_tokens || 0 + const outputTokens = lastUsage.completion_tokens || 0 + // OpenAI-style usage reports cached prompt tokens (cache reads) in prompt_tokens_details.cached_tokens; no cache-write metric is provided + const cacheWriteTokens = 0 + const cacheReadTokens = lastUsage.prompt_tokens_details?.cached_tokens || 0 + + const totalCost = calculateApiCostOpenAI(info, inputTokens, outputTokens, 
cacheWriteTokens, cacheReadTokens) + + const usage: ApiStreamUsageChunk = { + type: "usage", + inputTokens, + outputTokens, + cacheWriteTokens: cacheWriteTokens || undefined, + cacheReadTokens: cacheReadTokens || undefined, + totalCost, + } + + yield usage + } + } + + async completePrompt(prompt: string): Promise { + const { id: modelId } = this.getModel() + + try { + const response = await this.client.chat.completions.create({ + model: modelId, + messages: [{ role: "user", content: prompt }], + stream: false, + }) + + return response.choices[0]?.message?.content || "" + } catch (error) { + throw new Error(`CometAPI completion error: ${error}`) + } + } +} diff --git a/src/api/providers/fetchers/cometapi.ts b/src/api/providers/fetchers/cometapi.ts new file mode 100644 index 0000000000..8255747b1f --- /dev/null +++ b/src/api/providers/fetchers/cometapi.ts @@ -0,0 +1,199 @@ +import axios from "axios" +import { z } from "zod" + +import type { ModelInfo } from "@roo-code/types" + +import { DEFAULT_HEADERS } from "../constants" + +/** + * CometAPI Models Response Schema + * Be lenient: CometAPI returns { success: true, data: [...] } and may omit OpenAI's `object: "list"`. + */ +// Ignore patterns for non-chat or unsupported models + +// this should be changed to accurately filter chat models based on server-side fields, and remove the local regex-based ignore list and loose parsing logic. 
+ +// TODO(CometAPI): After the official model list interface is upgraded (returning richer type/capability fields or stable OpenAI compatible format) +const COMETAPI_IGNORE_PATTERNS = [ + // Image generation models + "dall-e", + "dalle", + "midjourney", + "mj_", + "stable-diffusion", + "sd-", + "flux-", + "playground-v", + "ideogram", + "recraft-", + "black-forest-labs", + "/recraft-v3", + "recraftv3", + "stability-ai/", + "sdxl", + // Audio generation models + "suno_", + "tts", + "whisper", + // Video generation models + "runway", + "luma_", + "luma-", + "veo", + "kling_", + "minimax_video", + "hunyuan-t1", + // Utility models + "embedding", + "search-gpts", + "files_retrieve", + "moderation", +] + +function escapeRegex(str: string) { + return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&") +} + +const COMETAPI_IGNORE_REGEX = new RegExp(COMETAPI_IGNORE_PATTERNS.map((p) => escapeRegex(p)).join("|"), "i") + +const cometApiModelSchema = z + .object({ + id: z.string(), + object: z.literal("model").optional(), + created: z.number().optional(), + owned_by: z.string().optional(), + // Additional optional fields that CometAPI might provide in the future + max_tokens: z.number().optional(), + max_input_tokens: z.number().optional(), + context_length: z.number().optional(), + }) + .passthrough() + +// Support both OpenAI-like and CometAPI's { success, data } shapes +const cometApiModelsResponseSchema = z.union([ + z.object({ + object: z.literal("list").optional(), + data: z.array(cometApiModelSchema), + }), + z.object({ + success: z.boolean(), + data: z.array(cometApiModelSchema), + }), +]) + +type CometApiModel = z.infer +type CometApiModelsResponse = z.infer + +/** + * Fetch models from CometAPI + */ +export async function getCometApiModels( + apiKey?: string, + baseUrl: string = "https://api.cometapi.com/v1", +): Promise> { + const models: Record = {} + + if (!apiKey || apiKey === "not-provided") { + console.warn("CometAPI: No valid API key provided, skipping model fetch") + 
return models + } + + if (!baseUrl) { + console.error("CometAPI: No base URL provided") + throw new Error("CometAPI: Base URL is required to fetch models") + } + + try { + const headers: Record = { + ...DEFAULT_HEADERS, + Authorization: `Bearer ${apiKey}`, + Accept: "application/json", + } + + const url = `${baseUrl.replace(/\/$/, "")}/models` + + const response = await axios.get(url, { + headers, + timeout: 15000, // Increased timeout for better reliability + }) + + const parsed = cometApiModelsResponseSchema.safeParse(response.data) + const data = parsed.success ? (parsed.data as any).data : (response.data as any)?.data || [] + + if (!parsed.success) { + console.warn("CometAPI: Unexpected models response shape; proceeding with best-effort parsing.") + console.warn("CometAPI: Zod error:", parsed.error?.format?.()) + console.warn("CometAPI: Raw response data:", response.data) + } + + // Process the model info from the response - similar to DeepInfra/LiteLLM approach + for (const model of data as Array) { + if (!model || typeof model.id !== "string") continue + + // Filter out ignored patterns (non-chat models or unsupported utilities) + if (COMETAPI_IGNORE_REGEX.test(model.id)) { + continue + } + models[model.id] = { + maxTokens: model.max_tokens || undefined, + contextWindow: model.max_input_tokens || model.context_length || 200000, + supportsImages: false, // Not specified in CometAPI models endpoint + supportsPromptCache: false, // Not specified in CometAPI models endpoint + // Intentionally not setting inputPrice/outputPrice as CometAPI doesn't provide this info + } + } + } catch (error) { + if (axios.isAxiosError(error)) { + const status = error.response?.status + const statusText = error.response?.statusText || "" + const code = (error as any).code as string | undefined + + console.error(`CometAPI: API request failed`, { + status, + statusText, + code, + // Do not log headers or API keys + url: error.config?.url, + timeout: error.config?.timeout, + }) + if 
(error.response) { + console.error(`CometAPI: Response data:`, error.response.data) + } + + let message: string + if (typeof status === "number") { + if (status === 401 || status === 403) { + message = `CometAPI authentication failed (${status}). Please verify your API key and permissions.` + } else if (status === 429) { + message = `CometAPI rate limit exceeded (429). Please slow down or check your plan limits.` + } else if (status >= 500) { + message = `CometAPI server error (${status} ${statusText}). Please try again later.` + } else { + message = `CometAPI request failed (${status} ${statusText}).` + } + } else { + // No HTTP response received: network, DNS, timeout, etc. + if (code === "ECONNABORTED" || code === "ETIMEDOUT" || /timeout/i.test(error.message || "")) { + message = `CometAPI request timed out. Please check your network and base URL (${baseUrl}).` + } else if (code === "ENOTFOUND" || code === "EAI_AGAIN") { + message = `DNS lookup failed for ${baseUrl}. Please verify the domain and your DNS/network settings.` + } else if (code === "ECONNREFUSED") { + message = `Connection refused by ${baseUrl}. Is the service reachable from your network?` + } else if (error.request) { + message = `No response from CometAPI at ${baseUrl}. Please check your network connectivity.` + } else { + message = `Failed to initiate CometAPI request: ${error.message || "Unknown network error"}.` + } + } + + throw new Error(message) + } else { + console.error(`CometAPI: Error fetching models:`, error) + throw new Error( + `Failed to fetch CometAPI models: ${error instanceof Error ? 
error.message : "An unknown error occurred."}`, + ) + } + } + + return models +} diff --git a/src/api/providers/fetchers/modelCache.ts b/src/api/providers/fetchers/modelCache.ts index a91cdaf994..d569ead56b 100644 --- a/src/api/providers/fetchers/modelCache.ts +++ b/src/api/providers/fetchers/modelCache.ts @@ -20,6 +20,7 @@ import { getOllamaModels } from "./ollama" import { getLMStudioModels } from "./lmstudio" import { getIOIntelligenceModels } from "./io-intelligence" import { getDeepInfraModels } from "./deepinfra" +import { getCometApiModels } from "./cometapi" const memoryCache = new NodeCache({ stdTTL: 5 * 60, checkperiod: 5 * 60 }) async function writeModels(router: RouterName, data: ModelRecord) { @@ -89,6 +90,9 @@ export const getModels = async (options: GetModelsOptions): Promise case "vercel-ai-gateway": models = await getVercelAiGatewayModels() break + case "cometapi": + models = await getCometApiModels(options.apiKey, options.baseUrl) + break default: { // Ensures router is exhaustively checked if RouterName is a strict union const exhaustiveCheck: never = provider diff --git a/src/api/providers/index.ts b/src/api/providers/index.ts index 85d877b6bc..5984d74361 100644 --- a/src/api/providers/index.ts +++ b/src/api/providers/index.ts @@ -4,6 +4,7 @@ export { AwsBedrockHandler } from "./bedrock" export { CerebrasHandler } from "./cerebras" export { ChutesHandler } from "./chutes" export { ClaudeCodeHandler } from "./claude-code" +export { CometAPIHandler } from "./cometapi" export { DeepSeekHandler } from "./deepseek" export { DoubaoHandler } from "./doubao" export { MoonshotHandler } from "./moonshot" diff --git a/src/core/webview/webviewMessageHandler.ts b/src/core/webview/webviewMessageHandler.ts index bd842a08b1..872038d6aa 100644 --- a/src/core/webview/webviewMessageHandler.ts +++ b/src/core/webview/webviewMessageHandler.ts @@ -609,6 +609,19 @@ export const webviewMessageHandler = async ( }) } + // Add CometAPI if API key is provided + const 
cometApiKey = apiConfiguration.cometApiKey + if (cometApiKey) { + modelFetchPromises.push({ + key: "cometapi", + options: { + provider: "cometapi", + apiKey: cometApiKey, + baseUrl: apiConfiguration.cometApiBaseUrl || "https://api.cometapi.com/v1", + }, + }) + } + const results = await Promise.allSettled( modelFetchPromises.map(async ({ key, options }) => { const models = await safeGetModels(options) diff --git a/src/shared/ProfileValidator.ts b/src/shared/ProfileValidator.ts index 78ff6ed9fe..7b27eaeda0 100644 --- a/src/shared/ProfileValidator.ts +++ b/src/shared/ProfileValidator.ts @@ -92,6 +92,8 @@ export class ProfileValidator { return profile.ioIntelligenceModelId case "deepinfra": return profile.deepInfraModelId + case "cometapi": + return profile.cometApiModelId case "human-relay": case "fake-ai": default: diff --git a/src/shared/api.ts b/src/shared/api.ts index eb3ae124a8..8e86ae7204 100644 --- a/src/shared/api.ts +++ b/src/shared/api.ts @@ -29,6 +29,7 @@ const routerNames = [ "io-intelligence", "deepinfra", "vercel-ai-gateway", + "cometapi", ] as const export type RouterName = (typeof routerNames)[number] @@ -145,6 +146,7 @@ export const getModelMaxOutputTokens = ({ // GetModelsOptions export type GetModelsOptions = + | { provider: "cometapi"; apiKey?: string; baseUrl?: string } | { provider: "openrouter" } | { provider: "glama" } | { provider: "requesty"; apiKey?: string; baseUrl?: string } diff --git a/webview-ui/src/components/settings/ApiOptions.tsx b/webview-ui/src/components/settings/ApiOptions.tsx index 7f2ac4ed7a..02472b1970 100644 --- a/webview-ui/src/components/settings/ApiOptions.tsx +++ b/webview-ui/src/components/settings/ApiOptions.tsx @@ -37,6 +37,7 @@ import { rooDefaultModelId, vercelAiGatewayDefaultModelId, deepInfraDefaultModelId, + cometApiDefaultModelId, } from "@roo-code/types" import { vscode } from "@src/utils/vscode" @@ -68,6 +69,7 @@ import { Cerebras, Chutes, ClaudeCode, + CometAPI, DeepSeek, Doubao, Gemini, @@ -230,6 +232,8 @@ 
const ApiOptions = ({ vscode.postMessage({ type: "requestRouterModels" }) } else if (selectedProvider === "deepinfra") { vscode.postMessage({ type: "requestRouterModels" }) + } else if (selectedProvider === "cometapi") { + vscode.postMessage({ type: "requestRouterModels" }) } }, 250, @@ -244,6 +248,7 @@ const ApiOptions = ({ apiConfiguration?.litellmApiKey, apiConfiguration?.deepInfraApiKey, apiConfiguration?.deepInfraBaseUrl, + apiConfiguration?.cometApiKey, customHeaders, ], ) @@ -312,6 +317,7 @@ const ApiOptions = ({ > > = { deepinfra: { field: "deepInfraModelId", default: deepInfraDefaultModelId }, + cometapi: { field: "cometApiModelId", default: cometApiDefaultModelId }, openrouter: { field: "openRouterModelId", default: openRouterDefaultModelId }, glama: { field: "glamaModelId", default: glamaDefaultModelId }, unbound: { field: "unboundModelId", default: unboundDefaultModelId }, @@ -505,6 +511,17 @@ const ApiOptions = ({ /> )} + {selectedProvider === "cometapi" && ( + + )} + {selectedProvider === "anthropic" && ( )} diff --git a/webview-ui/src/components/settings/ModelPicker.tsx b/webview-ui/src/components/settings/ModelPicker.tsx index 74e3d31f00..ccaabd2366 100644 --- a/webview-ui/src/components/settings/ModelPicker.tsx +++ b/webview-ui/src/components/settings/ModelPicker.tsx @@ -37,6 +37,7 @@ type ModelIdKey = keyof Pick< | "deepInfraModelId" | "ioIntelligenceModelId" | "vercelAiGatewayModelId" + | "cometApiModelId" > interface ModelPickerProps { diff --git a/webview-ui/src/components/settings/constants.ts b/webview-ui/src/components/settings/constants.ts index ae336730ff..1c321c2a9b 100644 --- a/webview-ui/src/components/settings/constants.ts +++ b/webview-ui/src/components/settings/constants.ts @@ -49,6 +49,7 @@ export const MODELS_BY_PROVIDER: Partial void + routerModels?: RouterModels + refetchRouterModels: () => void + organizationAllowList: OrganizationAllowList + modelValidationError?: string +} + +export const CometAPI = ({ + apiConfiguration, + 
setApiConfigurationField, + routerModels, + refetchRouterModels, + organizationAllowList, + modelValidationError, +}: CometAPIProps) => { + const { t } = useAppTranslation() + + // Refresh status & error for inline feedback + const [refreshStatus, setRefreshStatus] = useState<"idle" | "loading" | "success" | "error">("idle") + const [refreshError, setRefreshError] = useState() + const cometErrorJustReceived = useRef(false) + + const handleInputChange = useCallback( + ( + field: K, + transform: (event: E) => ProviderSettings[K] = inputEventTransform, + ) => + (event: E | Event) => { + setApiConfigurationField(field, transform(event as E)) + }, + [setApiConfigurationField], + ) + + // Listen for model fetch results to surface errors/success inline (mirrors LiteLLM behavior) + useEffect(() => { + const handleMessage = (event: MessageEvent) => { + const message = event.data + if (message?.type === "singleRouterModelFetchResponse" && !message.success) { + const providerName = message.values?.provider as string + if (providerName === "cometapi") { + cometErrorJustReceived.current = true + setRefreshStatus("error") + setRefreshError(message.error) + } + } else if (message?.type === "routerModels") { + if (refreshStatus === "loading") { + if (!cometErrorJustReceived.current) { + setRefreshStatus("success") + } + // Reset flag for next cycle + cometErrorJustReceived.current = false + } + } + } + + window.addEventListener("message", handleMessage) + return () => { + window.removeEventListener("message", handleMessage) + } + }, [refreshStatus]) + + // When API key changes, outer ApiOptions already debounces requestRouterModels. + // We do an explicit fetch on blur to give immediate feedback and clear cache. 
+ const handleApiKeyBlur = useCallback(() => { + const key = apiConfiguration?.cometApiKey + if (!key) return + setRefreshStatus("loading") + setRefreshError(undefined) + cometErrorJustReceived.current = false + + vscode.postMessage({ type: "flushRouterModels", text: "cometapi" }) + // Trigger a global fetch for all dynamic providers (cheap) – models for cometapi will be included + vscode.postMessage({ type: "requestRouterModels" }) + // Also ping the hook-driven refetch to update local state ASAP + refetchRouterModels() + }, [apiConfiguration?.cometApiKey, refetchRouterModels]) + + // Build model list: dynamic first, fallback ensures non-empty; hide info by not exposing metadata anywhere + const combinedModels = useMemo(() => { + return { ...(cometApiModels as Record), ...(routerModels?.cometapi ?? {}) } + }, [routerModels]) + + const filtered = filterModels(combinedModels, apiConfiguration.apiProvider, organizationAllowList) + const modelIds = useMemo(() => Object.keys(filtered ?? {}).sort((a, b) => a.localeCompare(b)), [filtered]) + + const selected = apiConfiguration.cometApiModelId || cometApiDefaultModelId + + return ( + <> + + + +
+ {t("settings:providers.apiKeyStorageNotice")} +
+ {!apiConfiguration?.cometApiKey && ( + + Get CometAPI Key + + )} + + {refreshStatus === "error" && refreshError && ( +
{refreshError}
+ )} +
+ + + {modelValidationError && ( +
{modelValidationError}
+ )} +
+ , + defaultModelLink: ( + setApiConfigurationField("cometApiModelId", cometApiDefaultModelId)} + className="text-sm" + /> + ), + }} + values={{ serviceName: "CometAPI", defaultModelId: cometApiDefaultModelId }} + /> +
+
+ + ) +} diff --git a/webview-ui/src/components/settings/providers/index.ts b/webview-ui/src/components/settings/providers/index.ts index fe0e6cecf9..148d8e8f53 100644 --- a/webview-ui/src/components/settings/providers/index.ts +++ b/webview-ui/src/components/settings/providers/index.ts @@ -3,6 +3,7 @@ export { Bedrock } from "./Bedrock" export { Cerebras } from "./Cerebras" export { Chutes } from "./Chutes" export { ClaudeCode } from "./ClaudeCode" +export { CometAPI } from "./CometAPI" export { DeepSeek } from "./DeepSeek" export { Doubao } from "./Doubao" export { Gemini } from "./Gemini" diff --git a/webview-ui/src/components/ui/hooks/useSelectedModel.ts b/webview-ui/src/components/ui/hooks/useSelectedModel.ts index b7fe4ff03d..6047678e89 100644 --- a/webview-ui/src/components/ui/hooks/useSelectedModel.ts +++ b/webview-ui/src/components/ui/hooks/useSelectedModel.ts @@ -57,6 +57,8 @@ import { vercelAiGatewayDefaultModelId, BEDROCK_CLAUDE_SONNET_4_MODEL_ID, deepInfraDefaultModelId, + cometApiDefaultModelId, + cometApiModels, } from "@roo-code/types" import type { ModelRecord, RouterModels } from "@roo/api" @@ -274,6 +276,11 @@ function getSelectedModel({ const info = routerModels.deepinfra?.[id] return { id, info } } + case "cometapi": { + const id = apiConfiguration.cometApiModelId ?? cometApiDefaultModelId + const info = routerModels.cometapi?.[id] ?? cometApiModels[id as keyof typeof cometApiModels] + return { id, info } + } case "vscode-lm": { const id = apiConfiguration?.vsCodeLmModelSelector ? 
`${apiConfiguration.vsCodeLmModelSelector.vendor}/${apiConfiguration.vsCodeLmModelSelector.family}` diff --git a/webview-ui/src/utils/__tests__/validate.test.ts b/webview-ui/src/utils/__tests__/validate.test.ts index c9fb7bfd42..318a3f35ed 100644 --- a/webview-ui/src/utils/__tests__/validate.test.ts +++ b/webview-ui/src/utils/__tests__/validate.test.ts @@ -40,6 +40,7 @@ describe("Model Validation Functions", () => { ollama: {}, lmstudio: {}, deepinfra: {}, + cometapi: {}, "io-intelligence": {}, "vercel-ai-gateway": {}, } diff --git a/webview-ui/src/utils/validate.ts b/webview-ui/src/utils/validate.ts index 58cc8d38e8..9f0ad54c46 100644 --- a/webview-ui/src/utils/validate.ts +++ b/webview-ui/src/utils/validate.ts @@ -57,6 +57,11 @@ function validateModelsAndKeysProvided(apiConfiguration: ProviderSettings): stri return i18next.t("settings:validation.apiKey") } break + case "cometapi": + if (!apiConfiguration.cometApiKey) { + return i18next.t("settings:validation.apiKey") + } + break case "anthropic": if (!apiConfiguration.apiKey) { return i18next.t("settings:validation.apiKey") @@ -202,6 +207,8 @@ function getModelIdForProvider(apiConfiguration: ProviderSettings, provider: str return apiConfiguration.deepInfraModelId case "litellm": return apiConfiguration.litellmModelId + case "cometapi": + return apiConfiguration.cometApiModelId case "openai": return apiConfiguration.openAiModelId case "ollama": @@ -290,6 +297,9 @@ export function validateModelId(apiConfiguration: ProviderSettings, routerModels case "litellm": modelId = apiConfiguration.litellmModelId break + case "cometapi": + modelId = apiConfiguration.cometApiModelId + break case "io-intelligence": modelId = apiConfiguration.ioIntelligenceModelId break