1 change: 1 addition & 0 deletions packages/types/src/global-settings.ts
@@ -182,6 +182,7 @@ export const SECRET_STATE_KEYS = [
"geminiApiKey",
"openAiNativeApiKey",
"cerebrasApiKey",
"cometApiKey",
"deepSeekApiKey",
"doubaoApiKey",
"moonshotApiKey",
16 changes: 16 additions & 0 deletions packages/types/src/provider-settings.ts
@@ -8,6 +8,7 @@ import {
cerebrasModels,
chutesModels,
claudeCodeModels,
cometApiModels,
deepSeekModels,
doubaoModels,
featherlessModels,
@@ -34,6 +35,7 @@ import {
export const providerNames = [
"anthropic",
"claude-code",
"cometapi",
"glama",
"openrouter",
"bedrock",
@@ -132,6 +134,12 @@ const claudeCodeSchema = apiModelIdProviderModelSchema.extend({
claudeCodeMaxOutputTokens: z.number().int().min(1).max(200000).optional(),
})

const cometApiSchema = baseProviderSettingsSchema.extend({
cometApiKey: z.string().optional(),
cometApiModelId: z.string().optional(),
cometApiBaseUrl: z.string().optional(),
})

const glamaSchema = baseProviderSettingsSchema.extend({
glamaModelId: z.string().optional(),
glamaApiKey: z.string().optional(),
@@ -346,6 +354,7 @@ const defaultSchema = z.object({
export const providerSettingsSchemaDiscriminated = z.discriminatedUnion("apiProvider", [
anthropicSchema.merge(z.object({ apiProvider: z.literal("anthropic") })),
claudeCodeSchema.merge(z.object({ apiProvider: z.literal("claude-code") })),
cometApiSchema.merge(z.object({ apiProvider: z.literal("cometapi") })),
glamaSchema.merge(z.object({ apiProvider: z.literal("glama") })),
openRouterSchema.merge(z.object({ apiProvider: z.literal("openrouter") })),
bedrockSchema.merge(z.object({ apiProvider: z.literal("bedrock") })),
@@ -387,6 +396,7 @@ export const providerSettingsSchema = z.object({
apiProvider: providerNamesSchema.optional(),
...anthropicSchema.shape,
...claudeCodeSchema.shape,
...cometApiSchema.shape,
...glamaSchema.shape,
...openRouterSchema.shape,
...bedrockSchema.shape,
@@ -503,6 +513,11 @@ export const MODELS_BY_PROVIDER: Record<
models: Object.keys(chutesModels),
},
"claude-code": { id: "claude-code", label: "Claude Code", models: Object.keys(claudeCodeModels) },
cometapi: {
id: "cometapi",
label: "CometAPI",
models: Object.keys(cometApiModels),
},
deepseek: {
id: "deepseek",
label: "DeepSeek",
@@ -577,6 +592,7 @@ export const MODELS_BY_PROVIDER: Record<
}

export const dynamicProviders = [
"cometapi",
"glama",
"huggingface",
"litellm",
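To illustrate the schema wiring above, here is a minimal sketch (not part of the diff) of validating a CometAPI settings object. It assumes `providerSettingsSchemaDiscriminated` is exported from `@roo-code/types`, matching the import path used elsewhere in this PR; `safeParse` is standard Zod:

```ts
import { providerSettingsSchemaDiscriminated } from "@roo-code/types"

// The "cometapi" literal routes validation to cometApiSchema; all of its keys are optional.
const parsed = providerSettingsSchemaDiscriminated.safeParse({
	apiProvider: "cometapi",
	cometApiKey: "sk-example", // hypothetical key, for illustration only
	cometApiModelId: "claude-sonnet-4-20250514",
	cometApiBaseUrl: "https://api.cometapi.com/v1",
})

if (parsed.success) {
	console.log(parsed.data.apiProvider) // "cometapi"
}
```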
51 changes: 51 additions & 0 deletions packages/types/src/providers/cometapi.ts
@@ -0,0 +1,51 @@
import type { ModelInfo } from "../model.js"

export type CometAPIModelId = string

export const cometApiDefaultModelId: CometAPIModelId = "claude-sonnet-4-20250514"

export const cometApiDefaultModelInfo: ModelInfo = {
maxTokens: undefined, // Let system determine based on contextWindow
contextWindow: 200000, // Reasonable default for modern models
Contributor: Is a 200k context window a reasonable default for all models? This seems quite high and might not be accurate for many models. Consider using a more conservative default like 8192 or 16384.

Author: Here I chose the same default as LiteLLM; this will be improved after the model-list interface is upgraded.

supportsImages: false,
supportsPromptCache: false,
// Intentionally not setting inputPrice/outputPrice
}

// Fallback models for when API is unavailable
// Small helper to create a map of id -> default info
const createModelMap = (ids: readonly CometAPIModelId[]): Record<CometAPIModelId, ModelInfo> =>
Object.fromEntries(ids.map((id) => [id, { ...cometApiDefaultModelInfo }])) as Record<CometAPIModelId, ModelInfo>

// Single, complete list for readability and easy maintenance
const COMET_FALLBACK_MODEL_IDS = [
// OpenAI series
"gpt-5-chat-latest",
"gpt-5-mini",
"gpt-5-nano",
"gpt-4.1-mini",
"gpt-4o-mini",

// Claude series
"claude-opus-4-1-20250805",
"claude-sonnet-4-20250514",
"claude-3-7-sonnet-latest",
"claude-3-5-haiku-latest",

// Gemini series
"gemini-2.5-pro",
"gemini-2.5-flash",
"gemini-2.0-flash",

// DeepSeek series
"deepseek-v3.1",
"deepseek-r1-0528",
"deepseek-reasoner",

// Other models
"grok-4-0709",
"qwen3-30b-a3b",
"qwen3-coder-plus-2025-07-22",
] as const satisfies readonly CometAPIModelId[]

export const cometApiModels: Record<CometAPIModelId, ModelInfo> = createModelMap(COMET_FALLBACK_MODEL_IDS)
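As a quick sanity check on the helper above (a usage sketch, not part of the diff), each fallback id maps to its own copy of the shared default info:

```ts
import { cometApiModels, cometApiDefaultModelInfo } from "@roo-code/types"

const sonnet = cometApiModels["claude-sonnet-4-20250514"]
console.log(sonnet.contextWindow) // 200000, the shared default

// createModelMap spreads the default per id, so each entry is an independent copy.
console.log(sonnet === cometApiDefaultModelInfo) // false
```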
1 change: 1 addition & 0 deletions packages/types/src/providers/index.ts
@@ -3,6 +3,7 @@ export * from "./bedrock.js"
export * from "./cerebras.js"
export * from "./chutes.js"
export * from "./claude-code.js"
export * from "./cometapi.js"
export * from "./deepseek.js"
export * from "./doubao.js"
export * from "./featherless.js"
3 changes: 3 additions & 0 deletions src/api/index.ts
@@ -40,6 +40,7 @@ import {
FeatherlessHandler,
VercelAiGatewayHandler,
DeepInfraHandler,
CometAPIHandler,
} from "./providers"
import { NativeOllamaHandler } from "./providers/native-ollama"

@@ -141,6 +142,8 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler {
return new GroqHandler(options)
case "deepinfra":
return new DeepInfraHandler(options)
case "cometapi":
return new CometAPIHandler(options)
case "huggingface":
return new HuggingFaceHandler(options)
case "chutes":
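A hedged sketch of how the new switch case is exercised (the import path and env var are illustrative, not from the PR):

```ts
import { buildApiHandler } from "../src/api" // path illustrative

// apiProvider "cometapi" hits the new case and returns a CometAPIHandler.
const handler = buildApiHandler({
	apiProvider: "cometapi",
	cometApiKey: process.env.COMETAPI_KEY, // hypothetical env var
	cometApiModelId: "gpt-4o-mini",
})
```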
148 changes: 148 additions & 0 deletions src/api/providers/cometapi.ts
@@ -0,0 +1,148 @@
import { Anthropic } from "@anthropic-ai/sdk"
import OpenAI from "openai"

import { cometApiDefaultModelId, cometApiDefaultModelInfo, cometApiModels } from "@roo-code/types"

import type { ApiHandlerOptions } from "../../shared/api"
import { calculateApiCostOpenAI } from "../../shared/cost"

import { ApiStream, ApiStreamUsageChunk } from "../transform/stream"
import { convertToOpenAiMessages } from "../transform/openai-format"
import { getModelParams } from "../transform/model-params"

import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index"
import { RouterProvider } from "./router-provider"

export class CometAPIHandler extends RouterProvider implements SingleCompletionHandler {
constructor(options: ApiHandlerOptions) {
super({
options: {
...options,
// Add custom headers for CometAPI
openAiHeaders: {
"HTTP-Referer": "https://github.com/RooVetGit/Roo-Code",
"X-Title": "Roo Code",
...(options.openAiHeaders || {}),
},
},
name: "cometapi",
baseURL: options.cometApiBaseUrl || "https://api.cometapi.com/v1",
apiKey: options.cometApiKey || "not-provided",
modelId: options.cometApiModelId,
defaultModelId: cometApiDefaultModelId,
defaultModelInfo: cometApiDefaultModelInfo,
})

// Initialize with fallback models to ensure we always have models available
this.models = { ...cometApiModels }
}

public override async fetchModel() {
// Fetch dynamic models from API, but keep fallback models if API fails
try {
const apiModels = await super.fetchModel()
// Overlay API-fetched entries on the fallback list (API results win on conflict)
this.models = { ...cometApiModels, ...this.models }
return apiModels
} catch (error) {
console.warn("CometAPI: Failed to fetch models from API, using fallback models", error)
// Return default model using fallback models
return this.getModel()
}
}

override getModel() {
const id = this.options.cometApiModelId ?? cometApiDefaultModelId
const info = this.models[id] ?? cometApiDefaultModelInfo

const params = getModelParams({
format: "openai",
modelId: id,
model: info,
settings: this.options,
})

return { id, info, ...params }
}

override async *createMessage(
systemPrompt: string,
messages: Anthropic.Messages.MessageParam[],
_metadata?: ApiHandlerCreateMessageMetadata,
): ApiStream {
// Ensure we have up-to-date model metadata
await this.fetchModel()
const { id: modelId, info, reasoningEffort } = this.getModel()

const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = {
model: modelId,
messages: [{ role: "system", content: systemPrompt }, ...convertToOpenAiMessages(messages)],
stream: true,
stream_options: { include_usage: true },
reasoning_effort: reasoningEffort,
} as OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming

if (this.supportsTemperature(modelId)) {
requestOptions.temperature = this.options.modelTemperature ?? 0
}

if (this.options.includeMaxTokens === true && info.maxTokens) {
;(requestOptions as any).max_completion_tokens = this.options.modelMaxTokens || info.maxTokens
}

const { data: stream } = await this.client.chat.completions.create(requestOptions).withResponse()

let lastUsage: OpenAI.CompletionUsage | undefined
for await (const chunk of stream) {
const delta = chunk.choices[0]?.delta

if (delta?.content) {
yield { type: "text", text: delta.content }
}

if (delta && "reasoning_content" in delta && delta.reasoning_content) {
yield { type: "reasoning", text: (delta.reasoning_content as string | undefined) || "" }
}

if (chunk.usage) {
lastUsage = chunk.usage
}
}

if (lastUsage) {
const inputTokens = lastUsage.prompt_tokens || 0
const outputTokens = lastUsage.completion_tokens || 0
// OpenAI-compatible usage reports cached prompt tokens as cache reads, not writes
const cacheReadTokens = lastUsage.prompt_tokens_details?.cached_tokens || 0
const cacheWriteTokens = 0

const totalCost = calculateApiCostOpenAI(info, inputTokens, outputTokens, cacheWriteTokens, cacheReadTokens)

const usage: ApiStreamUsageChunk = {
type: "usage",
inputTokens,
outputTokens,
cacheWriteTokens: cacheWriteTokens || undefined,
cacheReadTokens: cacheReadTokens || undefined,
totalCost,
}

yield usage
}
}

async completePrompt(prompt: string): Promise<string> {
const { id: modelId } = this.getModel()

try {
const response = await this.client.chat.completions.create({
model: modelId,
messages: [{ role: "user", content: prompt }],
stream: false,
})

return response.choices[0]?.message?.content || ""
} catch (error) {
throw new Error(`CometAPI completion error: ${error instanceof Error ? error.message : String(error)}`)
}
}
}
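Finally, a minimal consumption sketch for the handler (assumes valid options; the key and model id are placeholders, and the message shape follows Anthropic's `MessageParam` as used in `createMessage`):

```ts
const handler = new CometAPIHandler({
	cometApiKey: "sk-example", // hypothetical
	cometApiModelId: "claude-sonnet-4-20250514",
})

// Streams text (and any reasoning) chunks, then a usage chunk when the API reports usage.
for await (const chunk of handler.createMessage("You are a helpful assistant.", [
	{ role: "user", content: "Hello!" },
])) {
	if (chunk.type === "text") process.stdout.write(chunk.text)
	else if (chunk.type === "usage") console.log("\ntokens:", chunk.inputTokens, chunk.outputTokens)
}
```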