Skip to content

Commit 5dd0f70

Browse files
committed
feat: add CometAPI as new model provider
1 parent 90e7d09 commit 5dd0f70

File tree

20 files changed

+616
-0
lines changed

20 files changed

+616
-0
lines changed

packages/types/src/global-settings.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -182,6 +182,7 @@ export const SECRET_STATE_KEYS = [
182182
"geminiApiKey",
183183
"openAiNativeApiKey",
184184
"cerebrasApiKey",
185+
"cometApiKey",
185186
"deepSeekApiKey",
186187
"doubaoApiKey",
187188
"moonshotApiKey",

packages/types/src/provider-settings.ts

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ import {
88
cerebrasModels,
99
chutesModels,
1010
claudeCodeModels,
11+
cometApiModels,
1112
deepSeekModels,
1213
doubaoModels,
1314
featherlessModels,
@@ -34,6 +35,7 @@ import {
3435
export const providerNames = [
3536
"anthropic",
3637
"claude-code",
38+
"cometapi",
3739
"glama",
3840
"openrouter",
3941
"bedrock",
@@ -132,6 +134,12 @@ const claudeCodeSchema = apiModelIdProviderModelSchema.extend({
132134
claudeCodeMaxOutputTokens: z.number().int().min(1).max(200000).optional(),
133135
})
134136

137+
// Settings for the CometAPI provider (OpenAI-compatible model aggregator).
// All fields are optional: the handler falls back to a placeholder key and the
// public https://api.cometapi.com/v1 endpoint when unset.
const cometApiSchema = baseProviderSettingsSchema.extend({
	cometApiKey: z.string().optional(),
	cometApiModelId: z.string().optional(),
	cometApiBaseUrl: z.string().optional(),
})
142+
135143
const glamaSchema = baseProviderSettingsSchema.extend({
136144
glamaModelId: z.string().optional(),
137145
glamaApiKey: z.string().optional(),
@@ -346,6 +354,7 @@ const defaultSchema = z.object({
346354
export const providerSettingsSchemaDiscriminated = z.discriminatedUnion("apiProvider", [
347355
anthropicSchema.merge(z.object({ apiProvider: z.literal("anthropic") })),
348356
claudeCodeSchema.merge(z.object({ apiProvider: z.literal("claude-code") })),
357+
cometApiSchema.merge(z.object({ apiProvider: z.literal("cometapi") })),
349358
glamaSchema.merge(z.object({ apiProvider: z.literal("glama") })),
350359
openRouterSchema.merge(z.object({ apiProvider: z.literal("openrouter") })),
351360
bedrockSchema.merge(z.object({ apiProvider: z.literal("bedrock") })),
@@ -387,6 +396,7 @@ export const providerSettingsSchema = z.object({
387396
apiProvider: providerNamesSchema.optional(),
388397
...anthropicSchema.shape,
389398
...claudeCodeSchema.shape,
399+
...cometApiSchema.shape,
390400
...glamaSchema.shape,
391401
...openRouterSchema.shape,
392402
...bedrockSchema.shape,
@@ -503,6 +513,11 @@ export const MODELS_BY_PROVIDER: Record<
503513
models: Object.keys(chutesModels),
504514
},
505515
"claude-code": { id: "claude-code", label: "Claude Code", models: Object.keys(claudeCodeModels) },
516+
cometapi: {
517+
id: "cometapi",
518+
label: "CometAPI",
519+
models: Object.keys(cometApiModels),
520+
},
506521
deepseek: {
507522
id: "deepseek",
508523
label: "DeepSeek",
@@ -577,6 +592,7 @@ export const MODELS_BY_PROVIDER: Record<
577592
}
578593

579594
export const dynamicProviders = [
595+
"cometapi",
580596
"glama",
581597
"huggingface",
582598
"litellm",
Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
1+
import type { ModelInfo } from "../model.js"
2+
3+
export type CometAPIModelId = string
4+
5+
export const cometApiDefaultModelId: CometAPIModelId = "claude-sonnet-4-20250514"
6+
7+
export const cometApiDefaultModelInfo: ModelInfo = {
8+
maxTokens: undefined, // Let system determine based on contextWindow
9+
contextWindow: 200000, // Reasonable default for modern models
10+
supportsImages: false,
11+
supportsPromptCache: false,
12+
// Intentionally not setting inputPrice/outputPrice
13+
}
14+
15+
// Fallback models for when API is unavailable
16+
// Small helper to create a map of id -> default info
17+
const createModelMap = (ids: readonly CometAPIModelId[]): Record<CometAPIModelId, ModelInfo> =>
18+
Object.fromEntries(ids.map((id) => [id, { ...cometApiDefaultModelInfo }])) as Record<CometAPIModelId, ModelInfo>
19+
20+
// Single, complete list for readability and easy maintenance
21+
const COMET_FALLBACK_MODEL_IDS = [
22+
// OpenAI series
23+
"gpt-5-chat-latest",
24+
"gpt-5-mini",
25+
"gpt-5-nano",
26+
"gpt-4.1-mini",
27+
"gpt-4o-mini",
28+
29+
// Claude series
30+
"claude-opus-4-1-20250805",
31+
"claude-sonnet-4-20250514",
32+
"claude-3-7-sonnet-latest",
33+
"claude-3-5-haiku-latest",
34+
35+
// Gemini series
36+
"gemini-2.5-pro",
37+
"gemini-2.5-flash",
38+
"gemini-2.0-flash",
39+
40+
// DeepSeek series
41+
"deepseek-v3.1",
42+
"deepseek-r1-0528",
43+
"deepseek-reasoner",
44+
45+
// Other models
46+
"grok-4-0709",
47+
"qwen3-30b-a3b",
48+
"qwen3-coder-plus-2025-07-22",
49+
] as const satisfies readonly CometAPIModelId[]
50+
51+
export const cometApiModels: Record<CometAPIModelId, ModelInfo> = createModelMap(COMET_FALLBACK_MODEL_IDS)

packages/types/src/providers/index.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ export * from "./bedrock.js"
33
export * from "./cerebras.js"
44
export * from "./chutes.js"
55
export * from "./claude-code.js"
6+
export * from "./cometapi.js"
67
export * from "./deepseek.js"
78
export * from "./doubao.js"
89
export * from "./featherless.js"

src/api/index.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,7 @@ import {
4040
FeatherlessHandler,
4141
VercelAiGatewayHandler,
4242
DeepInfraHandler,
43+
CometAPIHandler,
4344
} from "./providers"
4445
import { NativeOllamaHandler } from "./providers/native-ollama"
4546

@@ -141,6 +142,8 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler {
141142
return new GroqHandler(options)
142143
case "deepinfra":
143144
return new DeepInfraHandler(options)
145+
case "cometapi":
146+
return new CometAPIHandler(options)
144147
case "huggingface":
145148
return new HuggingFaceHandler(options)
146149
case "chutes":

src/api/providers/cometapi.ts

Lines changed: 148 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,148 @@
1+
import { Anthropic } from "@anthropic-ai/sdk"
2+
import OpenAI from "openai"
3+
4+
import { cometApiDefaultModelId, cometApiDefaultModelInfo, cometApiModels } from "@roo-code/types"
5+
6+
import type { ApiHandlerOptions } from "../../shared/api"
7+
import { calculateApiCostOpenAI } from "../../shared/cost"
8+
9+
import { ApiStream, ApiStreamUsageChunk } from "../transform/stream"
10+
import { convertToOpenAiMessages } from "../transform/openai-format"
11+
import { getModelParams } from "../transform/model-params"
12+
13+
import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index"
14+
import { RouterProvider } from "./router-provider"
15+
16+
export class CometAPIHandler extends RouterProvider implements SingleCompletionHandler {
17+
constructor(options: ApiHandlerOptions) {
18+
super({
19+
options: {
20+
...options,
21+
// Add custom headers for CometAPI
22+
openAiHeaders: {
23+
"HTTP-Referer": "https://github.com/RooVetGit/Roo-Code",
24+
"X-Title": "Roo Code",
25+
...(options.openAiHeaders || {}),
26+
},
27+
},
28+
name: "cometapi",
29+
baseURL: options.cometApiBaseUrl || "https://api.cometapi.com/v1",
30+
apiKey: options.cometApiKey || "not-provided",
31+
modelId: options.cometApiModelId,
32+
defaultModelId: cometApiDefaultModelId,
33+
defaultModelInfo: cometApiDefaultModelInfo,
34+
})
35+
36+
// Initialize with fallback models to ensure we always have models available
37+
this.models = { ...cometApiModels }
38+
}
39+
40+
public override async fetchModel() {
41+
// Fetch dynamic models from API, but keep fallback models if API fails
42+
try {
43+
const apiModels = await super.fetchModel()
44+
// Merge API models with fallback models
45+
this.models = { ...cometApiModels, ...this.models }
46+
return apiModels
47+
} catch (error) {
48+
console.warn("CometAPI: Failed to fetch models from API, using fallback models", error)
49+
// Return default model using fallback models
50+
return this.getModel()
51+
}
52+
}
53+
54+
override getModel() {
55+
const id = this.options.cometApiModelId ?? cometApiDefaultModelId
56+
const info = this.models[id] ?? cometApiDefaultModelInfo
57+
58+
const params = getModelParams({
59+
format: "openai",
60+
modelId: id,
61+
model: info,
62+
settings: this.options,
63+
})
64+
65+
return { id, info, ...params }
66+
}
67+
68+
override async *createMessage(
69+
systemPrompt: string,
70+
messages: Anthropic.Messages.MessageParam[],
71+
_metadata?: ApiHandlerCreateMessageMetadata,
72+
): ApiStream {
73+
// Ensure we have up-to-date model metadata
74+
await this.fetchModel()
75+
const { id: modelId, info, reasoningEffort } = this.getModel()
76+
77+
const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = {
78+
model: modelId,
79+
messages: [{ role: "system", content: systemPrompt }, ...convertToOpenAiMessages(messages)],
80+
stream: true,
81+
stream_options: { include_usage: true },
82+
reasoning_effort: reasoningEffort,
83+
} as OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming
84+
85+
if (this.supportsTemperature(modelId)) {
86+
requestOptions.temperature = this.options.modelTemperature ?? 0
87+
}
88+
89+
if (this.options.includeMaxTokens === true && info.maxTokens) {
90+
;(requestOptions as any).max_completion_tokens = this.options.modelMaxTokens || info.maxTokens
91+
}
92+
93+
const { data: stream } = await this.client.chat.completions.create(requestOptions).withResponse()
94+
95+
let lastUsage: OpenAI.CompletionUsage | undefined
96+
for await (const chunk of stream) {
97+
const delta = chunk.choices[0]?.delta
98+
99+
if (delta?.content) {
100+
yield { type: "text", text: delta.content }
101+
}
102+
103+
if (delta && "reasoning_content" in delta && delta.reasoning_content) {
104+
yield { type: "reasoning", text: (delta.reasoning_content as string | undefined) || "" }
105+
}
106+
107+
if (chunk.usage) {
108+
lastUsage = chunk.usage
109+
}
110+
}
111+
112+
if (lastUsage) {
113+
const inputTokens = lastUsage.prompt_tokens || 0
114+
const outputTokens = lastUsage.completion_tokens || 0
115+
const cacheWriteTokens = lastUsage.prompt_tokens_details?.cached_tokens || 0
116+
const cacheReadTokens = 0
117+
118+
const totalCost = calculateApiCostOpenAI(info, inputTokens, outputTokens, cacheWriteTokens, cacheReadTokens)
119+
120+
const usage: ApiStreamUsageChunk = {
121+
type: "usage",
122+
inputTokens,
123+
outputTokens,
124+
cacheWriteTokens: cacheWriteTokens || undefined,
125+
cacheReadTokens: cacheReadTokens || undefined,
126+
totalCost,
127+
}
128+
129+
yield usage
130+
}
131+
}
132+
133+
async completePrompt(prompt: string): Promise<string> {
134+
const { id: modelId } = this.getModel()
135+
136+
try {
137+
const response = await this.client.chat.completions.create({
138+
model: modelId,
139+
messages: [{ role: "user", content: prompt }],
140+
stream: false,
141+
})
142+
143+
return response.choices[0]?.message?.content || ""
144+
} catch (error) {
145+
throw new Error(`CometAPI completion error: ${error}`)
146+
}
147+
}
148+
}

0 commit comments

Comments
 (0)