
Commit 934bfd0

joshualipman123, daniel-lxs, and cte authored
feat: Add Vercel AI Gateway provider integration (RooCodeInc#7396)
Co-authored-by: daniel-lxs <[email protected]> Co-authored-by: cte <[email protected]>
1 parent 7c91e4f commit 934bfd0


46 files changed (+1492 -11 lines)

packages/types/npm/package.metadata.json

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 {
 	"name": "@roo-code/types",
-	"version": "1.60.0",
+	"version": "1.61.0",
 	"description": "TypeScript type definitions for Roo Code.",
 	"publishConfig": {
 		"access": "public",

packages/types/src/global-settings.ts

Lines changed: 1 addition & 0 deletions
@@ -198,6 +198,7 @@ export const SECRET_STATE_KEYS = [
 	"fireworksApiKey",
 	"featherlessApiKey",
 	"ioIntelligenceApiKey",
+	"vercelAiGatewayApiKey",
 ] as const satisfies readonly (keyof ProviderSettings)[]
 export type SecretState = Pick<ProviderSettings, (typeof SECRET_STATE_KEYS)[number]>

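Since SecretState is derived from SECRET_STATE_KEYS via Pick, adding "vercelAiGatewayApiKey" to the list is what exposes the new key on the SecretState type. A minimal sketch of the resulting type surface; the import path and the literal value are illustrative, and it assumes the other secret keys remain optional as they are in the provider schemas:

import type { SecretState } from "@roo-code/types" // assumed re-export from the package root

// With this commit, the derived type accepts the new key alongside the existing ones.
const secrets: SecretState = {
	vercelAiGatewayApiKey: "placeholder-gateway-key", // illustrative value only
}
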
packages/types/src/provider-settings.ts

Lines changed: 11 additions & 0 deletions
@@ -66,6 +66,7 @@ export const providerNames = [
 	"featherless",
 	"io-intelligence",
 	"roo",
+	"vercel-ai-gateway",
 ] as const
 
 export const providerNamesSchema = z.enum(providerNames)
@@ -321,6 +322,11 @@ const rooSchema = apiModelIdProviderModelSchema.extend({
 	// No additional fields needed - uses cloud authentication
 })
 
+const vercelAiGatewaySchema = baseProviderSettingsSchema.extend({
+	vercelAiGatewayApiKey: z.string().optional(),
+	vercelAiGatewayModelId: z.string().optional(),
+})
+
 const defaultSchema = z.object({
 	apiProvider: z.undefined(),
 })
@@ -360,6 +366,7 @@ export const providerSettingsSchemaDiscriminated = z.discriminatedUnion("apiProv
 	ioIntelligenceSchema.merge(z.object({ apiProvider: z.literal("io-intelligence") })),
 	qwenCodeSchema.merge(z.object({ apiProvider: z.literal("qwen-code") })),
 	rooSchema.merge(z.object({ apiProvider: z.literal("roo") })),
+	vercelAiGatewaySchema.merge(z.object({ apiProvider: z.literal("vercel-ai-gateway") })),
 	defaultSchema,
 ])
 
@@ -399,6 +406,7 @@ export const providerSettingsSchema = z.object({
 	...ioIntelligenceSchema.shape,
 	...qwenCodeSchema.shape,
 	...rooSchema.shape,
+	...vercelAiGatewaySchema.shape,
 	...codebaseIndexProviderSchema.shape,
 })
 
@@ -425,6 +433,7 @@ export const MODEL_ID_KEYS: Partial<keyof ProviderSettings>[] = [
 	"litellmModelId",
 	"huggingFaceModelId",
 	"ioIntelligenceModelId",
+	"vercelAiGatewayModelId",
 ]
 
 export const getModelId = (settings: ProviderSettings): string | undefined => {
@@ -541,6 +550,7 @@ export const MODELS_BY_PROVIDER: Record<
 	openrouter: { id: "openrouter", label: "OpenRouter", models: [] },
 	requesty: { id: "requesty", label: "Requesty", models: [] },
 	unbound: { id: "unbound", label: "Unbound", models: [] },
+	"vercel-ai-gateway": { id: "vercel-ai-gateway", label: "Vercel AI Gateway", models: [] },
 }
 
 export const dynamicProviders = [
@@ -550,6 +560,7 @@ export const dynamicProviders = [
 	"openrouter",
 	"requesty",
 	"unbound",
+	"vercel-ai-gateway",
 ] as const satisfies readonly ProviderName[]
 
 export type DynamicProvider = (typeof dynamicProviders)[number]

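Taken together, these schema changes let a Vercel AI Gateway profile flow through the same zod validation as the other providers. A rough usage sketch, assuming the schemas are exported from the @roo-code/types package root; the key and model id values below are placeholders, not from the commit:

import { providerSettingsSchemaDiscriminated } from "@roo-code/types" // assumed export path

// Hypothetical profile using the fields declared in vercelAiGatewaySchema above.
const result = providerSettingsSchemaDiscriminated.safeParse({
	apiProvider: "vercel-ai-gateway",
	vercelAiGatewayApiKey: "placeholder-gateway-key",
	vercelAiGatewayModelId: "anthropic/claude-sonnet-4",
})

if (result.success && result.data.apiProvider === "vercel-ai-gateway") {
	// The discriminated union narrows to the gateway branch here.
	console.log(result.data.vercelAiGatewayModelId)
}
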
packages/types/src/providers/index.ts

Lines changed: 1 addition & 0 deletions
@@ -27,4 +27,5 @@ export * from "./unbound.js"
 export * from "./vertex.js"
 export * from "./vscode-llm.js"
 export * from "./xai.js"
+export * from "./vercel-ai-gateway.js"
 export * from "./zai.js"

packages/types/src/providers/vercel-ai-gateway.ts

Lines changed: 102 additions & 0 deletions
@@ -0,0 +1,102 @@
+import type { ModelInfo } from "../model.js"
+
+// https://ai-gateway.vercel.sh/v1/
+export const vercelAiGatewayDefaultModelId = "anthropic/claude-sonnet-4"
+
+export const VERCEL_AI_GATEWAY_PROMPT_CACHING_MODELS = new Set([
+	"anthropic/claude-3-haiku",
+	"anthropic/claude-3-opus",
+	"anthropic/claude-3.5-haiku",
+	"anthropic/claude-3.5-sonnet",
+	"anthropic/claude-3.7-sonnet",
+	"anthropic/claude-opus-4",
+	"anthropic/claude-opus-4.1",
+	"anthropic/claude-sonnet-4",
+	"openai/gpt-4.1",
+	"openai/gpt-4.1-mini",
+	"openai/gpt-4.1-nano",
+	"openai/gpt-4o",
+	"openai/gpt-4o-mini",
+	"openai/gpt-5",
+	"openai/gpt-5-mini",
+	"openai/gpt-5-nano",
+	"openai/o1",
+	"openai/o3",
+	"openai/o3-mini",
+	"openai/o4-mini",
+])
+
+export const VERCEL_AI_GATEWAY_VISION_ONLY_MODELS = new Set([
+	"alibaba/qwen-3-14b",
+	"alibaba/qwen-3-235b",
+	"alibaba/qwen-3-30b",
+	"alibaba/qwen-3-32b",
+	"alibaba/qwen3-coder",
+	"amazon/nova-pro",
+	"anthropic/claude-3.5-haiku",
+	"google/gemini-1.5-flash-8b",
+	"google/gemini-2.0-flash-thinking",
+	"google/gemma-3-27b",
+	"mistral/devstral-small",
+	"xai/grok-vision-beta",
+])
+
+export const VERCEL_AI_GATEWAY_VISION_AND_TOOLS_MODELS = new Set([
+	"amazon/nova-lite",
+	"anthropic/claude-3-haiku",
+	"anthropic/claude-3-opus",
+	"anthropic/claude-3-sonnet",
+	"anthropic/claude-3.5-sonnet",
+	"anthropic/claude-3.7-sonnet",
+	"anthropic/claude-opus-4",
+	"anthropic/claude-opus-4.1",
+	"anthropic/claude-sonnet-4",
+	"google/gemini-1.5-flash",
+	"google/gemini-1.5-pro",
+	"google/gemini-2.0-flash",
+	"google/gemini-2.0-flash-lite",
+	"google/gemini-2.0-pro",
+	"google/gemini-2.5-flash",
+	"google/gemini-2.5-flash-lite",
+	"google/gemini-2.5-pro",
+	"google/gemini-exp",
+	"meta/llama-3.2-11b",
+	"meta/llama-3.2-90b",
+	"meta/llama-3.3",
+	"meta/llama-4-maverick",
+	"meta/llama-4-scout",
+	"mistral/pixtral-12b",
+	"mistral/pixtral-large",
+	"moonshotai/kimi-k2",
+	"openai/gpt-4-turbo",
+	"openai/gpt-4.1",
+	"openai/gpt-4.1-mini",
+	"openai/gpt-4.1-nano",
+	"openai/gpt-4.5-preview",
+	"openai/gpt-4o",
+	"openai/gpt-4o-mini",
+	"openai/gpt-oss-120b",
+	"openai/gpt-oss-20b",
+	"openai/o3",
+	"openai/o3-pro",
+	"openai/o4-mini",
+	"vercel/v0-1.0-md",
+	"xai/grok-2-vision",
+	"zai/glm-4.5v",
+])
+
+export const vercelAiGatewayDefaultModelInfo: ModelInfo = {
+	maxTokens: 64000,
+	contextWindow: 200000,
+	supportsImages: true,
+	supportsComputerUse: true,
+	supportsPromptCache: true,
+	inputPrice: 3,
+	outputPrice: 15,
+	cacheWritesPrice: 3.75,
+	cacheReadsPrice: 0.3,
+	description:
+		"Claude Sonnet 4 significantly improves on Sonnet 3.7's industry-leading capabilities, excelling in coding with a state-of-the-art 72.7% on SWE-bench. The model balances performance and efficiency for internal and external use cases, with enhanced steerability for greater control over implementations. While not matching Opus 4 in most domains, it delivers an optimal mix of capability and practicality.",
+}
+
+export const VERCEL_AI_GATEWAY_DEFAULT_TEMPERATURE = 0.7
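
The new constants module centralizes which gateway models support prompt caching, vision, and tools. The commit's model fetcher is not shown in this excerpt; the helper below is a hypothetical illustration of how these sets could feed ModelInfo-style capability flags, assuming the constants are re-exported from @roo-code/types:

import {
	type ModelInfo,
	VERCEL_AI_GATEWAY_PROMPT_CACHING_MODELS,
	VERCEL_AI_GATEWAY_VISION_AND_TOOLS_MODELS,
	VERCEL_AI_GATEWAY_VISION_ONLY_MODELS,
} from "@roo-code/types" // assumed re-export path

// Hypothetical helper (not part of the commit): derive capability flags from the sets above.
function gatewayCapabilities(
	modelId: string,
): Pick<ModelInfo, "supportsImages" | "supportsComputerUse" | "supportsPromptCache"> {
	const visionAndTools = VERCEL_AI_GATEWAY_VISION_AND_TOOLS_MODELS.has(modelId)
	return {
		supportsImages: visionAndTools || VERCEL_AI_GATEWAY_VISION_ONLY_MODELS.has(modelId),
		supportsComputerUse: visionAndTools, // assumption: tools-capable vision models map to computer use
		supportsPromptCache: VERCEL_AI_GATEWAY_PROMPT_CACHING_MODELS.has(modelId),
	}
}

// For the default model id this yields the same flags as vercelAiGatewayDefaultModelInfo above.
gatewayCapabilities("anthropic/claude-sonnet-4")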

pnpm-lock.yaml

Lines changed: 9 additions & 9 deletions
Some generated files are not rendered by default.

src/api/index.ts

Lines changed: 3 additions & 0 deletions
@@ -38,6 +38,7 @@ import {
 	FireworksHandler,
 	RooHandler,
 	FeatherlessHandler,
+	VercelAiGatewayHandler,
 } from "./providers"
 import { NativeOllamaHandler } from "./providers/native-ollama"
 
@@ -151,6 +152,8 @@ export function buildApiHandler(configuration: ProviderSettings): ApiHandler {
 			return new RooHandler(options)
 		case "featherless":
 			return new FeatherlessHandler(options)
+		case "vercel-ai-gateway":
+			return new VercelAiGatewayHandler(options)
 		default:
 			apiProvider satisfies "gemini-cli" | undefined
 			return new AnthropicHandler(options)
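
With the new case wired into buildApiHandler, selecting the provider in settings routes straight to VercelAiGatewayHandler. A minimal call sketch; the import path assumes a caller inside src/, and the key and model id values are placeholders:

import { buildApiHandler } from "./api" // path assumed relative to src/

// Sketch only: minimal ProviderSettings for the new provider.
const handler = buildApiHandler({
	apiProvider: "vercel-ai-gateway",
	vercelAiGatewayApiKey: "placeholder-gateway-key",
	vercelAiGatewayModelId: "anthropic/claude-sonnet-4",
})
// handler is a VercelAiGatewayHandler; the AnthropicHandler fallback above only
// applies to the default branch ("gemini-cli" | undefined).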
