Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
74 changes: 0 additions & 74 deletions packages/types/src/providers/gemini.ts
Original file line number Diff line number Diff line change
Expand Up @@ -68,80 +68,6 @@ export const geminiModels = {
inputPrice: 0,
outputPrice: 0,
},
"gemini-2.5-pro-preview-03-25": {
maxTokens: 65_535,
contextWindow: 1_048_576,
supportsImages: true,
supportsPromptCache: true,
inputPrice: 2.5, // This is the pricing for prompts above 200k tokens.
outputPrice: 15,
cacheReadsPrice: 0.625,
cacheWritesPrice: 4.5,
tiers: [
{
contextWindow: 200_000,
inputPrice: 1.25,
outputPrice: 10,
cacheReadsPrice: 0.31,
},
{
contextWindow: Infinity,
inputPrice: 2.5,
outputPrice: 15,
cacheReadsPrice: 0.625,
},
],
},
"gemini-2.5-pro-preview-05-06": {
maxTokens: 65_535,
contextWindow: 1_048_576,
supportsImages: true,
supportsPromptCache: true,
inputPrice: 2.5, // This is the pricing for prompts above 200k tokens.
outputPrice: 15,
cacheReadsPrice: 0.625,
cacheWritesPrice: 4.5,
tiers: [
{
contextWindow: 200_000,
inputPrice: 1.25,
outputPrice: 10,
cacheReadsPrice: 0.31,
},
{
contextWindow: Infinity,
inputPrice: 2.5,
outputPrice: 15,
cacheReadsPrice: 0.625,
},
],
},
"gemini-2.5-pro-preview-06-05": {
maxTokens: 65_535,
contextWindow: 1_048_576,
supportsImages: true,
supportsPromptCache: true,
inputPrice: 2.5, // This is the pricing for prompts above 200k tokens.
outputPrice: 15,
cacheReadsPrice: 0.625,
cacheWritesPrice: 4.5,
maxThinkingTokens: 32_768,
supportsReasoningBudget: true,
tiers: [
{
contextWindow: 200_000,
inputPrice: 1.25,
outputPrice: 10,
cacheReadsPrice: 0.31,
},
{
contextWindow: Infinity,
inputPrice: 2.5,
outputPrice: 15,
cacheReadsPrice: 0.625,
},
],
},
"gemini-2.5-pro": {
maxTokens: 64_000,
contextWindow: 1_048_576,
Expand Down
1 change: 0 additions & 1 deletion packages/types/src/providers/openrouter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,6 @@ export const OPEN_ROUTER_REASONING_BUDGET_MODELS = new Set([
"anthropic/claude-3.7-sonnet:beta",
"anthropic/claude-opus-4",
"anthropic/claude-sonnet-4",
"google/gemini-2.5-pro-preview",
"google/gemini-2.5-pro",
"google/gemini-2.5-flash-preview-05-20",
"google/gemini-2.5-flash",
Expand Down
26 changes: 0 additions & 26 deletions packages/types/src/providers/vertex.ts
Original file line number Diff line number Diff line change
Expand Up @@ -56,32 +56,6 @@ export const vertexModels = {
inputPrice: 0.15,
outputPrice: 0.6,
},
"gemini-2.5-pro-preview-03-25": {
maxTokens: 65_535,
contextWindow: 1_048_576,
supportsImages: true,
supportsPromptCache: true,
inputPrice: 2.5,
outputPrice: 15,
},
"gemini-2.5-pro-preview-05-06": {
maxTokens: 65_535,
contextWindow: 1_048_576,
supportsImages: true,
supportsPromptCache: true,
inputPrice: 2.5,
outputPrice: 15,
},
"gemini-2.5-pro-preview-06-05": {
maxTokens: 65_535,
contextWindow: 1_048_576,
supportsImages: true,
supportsPromptCache: true,
inputPrice: 2.5,
outputPrice: 15,
maxThinkingTokens: 32_768,
supportsReasoningBudget: true,
},
"gemini-2.5-pro": {
maxTokens: 64_000,
contextWindow: 1_048_576,
Expand Down
4 changes: 2 additions & 2 deletions packages/types/src/providers/vscode-llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -120,8 +120,8 @@ export const vscodeLlmModels = {
inputPrice: 0,
outputPrice: 0,
family: "gemini-2.5-pro",
version: "gemini-2.5-pro-preview-03-25",
name: "Gemini 2.5 Pro (Preview)",
version: "gemini-2.5-pro",
name: "Gemini 2.5 Pro",
supportsToolCalling: true,
maxInputTokens: 63830,
},
Expand Down
2 changes: 0 additions & 2 deletions src/api/providers/fetchers/__tests__/openrouter.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -223,7 +223,6 @@ describe("OpenRouter API", () => {
contextWindow: 1048576,
supportsImages: true,
supportsPromptCache: true,
supportsReasoningBudget: true,
inputPrice: 1.25,
outputPrice: 10,
cacheWritesPrice: 1.625,
Expand All @@ -237,7 +236,6 @@ describe("OpenRouter API", () => {
contextWindow: 1048576,
supportsImages: true,
supportsPromptCache: true,
supportsReasoningBudget: true,
inputPrice: 1.25,
outputPrice: 10,
cacheWritesPrice: 1.625,
Expand Down
19 changes: 18 additions & 1 deletion src/api/providers/gemini.ts
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,15 @@ export class GeminiHandler extends BaseProvider implements SingleCompletionHandl

override getModel() {
const modelId = this.options.apiModelId
let id = modelId && modelId in geminiModels ? (modelId as GeminiModelId) : geminiDefaultModelId

// Handle backward compatibility for legacy preview model names
let mappedModelId = modelId
if (modelId && this.isLegacyPreviewModel(modelId)) {
mappedModelId = "gemini-2.5-pro"
}

let id =
mappedModelId && mappedModelId in geminiModels ? (mappedModelId as GeminiModelId) : geminiDefaultModelId
const info: ModelInfo = geminiModels[id]
const params = getModelParams({ format: "gemini", modelId: id, model: info, settings: this.options })

Expand All @@ -142,6 +150,15 @@ export class GeminiHandler extends BaseProvider implements SingleCompletionHandl
return { id: id.endsWith(":thinking") ? id.replace(":thinking", "") : id, info, ...params }
}

protected isLegacyPreviewModel(modelId: string): boolean {
const legacyPreviewModels = [
"gemini-2.5-pro-preview-03-25",
"gemini-2.5-pro-preview-05-06",
"gemini-2.5-pro-preview-06-05",
]
return legacyPreviewModels.includes(modelId)
}

async completePrompt(prompt: string): Promise<string> {
try {
const { id: model } = this.getModel()
Expand Down
15 changes: 11 additions & 4 deletions src/api/providers/openrouter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -82,10 +82,13 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
// other providers (including Gemini), so we need to explicitly disable
// it. We should generalize this using the logic in `getModelParams`, but
// this is easier for now.
if (
(modelId === "google/gemini-2.5-pro-preview" || modelId === "google/gemini-2.5-pro") &&
typeof reasoning === "undefined"
) {
// Handle backward compatibility for legacy preview model names
let mappedModelId = modelId
if (this.isLegacyGeminiPreviewModel(modelId)) {
mappedModelId = "google/gemini-2.5-pro"
}

if (mappedModelId === "google/gemini-2.5-pro" && typeof reasoning === "undefined") {
reasoning = { exclude: true }
}

Expand Down Expand Up @@ -242,4 +245,8 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
const completion = response as OpenAI.Chat.ChatCompletion
return completion.choices[0]?.message?.content || ""
}

private isLegacyGeminiPreviewModel(modelId: string): boolean {
return modelId === "google/gemini-2.5-pro-preview"
}
}
10 changes: 9 additions & 1 deletion src/api/providers/vertex.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,15 @@ export class VertexHandler extends GeminiHandler implements SingleCompletionHand

override getModel() {
const modelId = this.options.apiModelId
let id = modelId && modelId in vertexModels ? (modelId as VertexModelId) : vertexDefaultModelId

// Handle backward compatibility for legacy preview model names
let mappedModelId = modelId
if (modelId && this.isLegacyPreviewModel(modelId)) {
mappedModelId = "gemini-2.5-pro"
}

let id =
mappedModelId && mappedModelId in vertexModels ? (mappedModelId as VertexModelId) : vertexDefaultModelId
const info: ModelInfo = vertexModels[id]
const params = getModelParams({ format: "gemini", modelId: id, model: info, settings: this.options })

Expand Down