Skip to content

Commit 742a72b

Browse files
celestial-vault and Elephant Lumps authored
migrate openRouterModels protobus (RooCodeInc#4058)

* migrate openRouterModels
* remove conversion functions and unused imports

---------

Co-authored-by: Elephant Lumps <[email protected]>
1 parent 84a00b0 commit 742a72b

File tree

12 files changed

+144
-190
lines changed

12 files changed

+144
-190
lines changed

hosts/vscode/host-grpc-service-config.ts

Lines changed: 0 additions & 33 deletions
This file was deleted.

hosts/vscode/uri/index.ts

Lines changed: 0 additions & 22 deletions
This file was deleted.

hosts/vscode/uri/methods.ts

Lines changed: 0 additions & 16 deletions
This file was deleted.

hosts/vscode/watch/index.ts

Lines changed: 0 additions & 22 deletions
This file was deleted.

hosts/vscode/watch/methods.ts

Lines changed: 0 additions & 15 deletions
This file was deleted.

proto/models.proto

Lines changed: 35 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,8 @@ service ModelsService {
2020
rpc refreshOpenAiModels(OpenAiModelsRequest) returns (StringArray);
2121
// Refreshes and returns Requesty models
2222
rpc refreshRequestyModels(EmptyRequest) returns (OpenRouterCompatibleModelInfo);
23+
// Subscribe to OpenRouter models updates
24+
rpc subscribeToOpenRouterModels(EmptyRequest) returns (stream OpenRouterCompatibleModelInfo);
2325
}
2426

2527
// List of VS Code LM models
@@ -35,17 +37,42 @@ message VsCodeLmModel {
3537
string id = 4;
3638
}
3739

40+
// Price tier for tiered pricing models
41+
message PriceTier {
42+
int32 token_limit = 1; // Upper limit (inclusive) of input tokens for this price
43+
double price = 2; // Price per million tokens for this tier
44+
}
45+
46+
// Thinking configuration for models that support thinking/reasoning
47+
message ThinkingConfig {
48+
optional int32 max_budget = 1; // Max allowed thinking budget tokens
49+
optional double output_price = 2; // Output price per million tokens when budget > 0
50+
repeated PriceTier output_price_tiers = 3; // Optional: Tiered output price when budget > 0
51+
}
52+
53+
// Model tier for tiered pricing structures
54+
message ModelTier {
55+
int32 context_window = 1;
56+
optional double input_price = 2;
57+
optional double output_price = 3;
58+
optional double cache_writes_price = 4;
59+
optional double cache_reads_price = 5;
60+
}
61+
3862
// For OpenRouterCompatibleModelInfo structure in OpenRouterModels
3963
message OpenRouterModelInfo {
40-
int32 max_tokens = 1;
41-
int32 context_window = 2;
42-
bool supports_images = 3;
64+
optional int32 max_tokens = 1;
65+
optional int32 context_window = 2;
66+
optional bool supports_images = 3;
4367
bool supports_prompt_cache = 4;
44-
double input_price = 5;
45-
double output_price = 6;
46-
double cache_writes_price = 7;
47-
double cache_reads_price = 8;
48-
string description = 9;
68+
optional double input_price = 5;
69+
optional double output_price = 6;
70+
optional double cache_writes_price = 7;
71+
optional double cache_reads_price = 8;
72+
optional string description = 9;
73+
optional ThinkingConfig thinking_config = 10;
74+
optional bool supports_global_endpoint = 11;
75+
repeated ModelTier tiers = 12;
4976
}
5077

5178
// Shared response message for model information

src/core/controller/index.ts

Lines changed: 6 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
import { Anthropic } from "@anthropic-ai/sdk"
22
import axios from "axios"
33
import { v4 as uuidv4 } from "uuid"
4-
54
import fs from "fs/promises"
65
import { setTimeout as setTimeoutPromise } from "node:timers/promises"
76
import pWaitFor from "p-wait-for"
@@ -13,53 +12,40 @@ import { EmptyRequest } from "@shared/proto/common"
1312
import { buildApiHandler } from "@api/index"
1413
import { cleanupLegacyCheckpoints } from "@integrations/checkpoints/CheckpointMigration"
1514
import { downloadTask } from "@integrations/misc/export-markdown"
16-
import { fetchOpenGraphData } from "@integrations/misc/link-preview"
17-
import { handleFileServiceRequest } from "./file"
18-
import { getTheme } from "@integrations/theme/getTheme"
1915
import WorkspaceTracker from "@integrations/workspace/WorkspaceTracker"
2016
import { ClineAccountService } from "@services/account/ClineAccountService"
21-
import { BrowserSession } from "@services/browser/BrowserSession"
2217
import { McpHub } from "@services/mcp/McpHub"
2318
import { telemetryService } from "@/services/posthog/telemetry/TelemetryService"
2419
import { ApiProvider, ModelInfo } from "@shared/api"
2520
import { ChatContent } from "@shared/ChatContent"
2621
import { ChatSettings } from "@shared/ChatSettings"
2722
import { ExtensionMessage, ExtensionState, Platform } from "@shared/ExtensionMessage"
2823
import { HistoryItem } from "@shared/HistoryItem"
29-
import { McpDownloadResponse, McpMarketplaceCatalog, McpServer } from "@shared/mcp"
24+
import { McpMarketplaceCatalog } from "@shared/mcp"
3025
import { TelemetrySetting } from "@shared/TelemetrySetting"
3126
import { WebviewMessage } from "@shared/WebviewMessage"
3227
import { fileExistsAtPath } from "@utils/fs"
3328
import { getWorkingState } from "@utils/git"
3429
import { extractCommitMessage } from "@integrations/git/commit-message-generator"
35-
import { getTotalTasksSize } from "@utils/storage"
36-
import {
37-
ensureMcpServersDirectoryExists,
38-
ensureSettingsDirectoryExists,
39-
GlobalFileNames,
40-
ensureWorkflowsDirectoryExists,
41-
} from "../storage/disk"
30+
import { ensureMcpServersDirectoryExists, ensureSettingsDirectoryExists, GlobalFileNames } from "../storage/disk"
4231
import {
4332
getAllExtensionState,
4433
getGlobalState,
4534
getSecret,
4635
getWorkspaceState,
47-
resetExtensionState,
4836
storeSecret,
4937
updateApiConfiguration,
5038
updateGlobalState,
5139
updateWorkspaceState,
5240
} from "../storage/state"
53-
import { Task, cwd } from "../task"
41+
import { Task } from "../task"
5442
import { ClineRulesToggles } from "@shared/cline-rules"
5543
import { sendStateUpdate } from "./state/subscribeToState"
5644
import { sendAddToInputEvent } from "./ui/subscribeToAddToInput"
5745
import { sendAuthCallbackEvent } from "./account/subscribeToAuthCallback"
58-
import { sendChatButtonClickedEvent } from "./ui/subscribeToChatButtonClicked"
5946
import { sendMcpMarketplaceCatalogEvent } from "./mcp/subscribeToMcpMarketplaceCatalog"
60-
import { refreshClineRulesToggles } from "@core/context/instructions/user-instructions/cline-rules"
61-
import { refreshExternalRulesToggles } from "@core/context/instructions/user-instructions/external-rules"
62-
import { refreshWorkflowToggles } from "@core/context/instructions/user-instructions/workflows"
47+
import { sendOpenRouterModelsEvent } from "./models/subscribeToOpenRouterModels"
48+
import { OpenRouterCompatibleModelInfo } from "@/shared/proto/models"
6349

6450
/*
6551
https://github.com/microsoft/vscode-webview-ui-toolkit-samples/blob/main/default/weather-webview/src/providers/WeatherViewProvider.ts
@@ -227,10 +213,7 @@ export class Controller {
227213
// post last cached models in case the call to endpoint fails
228214
this.readOpenRouterModels().then((openRouterModels) => {
229215
if (openRouterModels) {
230-
this.postMessageToWebview({
231-
type: "openRouterModels",
232-
openRouterModels,
233-
})
216+
sendOpenRouterModelsEvent(OpenRouterCompatibleModelInfo.create({ models: openRouterModels }))
234217
}
235218
})
236219
// gui relies on model info to be up-to-date to provide the most accurate pricing, so we need to fetch the latest details on launch.

src/core/controller/models/refreshOpenRouterModels.ts

Lines changed: 16 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ export async function refreshOpenRouterModels(
1919
): Promise<OpenRouterCompatibleModelInfo> {
2020
const openRouterModelsFilePath = path.join(await ensureCacheDirectoryExists(controller), GlobalFileNames.openRouterModels)
2121

22-
let models: Record<string, Partial<OpenRouterModelInfo>> = {}
22+
let models: Record<string, OpenRouterModelInfo> = {}
2323
try {
2424
const response = await axios.get("https://openrouter.ai/api/v1/models")
2525

@@ -32,15 +32,20 @@ export async function refreshOpenRouterModels(
3232
return undefined
3333
}
3434
for (const rawModel of rawModels) {
35-
const modelInfo: Partial<OpenRouterModelInfo> = {
36-
maxTokens: rawModel.top_provider?.max_completion_tokens,
37-
contextWindow: rawModel.context_length,
38-
supportsImages: rawModel.architecture?.modality?.includes("image"),
35+
const modelInfo = OpenRouterModelInfo.create({
36+
maxTokens: rawModel.top_provider?.max_completion_tokens ?? 0,
37+
contextWindow: rawModel.context_length ?? 0,
38+
supportsImages: rawModel.architecture?.modality?.includes("image") ?? false,
3939
supportsPromptCache: false,
40-
inputPrice: parsePrice(rawModel.pricing?.prompt),
41-
outputPrice: parsePrice(rawModel.pricing?.completion),
42-
description: rawModel.description,
43-
}
40+
inputPrice: parsePrice(rawModel.pricing?.prompt) ?? 0,
41+
outputPrice: parsePrice(rawModel.pricing?.completion) ?? 0,
42+
cacheWritesPrice: 0,
43+
cacheReadsPrice: 0,
44+
description: rawModel.description ?? "",
45+
thinkingConfig: rawModel.thinking_config ?? undefined,
46+
supportsGlobalEndpoint: rawModel.supports_global_endpoint ?? undefined,
47+
tiers: rawModel.tiers ?? [],
48+
})
4449

4550
switch (rawModel.id) {
4651
case "anthropic/claude-sonnet-4":
@@ -129,30 +134,13 @@ export async function refreshOpenRouterModels(
129134
}
130135
}
131136

132-
// Convert the Record<string, Partial<OpenRouterModelInfo>> to Record<string, OpenRouterModelInfo>
133-
// by filling in any missing required fields with defaults
134-
const typedModels: Record<string, OpenRouterModelInfo> = {}
135-
for (const [key, model] of Object.entries(models)) {
136-
typedModels[key] = {
137-
maxTokens: model.maxTokens ?? 0,
138-
contextWindow: model.contextWindow ?? 0,
139-
supportsImages: model.supportsImages ?? false,
140-
supportsPromptCache: model.supportsPromptCache ?? false,
141-
inputPrice: model.inputPrice ?? 0,
142-
outputPrice: model.outputPrice ?? 0,
143-
cacheWritesPrice: model.cacheWritesPrice ?? 0,
144-
cacheReadsPrice: model.cacheReadsPrice ?? 0,
145-
description: model.description ?? "",
146-
}
147-
}
148-
149-
return OpenRouterCompatibleModelInfo.create({ models: typedModels })
137+
return OpenRouterCompatibleModelInfo.create({ models })
150138
}
151139

152140
/**
153141
* Reads cached OpenRouter models from disk
154142
*/
155-
async function readOpenRouterModels(controller: Controller): Promise<Record<string, Partial<OpenRouterModelInfo>> | undefined> {
143+
async function readOpenRouterModels(controller: Controller): Promise<Record<string, OpenRouterModelInfo> | undefined> {
156144
const openRouterModelsFilePath = path.join(await ensureCacheDirectoryExists(controller), GlobalFileNames.openRouterModels)
157145
const fileExists = await fileExistsAtPath(openRouterModelsFilePath)
158146
if (fileExists) {
Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,60 @@
1+
import { Controller } from "../index"
2+
import { EmptyRequest } from "@shared/proto/common"
3+
import { OpenRouterCompatibleModelInfo } from "@shared/proto/models"
4+
import { StreamingResponseHandler, getRequestRegistry } from "../grpc-handler"
5+
6+
// Keep track of active OpenRouter models subscriptions
7+
const activeOpenRouterModelsSubscriptions = new Set<StreamingResponseHandler>()
8+
9+
/**
10+
* Subscribe to OpenRouter models events
11+
* @param controller The controller instance
12+
* @param request The empty request
13+
* @param responseStream The streaming response handler
14+
* @param requestId The ID of the request (passed by the gRPC handler)
15+
*/
16+
export async function subscribeToOpenRouterModels(
17+
controller: Controller,
18+
request: EmptyRequest,
19+
responseStream: StreamingResponseHandler,
20+
requestId?: string,
21+
): Promise<void> {
22+
console.log("[DEBUG] set up OpenRouter models subscription")
23+
24+
// Add this subscription to the active subscriptions
25+
activeOpenRouterModelsSubscriptions.add(responseStream)
26+
27+
// Register cleanup when the connection is closed
28+
const cleanup = () => {
29+
activeOpenRouterModelsSubscriptions.delete(responseStream)
30+
console.log("[DEBUG] Cleaned up OpenRouter models subscription")
31+
}
32+
33+
// Register the cleanup function with the request registry if we have a requestId
34+
if (requestId) {
35+
getRequestRegistry().registerRequest(requestId, cleanup, { type: "openRouterModels_subscription" }, responseStream)
36+
}
37+
}
38+
39+
/**
40+
* Send an OpenRouter models event to all active subscribers
41+
* @param models The OpenRouter models to send
42+
*/
43+
export async function sendOpenRouterModelsEvent(models: OpenRouterCompatibleModelInfo): Promise<void> {
44+
// Send the event to all active subscribers
45+
const promises = Array.from(activeOpenRouterModelsSubscriptions).map(async (responseStream) => {
46+
try {
47+
await responseStream(
48+
models,
49+
false, // Not the last message
50+
)
51+
console.log("[DEBUG] sending OpenRouter models event")
52+
} catch (error) {
53+
console.error("Error sending OpenRouter models event:", error)
54+
// Remove the subscription if there was an error
55+
activeOpenRouterModelsSubscriptions.delete(responseStream)
56+
}
57+
})
58+
59+
await Promise.all(promises)
60+
}

src/shared/ExtensionMessage.ts

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@ export interface ExtensionMessage {
2020
| "ollamaModels"
2121
| "lmStudioModels"
2222
| "workspaceUpdated"
23-
| "openRouterModels"
2423
| "openAiModels"
2524
| "requestyModels"
2625
| "mcpServers"
@@ -43,7 +42,6 @@ export interface ExtensionMessage {
4342
lmStudioModels?: string[]
4443
vsCodeLmModels?: { vendor?: string; family?: string; version?: string; id?: string }[]
4544
filePaths?: string[]
46-
openRouterModels?: Record<string, ModelInfo>
4745
openAiModels?: string[]
4846
requestyModels?: Record<string, ModelInfo>
4947
mcpServers?: McpServer[]

0 commit comments

Comments (0)