@@ -1,29 +1,29 @@
import { Anthropic } from "@anthropic-ai/sdk"
import axios from "axios"
import OpenAI from "openai"
-import { ApiHandler } from "../"
+import delay from "delay"
+
import { ApiHandlerOptions, ModelInfo, openRouterDefaultModelId, openRouterDefaultModelInfo } from "../../shared/api"
+import { parseApiPrice } from "../../utils/cost"
import { convertToOpenAiMessages } from "../transform/openai-format"
import { ApiStreamChunk, ApiStreamUsageChunk } from "../transform/stream"
-import delay from "delay"
+import { convertToR1Format } from "../transform/r1-format"
import { DEEP_SEEK_DEFAULT_TEMPERATURE } from "./openai"
+import { ApiHandler, SingleCompletionHandler } from ".."

const OPENROUTER_DEFAULT_TEMPERATURE = 0

-// Add custom interface for OpenRouter params
+// Add custom interface for OpenRouter params.
type OpenRouterChatCompletionParams = OpenAI.Chat.ChatCompletionCreateParams & {
	transforms?: string[]
	include_reasoning?: boolean
}

-// Add custom interface for OpenRouter usage chunk
+// Add custom interface for OpenRouter usage chunk.
interface OpenRouterApiStreamUsageChunk extends ApiStreamUsageChunk {
	fullResponseText: string
}

-import { SingleCompletionHandler } from ".."
-import { convertToR1Format } from "../transform/r1-format"
-
export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
	private options: ApiHandlerOptions
	private client: OpenAI
@@ -222,3 +222,75 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler { |
		}
	}
}
+
+export async function getOpenRouterModels() {
+	const models: Record<string, ModelInfo> = {}
+
+	try {
+		const response = await axios.get("https://openrouter.ai/api/v1/models")
+		const rawModels = response.data.data
+
+		for (const rawModel of rawModels) {
+			const modelInfo: ModelInfo = {
+				maxTokens: rawModel.top_provider?.max_completion_tokens,
+				contextWindow: rawModel.context_length,
+				supportsImages: rawModel.architecture?.modality?.includes("image"),
+				supportsPromptCache: false,
+				inputPrice: parseApiPrice(rawModel.pricing?.prompt),
+				outputPrice: parseApiPrice(rawModel.pricing?.completion),
+				description: rawModel.description,
+			}
+
+			// NOTE: this needs to be synced with api.ts/openrouter default model info.
+			switch (true) {
+				case rawModel.id.startsWith("anthropic/claude-3.7-sonnet"):
+					modelInfo.supportsComputerUse = true
+					modelInfo.supportsPromptCache = true
+					modelInfo.cacheWritesPrice = 3.75
+					modelInfo.cacheReadsPrice = 0.3
+					modelInfo.maxTokens = 16384
+					break
+				case rawModel.id.startsWith("anthropic/claude-3.5-sonnet-20240620"):
+					modelInfo.supportsPromptCache = true
+					modelInfo.cacheWritesPrice = 3.75
+					modelInfo.cacheReadsPrice = 0.3
+					modelInfo.maxTokens = 8192
+					break
+				case rawModel.id.startsWith("anthropic/claude-3.5-sonnet"):
+					modelInfo.supportsComputerUse = true
+					modelInfo.supportsPromptCache = true
+					modelInfo.cacheWritesPrice = 3.75
+					modelInfo.cacheReadsPrice = 0.3
+					modelInfo.maxTokens = 8192
+					break
+				case rawModel.id.startsWith("anthropic/claude-3-5-haiku"):
+					modelInfo.supportsPromptCache = true
+					modelInfo.cacheWritesPrice = 1.25
+					modelInfo.cacheReadsPrice = 0.1
+					modelInfo.maxTokens = 8192
+					break
+				case rawModel.id.startsWith("anthropic/claude-3-opus"):
+					modelInfo.supportsPromptCache = true
+					modelInfo.cacheWritesPrice = 18.75
+					modelInfo.cacheReadsPrice = 1.5
+					modelInfo.maxTokens = 8192
+					break
+				case rawModel.id.startsWith("anthropic/claude-3-haiku"):
+				default:
+					modelInfo.supportsPromptCache = true
+					modelInfo.cacheWritesPrice = 0.3
+					modelInfo.cacheReadsPrice = 0.03
+					modelInfo.maxTokens = 8192
+					break
+			}
+
+			models[rawModel.id] = modelInfo
+		}
+	} catch (error) {
+		console.error(
+			`Error fetching OpenRouter models: ${JSON.stringify(error, Object.getOwnPropertyNames(error), 2)}`,
+		)
+	}
+
+	return models
+}
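
For reference, a minimal sketch of how a caller might consume the new getOpenRouterModels export, e.g. when refreshing the model picker. The import paths and the refreshOpenRouterModels helper are illustrative assumptions, not part of this commit:

// Hypothetical consumer of getOpenRouterModels (names and import paths are assumptions).
import { getOpenRouterModels } from "./api/providers/openrouter"
import { ModelInfo, openRouterDefaultModelId, openRouterDefaultModelInfo } from "./shared/api"

async function refreshOpenRouterModels(): Promise<{ id: string; info: ModelInfo }> {
	// getOpenRouterModels resolves to an empty record if the listing request fails.
	const models = await getOpenRouterModels()

	// Prefer the freshly fetched metadata for the default model; otherwise
	// fall back to the bundled default info from shared/api.
	const info = models[openRouterDefaultModelId] ?? openRouterDefaultModelInfo
	return { id: openRouterDefaultModelId, info }
}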