src/api/providers/openrouter.ts (7 changes: 4 additions & 3 deletions)
@@ -86,7 +86,7 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH

if (Array.isArray(msg.content)) {
// NOTE: this is fine since env details will always be added at the end. but if it weren't there, and the user added a image_url type message, it would pop a text part before it and then move it after to the end.
- let lastTextPart = msg.content.filter((part) => part.type === "text").pop()
+ let lastTextPart = msg.content.filter((part: OpenAI.Chat.ChatCompletionContentPart) => part.type === "text").pop()

if (!lastTextPart) {
lastTextPart = { type: "text", text: "..." }
@@ -222,6 +222,7 @@ export async function getOpenRouterModels(options?: ApiHandlerOptions) {
maxTokens: rawModel.top_provider?.max_completion_tokens,
contextWindow: rawModel.context_length,
supportsImages: rawModel.architecture?.modality?.includes("image"),
+ supportsComputerUse: true, // Enable for all OpenRouter models
supportsPromptCache: false,
inputPrice: parseApiPrice(rawModel.pricing?.prompt),
outputPrice: parseApiPrice(rawModel.pricing?.completion),
@@ -232,7 +233,7 @@
// NOTE: this needs to be synced with api.ts/openrouter default model info.
switch (true) {
case rawModel.id.startsWith("anthropic/claude-3.7-sonnet"):
- modelInfo.supportsComputerUse = true
+ // modelInfo.supportsComputerUse = true // Removed: Handled by default now
modelInfo.supportsPromptCache = true
modelInfo.cacheWritesPrice = 3.75
modelInfo.cacheReadsPrice = 0.3
@@ -245,7 +246,7 @@
modelInfo.maxTokens = 8192
break
case rawModel.id.startsWith("anthropic/claude-3.5-sonnet"):
- modelInfo.supportsComputerUse = true
+ // modelInfo.supportsComputerUse = true // Removed: Handled by default now
modelInfo.supportsPromptCache = true
modelInfo.cacheWritesPrice = 3.75
modelInfo.cacheReadsPrice = 0.3
src/shared/api.ts (38 changes: 19 additions & 19 deletions)
@@ -115,7 +115,7 @@ export const bedrockModels = {
maxTokens: 5000,
contextWindow: 300_000,
supportsImages: true,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: true,
inputPrice: 0.8,
outputPrice: 3.2,
@@ -129,7 +129,7 @@
maxTokens: 5000,
contextWindow: 300_000,
supportsImages: true,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: false,
inputPrice: 1.0,
outputPrice: 4.0,
@@ -141,7 +141,7 @@
maxTokens: 5000,
contextWindow: 300_000,
supportsImages: true,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: true,
inputPrice: 0.06,
outputPrice: 0.24,
@@ -155,7 +155,7 @@
maxTokens: 5000,
contextWindow: 128_000,
supportsImages: false,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: true,
inputPrice: 0.035,
outputPrice: 0.14,
@@ -277,7 +277,7 @@ export const bedrockModels = {
maxTokens: 8192,
contextWindow: 128_000,
supportsImages: false,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: false,
inputPrice: 0.72,
outputPrice: 0.72,
@@ -287,7 +287,7 @@
maxTokens: 8192,
contextWindow: 128_000,
supportsImages: true,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: false,
inputPrice: 0.72,
outputPrice: 0.72,
@@ -297,7 +297,7 @@
maxTokens: 8192,
contextWindow: 128_000,
supportsImages: true,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: false,
inputPrice: 0.16,
outputPrice: 0.16,
@@ -307,7 +307,7 @@
maxTokens: 8192,
contextWindow: 128_000,
supportsImages: false,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: false,
inputPrice: 0.15,
outputPrice: 0.15,
@@ -317,7 +317,7 @@
maxTokens: 8192,
contextWindow: 128_000,
supportsImages: false,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: false,
inputPrice: 0.1,
outputPrice: 0.1,
@@ -327,7 +327,7 @@
maxTokens: 8192,
contextWindow: 128_000,
supportsImages: false,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: false,
inputPrice: 2.4,
outputPrice: 2.4,
@@ -337,7 +337,7 @@
maxTokens: 8192,
contextWindow: 128_000,
supportsImages: false,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: false,
inputPrice: 0.72,
outputPrice: 0.72,
@@ -347,7 +347,7 @@
maxTokens: 8192,
contextWindow: 128_000,
supportsImages: false,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: false,
inputPrice: 0.9,
outputPrice: 0.9,
@@ -357,7 +357,7 @@
maxTokens: 8192,
contextWindow: 8_000,
supportsImages: false,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: false,
inputPrice: 0.22,
outputPrice: 0.22,
@@ -367,7 +367,7 @@
maxTokens: 2048,
contextWindow: 8_000,
supportsImages: false,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: false,
inputPrice: 2.65,
outputPrice: 3.5,
@@ -376,7 +376,7 @@
maxTokens: 2048,
contextWindow: 4_000,
supportsImages: false,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: false,
inputPrice: 0.3,
outputPrice: 0.6,
@@ -385,7 +385,7 @@
maxTokens: 4096,
contextWindow: 8_000,
supportsImages: false,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: false,
inputPrice: 0.15,
outputPrice: 0.2,
@@ -395,7 +395,7 @@
maxTokens: 4096,
contextWindow: 8_000,
supportsImages: false,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: false,
inputPrice: 0.2,
outputPrice: 0.6,
@@ -405,7 +405,7 @@
maxTokens: 8192,
contextWindow: 8_000,
supportsImages: false,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: false,
inputPrice: 0.1,
description: "Amazon Titan Text Embeddings",
@@ -414,7 +414,7 @@
maxTokens: 8192,
contextWindow: 8_000,
supportsImages: false,
- supportsComputerUse: false,
+ supportsComputerUse: true,
supportsPromptCache: false,
inputPrice: 0.02,
description: "Amazon Titan Text Embeddings V2",