Commit 5bf691c

allow browser use for all models
1 parent 511ebb7 commit 5bf691c

File tree

2 files changed (+23, -22 lines)


src/api/providers/openrouter.ts

Lines changed: 4 additions & 3 deletions
@@ -86,7 +86,7 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
 
 				if (Array.isArray(msg.content)) {
 					// NOTE: this is fine since env details will always be added at the end. but if it weren't there, and the user added a image_url type message, it would pop a text part before it and then move it after to the end.
-					let lastTextPart = msg.content.filter((part) => part.type === "text").pop()
+					let lastTextPart = msg.content.filter((part: OpenAI.Chat.ChatCompletionContentPart) => part.type === "text").pop()
 
 					if (!lastTextPart) {
 						lastTextPart = { type: "text", text: "..." }
@@ -222,6 +222,7 @@ export async function getOpenRouterModels(options?: ApiHandlerOptions) {
 			maxTokens: rawModel.top_provider?.max_completion_tokens,
 			contextWindow: rawModel.context_length,
 			supportsImages: rawModel.architecture?.modality?.includes("image"),
+			supportsComputerUse: true, // Enable for all OpenRouter models
 			supportsPromptCache: false,
 			inputPrice: parseApiPrice(rawModel.pricing?.prompt),
 			outputPrice: parseApiPrice(rawModel.pricing?.completion),
@@ -232,7 +233,7 @@ export async function getOpenRouterModels(options?: ApiHandlerOptions) {
 		// NOTE: this needs to be synced with api.ts/openrouter default model info.
 		switch (true) {
 			case rawModel.id.startsWith("anthropic/claude-3.7-sonnet"):
-				modelInfo.supportsComputerUse = true
+				// modelInfo.supportsComputerUse = true // Removed: Handled by default now
 				modelInfo.supportsPromptCache = true
 				modelInfo.cacheWritesPrice = 3.75
 				modelInfo.cacheReadsPrice = 0.3
@@ -245,7 +246,7 @@ export async function getOpenRouterModels(options?: ApiHandlerOptions) {
 				modelInfo.maxTokens = 8192
 				break
 			case rawModel.id.startsWith("anthropic/claude-3.5-sonnet"):
-				modelInfo.supportsComputerUse = true
+				// modelInfo.supportsComputerUse = true // Removed: Handled by default now
 				modelInfo.supportsPromptCache = true
 				modelInfo.cacheWritesPrice = 3.75
 				modelInfo.cacheReadsPrice = 0.3
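
For context on the first hunk: the NOTE explains why the code can safely grab the last "text" part of a multi-part message. Below is a minimal standalone sketch of that pattern, assuming the content-part types exported by the openai SDK (which the typed filter above references). The helper name lastOrNewTextPart is illustrative, not from the repo, and the push of the placeholder part is assumed from the NOTE's description since the hunk is cut off before that line.

import OpenAI from "openai"

// Illustrative helper (hypothetical name): return the last "text" part of a
// multi-part message, appending a placeholder text part when the message is
// image-only, so later mutations always target the end of the content array.
function lastOrNewTextPart(
	content: OpenAI.Chat.ChatCompletionContentPart[],
): OpenAI.Chat.ChatCompletionContentPartText {
	let lastTextPart = content
		.filter((part): part is OpenAI.Chat.ChatCompletionContentPartText => part.type === "text")
		.pop()

	if (!lastTextPart) {
		// e.g. a lone image_url part: create a text part and push it to the end
		lastTextPart = { type: "text", text: "..." }
		content.push(lastTextPart)
	}

	return lastTextPart
}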

src/shared/api.ts

Lines changed: 19 additions & 19 deletions
@@ -115,7 +115,7 @@ export const bedrockModels = {
 		maxTokens: 5000,
 		contextWindow: 300_000,
 		supportsImages: true,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: true,
 		inputPrice: 0.8,
 		outputPrice: 3.2,
@@ -129,7 +129,7 @@ export const bedrockModels = {
 		maxTokens: 5000,
 		contextWindow: 300_000,
 		supportsImages: true,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: false,
 		inputPrice: 1.0,
 		outputPrice: 4.0,
@@ -141,7 +141,7 @@ export const bedrockModels = {
 		maxTokens: 5000,
 		contextWindow: 300_000,
 		supportsImages: true,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: true,
 		inputPrice: 0.06,
 		outputPrice: 0.24,
@@ -155,7 +155,7 @@ export const bedrockModels = {
 		maxTokens: 5000,
 		contextWindow: 128_000,
 		supportsImages: false,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: true,
 		inputPrice: 0.035,
 		outputPrice: 0.14,
@@ -277,7 +277,7 @@ export const bedrockModels = {
 		maxTokens: 8192,
 		contextWindow: 128_000,
 		supportsImages: false,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: false,
 		inputPrice: 0.72,
 		outputPrice: 0.72,
@@ -287,7 +287,7 @@ export const bedrockModels = {
 		maxTokens: 8192,
 		contextWindow: 128_000,
 		supportsImages: true,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: false,
 		inputPrice: 0.72,
 		outputPrice: 0.72,
@@ -297,7 +297,7 @@ export const bedrockModels = {
 		maxTokens: 8192,
 		contextWindow: 128_000,
 		supportsImages: true,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: false,
 		inputPrice: 0.16,
 		outputPrice: 0.16,
@@ -307,7 +307,7 @@ export const bedrockModels = {
 		maxTokens: 8192,
 		contextWindow: 128_000,
 		supportsImages: false,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: false,
 		inputPrice: 0.15,
 		outputPrice: 0.15,
@@ -317,7 +317,7 @@ export const bedrockModels = {
 		maxTokens: 8192,
 		contextWindow: 128_000,
 		supportsImages: false,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: false,
 		inputPrice: 0.1,
 		outputPrice: 0.1,
@@ -327,7 +327,7 @@ export const bedrockModels = {
 		maxTokens: 8192,
 		contextWindow: 128_000,
 		supportsImages: false,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: false,
 		inputPrice: 2.4,
 		outputPrice: 2.4,
@@ -337,7 +337,7 @@ export const bedrockModels = {
 		maxTokens: 8192,
 		contextWindow: 128_000,
 		supportsImages: false,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: false,
 		inputPrice: 0.72,
 		outputPrice: 0.72,
@@ -347,7 +347,7 @@ export const bedrockModels = {
 		maxTokens: 8192,
 		contextWindow: 128_000,
 		supportsImages: false,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: false,
 		inputPrice: 0.9,
 		outputPrice: 0.9,
@@ -357,7 +357,7 @@ export const bedrockModels = {
 		maxTokens: 8192,
 		contextWindow: 8_000,
 		supportsImages: false,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: false,
 		inputPrice: 0.22,
 		outputPrice: 0.22,
@@ -367,7 +367,7 @@ export const bedrockModels = {
 		maxTokens: 2048,
 		contextWindow: 8_000,
 		supportsImages: false,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: false,
 		inputPrice: 2.65,
 		outputPrice: 3.5,
@@ -376,7 +376,7 @@ export const bedrockModels = {
 		maxTokens: 2048,
 		contextWindow: 4_000,
 		supportsImages: false,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: false,
 		inputPrice: 0.3,
 		outputPrice: 0.6,
@@ -385,7 +385,7 @@ export const bedrockModels = {
 		maxTokens: 4096,
 		contextWindow: 8_000,
 		supportsImages: false,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: false,
 		inputPrice: 0.15,
 		outputPrice: 0.2,
@@ -395,7 +395,7 @@ export const bedrockModels = {
 		maxTokens: 4096,
 		contextWindow: 8_000,
 		supportsImages: false,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: false,
 		inputPrice: 0.2,
 		outputPrice: 0.6,
@@ -405,7 +405,7 @@ export const bedrockModels = {
 		maxTokens: 8192,
 		contextWindow: 8_000,
 		supportsImages: false,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: false,
 		inputPrice: 0.1,
 		description: "Amazon Titan Text Embeddings",
@@ -414,7 +414,7 @@ export const bedrockModels = {
 		maxTokens: 8192,
 		contextWindow: 8_000,
 		supportsImages: false,
-		supportsComputerUse: false,
+		supportsComputerUse: true,
 		supportsPromptCache: false,
 		inputPrice: 0.02,
 		description: "Amazon Titan Text Embeddings V2",

0 commit comments
