
Commit d9ed33d

Enable browser-use tool for all image-capable models (#8121)

Authored by roomote[bot]
Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>
Co-authored-by: Hannes Rudolph <[email protected]>
Co-authored-by: Matt Rubens <[email protected]>

1 parent 34392dd · commit d9ed33d

Note: this is a large commit, so only a subset of the 65 changed files is shown below; the remaining diffs are hidden by default.

65 files changed (+132, -355 lines)

packages/types/src/model.ts

Lines changed: 0 additions & 1 deletion
@@ -57,7 +57,6 @@ export const modelInfoSchema = z.object({
 	maxThinkingTokens: z.number().nullish(),
 	contextWindow: z.number(),
 	supportsImages: z.boolean().optional(),
-	supportsComputerUse: z.boolean().optional(),
 	supportsPromptCache: z.boolean(),
 	// Capability flag to indicate whether the model supports an output verbosity parameter
 	supportsVerbosity: z.boolean().optional(),
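
For context, here is a minimal sketch of what this schema change implies downstream, per the commit title: with `supportsComputerUse` removed, a gate of the form `model.supportsComputerUse === true` would instead key off `supportsImages`. The trimmed-down schema and the `canUseBrowser` helper below are hypothetical illustrations, not code from this commit.

import { z } from "zod"

// Trimmed-down stand-in for the modelInfoSchema above (hypothetical; field names follow the diff).
const modelInfoSchema = z.object({
	contextWindow: z.number(),
	supportsImages: z.boolean().optional(),
	supportsPromptCache: z.boolean(),
})

type ModelInfo = z.infer<typeof modelInfoSchema>

// Before: browser use was gated on a dedicated flag.
// const canUseBrowser = model.supportsComputerUse === true

// After (assumed from the commit title): any image-capable model may use the browser tool.
function canUseBrowser(model: ModelInfo): boolean {
	return model.supportsImages === true
}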

packages/types/src/providers/anthropic.ts

Lines changed: 0 additions & 7 deletions
@@ -10,7 +10,6 @@ export const anthropicModels = {
 	maxTokens: 64_000, // Overridden to 8k if `enableReasoningEffort` is false.
 	contextWindow: 200_000, // Default 200K, extendable to 1M with beta flag 'context-1m-2025-08-07'
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3.0, // $3 per million input tokens (≤200K context)
 	outputPrice: 15.0, // $15 per million output tokens (≤200K context)
@@ -32,7 +31,6 @@ export const anthropicModels = {
 	maxTokens: 64_000, // Overridden to 8k if `enableReasoningEffort` is false.
 	contextWindow: 200_000, // Default 200K, extendable to 1M with beta flag 'context-1m-2025-08-07'
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3.0, // $3 per million input tokens (≤200K context)
 	outputPrice: 15.0, // $15 per million output tokens (≤200K context)
@@ -54,7 +52,6 @@ export const anthropicModels = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 15.0, // $15 per million input tokens
 	outputPrice: 75.0, // $75 per million output tokens
@@ -66,7 +63,6 @@ export const anthropicModels = {
 	maxTokens: 32_000, // Overridden to 8k if `enableReasoningEffort` is false.
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 15.0, // $15 per million input tokens
 	outputPrice: 75.0, // $75 per million output tokens
@@ -78,7 +74,6 @@ export const anthropicModels = {
 	maxTokens: 128_000, // Unlocked by passing `beta` flag to the model. Otherwise, it's 64k.
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3.0, // $3 per million input tokens
 	outputPrice: 15.0, // $15 per million output tokens
@@ -91,7 +86,6 @@ export const anthropicModels = {
 	maxTokens: 8192, // Since we already have a `:thinking` virtual model we aren't setting `supportsReasoningBudget: true` here.
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3.0, // $3 per million input tokens
 	outputPrice: 15.0, // $15 per million output tokens
@@ -102,7 +96,6 @@ export const anthropicModels = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3.0, // $3 per million input tokens
 	outputPrice: 15.0, // $15 per million output tokens

packages/types/src/providers/bedrock.ts

Lines changed: 0 additions & 27 deletions
@@ -17,7 +17,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	supportsReasoningBudget: true,
 	inputPrice: 3.0,
@@ -32,7 +31,6 @@ export const bedrockModels = {
 	maxTokens: 5000,
 	contextWindow: 300_000,
 	supportsImages: true,
-	supportsComputerUse: false,
 	supportsPromptCache: true,
 	inputPrice: 0.8,
 	outputPrice: 3.2,
@@ -46,7 +44,6 @@ export const bedrockModels = {
 	maxTokens: 5000,
 	contextWindow: 300_000,
 	supportsImages: true,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 1.0,
 	outputPrice: 4.0,
@@ -58,7 +55,6 @@ export const bedrockModels = {
 	maxTokens: 5000,
 	contextWindow: 300_000,
 	supportsImages: true,
-	supportsComputerUse: false,
 	supportsPromptCache: true,
 	inputPrice: 0.06,
 	outputPrice: 0.24,
@@ -72,7 +68,6 @@ export const bedrockModels = {
 	maxTokens: 5000,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: true,
 	inputPrice: 0.035,
 	outputPrice: 0.14,
@@ -86,7 +81,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	supportsReasoningBudget: true,
 	inputPrice: 3.0,
@@ -101,7 +95,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	supportsReasoningBudget: true,
 	inputPrice: 15.0,
@@ -116,7 +109,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	supportsReasoningBudget: true,
 	inputPrice: 15.0,
@@ -131,7 +123,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	supportsReasoningBudget: true,
 	inputPrice: 3.0,
@@ -146,7 +137,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3.0,
 	outputPrice: 15.0,
@@ -254,7 +244,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.5,
 	outputPrice: 1.5,
@@ -264,7 +253,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 2.0,
 	outputPrice: 6.0,
@@ -274,7 +262,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.72,
 	outputPrice: 0.72,
@@ -284,7 +271,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: true,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.72,
 	outputPrice: 0.72,
@@ -294,7 +280,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: true,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.16,
 	outputPrice: 0.16,
@@ -304,7 +289,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.15,
 	outputPrice: 0.15,
@@ -314,7 +298,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.1,
 	outputPrice: 0.1,
@@ -324,7 +307,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 2.4,
 	outputPrice: 2.4,
@@ -334,7 +316,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.72,
 	outputPrice: 0.72,
@@ -344,7 +325,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.9,
 	outputPrice: 0.9,
@@ -354,7 +334,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 8_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.22,
 	outputPrice: 0.22,
@@ -364,7 +343,6 @@ export const bedrockModels = {
 	maxTokens: 2048,
 	contextWindow: 8_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 2.65,
 	outputPrice: 3.5,
@@ -373,7 +351,6 @@ export const bedrockModels = {
 	maxTokens: 2048,
 	contextWindow: 4_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.3,
 	outputPrice: 0.6,
@@ -382,7 +359,6 @@ export const bedrockModels = {
 	maxTokens: 4096,
 	contextWindow: 8_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.15,
 	outputPrice: 0.2,
@@ -392,7 +368,6 @@ export const bedrockModels = {
 	maxTokens: 4096,
 	contextWindow: 8_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.2,
 	outputPrice: 0.6,
@@ -402,7 +377,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 8_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.1,
 	description: "Amazon Titan Text Embeddings",
@@ -411,7 +385,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 8_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.02,
 	description: "Amazon Titan Text Embeddings V2",

packages/types/src/providers/glama.ts

Lines changed: 0 additions & 1 deletion
@@ -7,7 +7,6 @@ export const glamaDefaultModelInfo: ModelInfo = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3.0,
 	outputPrice: 15.0,

packages/types/src/providers/lite-llm.ts

Lines changed: 0 additions & 39 deletions
@@ -7,48 +7,9 @@ export const litellmDefaultModelInfo: ModelInfo = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3.0,
 	outputPrice: 15.0,
 	cacheWritesPrice: 3.75,
 	cacheReadsPrice: 0.3,
 }
-
-export const LITELLM_COMPUTER_USE_MODELS = new Set([
-	"claude-3-5-sonnet-latest",
-	"claude-opus-4-1-20250805",
-	"claude-opus-4-20250514",
-	"claude-sonnet-4-20250514",
-	"claude-3-7-sonnet-latest",
-	"claude-3-7-sonnet-20250219",
-	"claude-3-5-sonnet-20241022",
-	"vertex_ai/claude-3-5-sonnet",
-	"vertex_ai/claude-3-5-sonnet-v2",
-	"vertex_ai/claude-3-5-sonnet-v2@20241022",
-	"vertex_ai/claude-3-7-sonnet@20250219",
-	"vertex_ai/claude-opus-4-1@20250805",
-	"vertex_ai/claude-opus-4@20250514",
-	"vertex_ai/claude-sonnet-4@20250514",
-	"vertex_ai/claude-sonnet-4-5@20250929",
-	"openrouter/anthropic/claude-3.5-sonnet",
-	"openrouter/anthropic/claude-3.5-sonnet:beta",
-	"openrouter/anthropic/claude-3.7-sonnet",
-	"openrouter/anthropic/claude-3.7-sonnet:beta",
-	"anthropic.claude-opus-4-1-20250805-v1:0",
-	"anthropic.claude-opus-4-20250514-v1:0",
-	"anthropic.claude-sonnet-4-20250514-v1:0",
-	"anthropic.claude-3-7-sonnet-20250219-v1:0",
-	"anthropic.claude-3-5-sonnet-20241022-v2:0",
-	"us.anthropic.claude-3-5-sonnet-20241022-v2:0",
-	"us.anthropic.claude-3-7-sonnet-20250219-v1:0",
-	"us.anthropic.claude-opus-4-1-20250805-v1:0",
-	"us.anthropic.claude-opus-4-20250514-v1:0",
-	"us.anthropic.claude-sonnet-4-20250514-v1:0",
-	"eu.anthropic.claude-3-5-sonnet-20241022-v2:0",
-	"eu.anthropic.claude-3-7-sonnet-20250219-v1:0",
-	"eu.anthropic.claude-opus-4-1-20250805-v1:0",
-	"eu.anthropic.claude-opus-4-20250514-v1:0",
-	"eu.anthropic.claude-sonnet-4-20250514-v1:0",
-	"snowflake/claude-3-5-sonnet",
-])
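
A hypothetical before/after sketch (not code from this diff): the deleted LITELLM_COMPUTER_USE_MODELS constant implies that LiteLLM browser support was previously a hard-coded model-ID allow-list, whereas after this commit the capability presumably follows each model's supportsImages flag, so new image-capable models work without editing a list. The supportsBrowserBefore/supportsBrowserAfter helpers below are illustrative names only.

// Before (assumed usage of the deleted constant): capability by model-ID membership.
const LITELLM_COMPUTER_USE_MODELS = new Set(["claude-3-5-sonnet-latest" /* , ... */])
const supportsBrowserBefore = (modelId: string) => LITELLM_COMPUTER_USE_MODELS.has(modelId)

// After (assumed from the commit title): capability follows the per-model image flag.
const supportsBrowserAfter = (info: { supportsImages?: boolean }) => info.supportsImages === true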

packages/types/src/providers/lm-studio.ts

Lines changed: 0 additions & 1 deletion
@@ -9,7 +9,6 @@ export const lMStudioDefaultModelInfo: ModelInfo = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 0,
 	outputPrice: 0,

packages/types/src/providers/ollama.ts

Lines changed: 0 additions & 1 deletion
@@ -7,7 +7,6 @@ export const ollamaDefaultModelInfo: ModelInfo = {
 	maxTokens: 4096,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 0,
 	outputPrice: 0,

packages/types/src/providers/openrouter.ts

Lines changed: 0 additions & 14 deletions
@@ -7,7 +7,6 @@ export const openRouterDefaultModelInfo: ModelInfo = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3.0,
 	outputPrice: 15.0,
@@ -52,19 +51,6 @@ export const OPEN_ROUTER_PROMPT_CACHING_MODELS = new Set([
 	"google/gemini-flash-1.5-8b",
 ])
 
-// https://www.anthropic.com/news/3-5-models-and-computer-use
-export const OPEN_ROUTER_COMPUTER_USE_MODELS = new Set([
-	"anthropic/claude-3.5-sonnet",
-	"anthropic/claude-3.5-sonnet:beta",
-	"anthropic/claude-3.7-sonnet",
-	"anthropic/claude-3.7-sonnet:beta",
-	"anthropic/claude-3.7-sonnet:thinking",
-	"anthropic/claude-sonnet-4",
-	"anthropic/claude-sonnet-4.5",
-	"anthropic/claude-opus-4",
-	"anthropic/claude-opus-4.1",
-])
-
 // When we first launched these models we didn't have support for
 // enabling/disabling the reasoning budget for hybrid models. Now that we
 // do support this we should give users the option to enable/disable it

packages/types/src/providers/requesty.ts

Lines changed: 0 additions & 1 deletion
@@ -8,7 +8,6 @@ export const requestyDefaultModelInfo: ModelInfo = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3.0,
 	outputPrice: 15.0,

packages/types/src/providers/vercel-ai-gateway.ts

Lines changed: 0 additions & 1 deletion
@@ -89,7 +89,6 @@ export const vercelAiGatewayDefaultModelInfo: ModelInfo = {
 	maxTokens: 64000,
 	contextWindow: 200000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3,
 	outputPrice: 15,
