Commit 7af533a

refactor: remove supportsComputerUse and gate browser by supportsImages; update providers, prompts, UI, and tests
1 parent 3c7437e commit 7af533a

43 files changed (+32 / -248 lines)
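
The commit message is the only prose here, so a brief illustration of what "gate browser by supportsImages" means in practice may help. The sketch below is hypothetical: the helper name, its signature, and the `browserToolEnabled` flag are assumptions for illustration, not code from this commit. It only shows the shape of the check that replaces the old `supportsComputerUse` flag, using field names that mirror the `modelInfoSchema` fields in the first diff below.

	// Hypothetical sketch, not the commit's actual code.
	interface ModelInfo {
		supportsImages?: boolean
		supportsPromptCache: boolean
	}

	// Previously the browser tool was gated on a dedicated supportsComputerUse flag;
	// after this commit, image support is the signal, since the browser tool
	// feeds screenshots back to the model.
	function isBrowserToolAllowed(model: ModelInfo, browserToolEnabled: boolean): boolean {
		return browserToolEnabled && (model.supportsImages ?? false)
	}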

packages/types/src/model.ts

Lines changed: 1 addition & 1 deletion
@@ -57,7 +57,7 @@ export const modelInfoSchema = z.object({
 	maxThinkingTokens: z.number().nullish(),
 	contextWindow: z.number(),
 	supportsImages: z.boolean().optional(),
-	supportsComputerUse: z.boolean().optional(),
+
 	supportsPromptCache: z.boolean(),
 	// Capability flag to indicate whether the model supports an output verbosity parameter
 	supportsVerbosity: z.boolean().optional(),
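
A side note on the schema change (my reading of zod's default behavior, not something stated in the commit): because `modelInfoSchema` is a plain `z.object(...)`, provider model definitions or stored settings that still carry a `supportsComputerUse` key should keep parsing, since zod strips unrecognized keys rather than rejecting them. Roughly:

	import { z } from "zod"

	// Trimmed-down illustration of the schema after this commit (not the full definition).
	const modelInfoSchema = z.object({
		contextWindow: z.number(),
		supportsImages: z.boolean().optional(),
		supportsPromptCache: z.boolean(),
	})

	// A stale object that still includes the removed flag parses fine;
	// the unknown key is simply dropped from the result.
	const parsed = modelInfoSchema.parse({
		contextWindow: 200_000,
		supportsImages: true,
		supportsComputerUse: true, // stripped, not an error
		supportsPromptCache: true,
	})

	console.log("supportsComputerUse" in parsed) // false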

packages/types/src/providers/anthropic.ts

Lines changed: 6 additions & 6 deletions
@@ -32,7 +32,7 @@ export const anthropicModels = {
 	maxTokens: 64_000, // Overridden to 8k if `enableReasoningEffort` is false.
 	contextWindow: 200_000, // Default 200K, extendable to 1M with beta flag 'context-1m-2025-08-07'
 	supportsImages: true,
-	supportsComputerUse: true,
+
 	supportsPromptCache: true,
 	inputPrice: 3.0, // $3 per million input tokens (≤200K context)
 	outputPrice: 15.0, // $15 per million output tokens (≤200K context)
@@ -54,7 +54,7 @@ export const anthropicModels = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
+
 	supportsPromptCache: true,
 	inputPrice: 15.0, // $15 per million input tokens
 	outputPrice: 75.0, // $75 per million output tokens
@@ -66,7 +66,7 @@ export const anthropicModels = {
 	maxTokens: 32_000, // Overridden to 8k if `enableReasoningEffort` is false.
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
+
 	supportsPromptCache: true,
 	inputPrice: 15.0, // $15 per million input tokens
 	outputPrice: 75.0, // $75 per million output tokens
@@ -78,7 +78,7 @@ export const anthropicModels = {
 	maxTokens: 128_000, // Unlocked by passing `beta` flag to the model. Otherwise, it's 64k.
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
+
 	supportsPromptCache: true,
 	inputPrice: 3.0, // $3 per million input tokens
 	outputPrice: 15.0, // $15 per million output tokens
@@ -91,7 +91,7 @@ export const anthropicModels = {
 	maxTokens: 8192, // Since we already have a `:thinking` virtual model we aren't setting `supportsReasoningBudget: true` here.
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
+
 	supportsPromptCache: true,
 	inputPrice: 3.0, // $3 per million input tokens
 	outputPrice: 15.0, // $15 per million output tokens
@@ -102,7 +102,7 @@ export const anthropicModels = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
+
 	supportsPromptCache: true,
 	inputPrice: 3.0, // $3 per million input tokens
 	outputPrice: 15.0, // $15 per million output tokens

packages/types/src/providers/bedrock.ts

Lines changed: 0 additions & 26 deletions
@@ -32,7 +32,6 @@ export const bedrockModels = {
 	maxTokens: 5000,
 	contextWindow: 300_000,
 	supportsImages: true,
-	supportsComputerUse: false,
 	supportsPromptCache: true,
 	inputPrice: 0.8,
 	outputPrice: 3.2,
@@ -46,7 +45,6 @@ export const bedrockModels = {
 	maxTokens: 5000,
 	contextWindow: 300_000,
 	supportsImages: true,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 1.0,
 	outputPrice: 4.0,
@@ -58,7 +56,6 @@ export const bedrockModels = {
 	maxTokens: 5000,
 	contextWindow: 300_000,
 	supportsImages: true,
-	supportsComputerUse: false,
 	supportsPromptCache: true,
 	inputPrice: 0.06,
 	outputPrice: 0.24,
@@ -72,7 +69,6 @@ export const bedrockModels = {
 	maxTokens: 5000,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: true,
 	inputPrice: 0.035,
 	outputPrice: 0.14,
@@ -86,7 +82,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	supportsReasoningBudget: true,
 	inputPrice: 3.0,
@@ -101,7 +96,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	supportsReasoningBudget: true,
 	inputPrice: 15.0,
@@ -116,7 +110,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	supportsReasoningBudget: true,
 	inputPrice: 15.0,
@@ -131,7 +124,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	supportsReasoningBudget: true,
 	inputPrice: 3.0,
@@ -146,7 +138,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3.0,
 	outputPrice: 15.0,
@@ -254,7 +245,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.5,
 	outputPrice: 1.5,
@@ -264,7 +254,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 2.0,
 	outputPrice: 6.0,
@@ -274,7 +263,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.72,
 	outputPrice: 0.72,
@@ -284,7 +272,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: true,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.72,
 	outputPrice: 0.72,
@@ -294,7 +281,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: true,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.16,
 	outputPrice: 0.16,
@@ -304,7 +290,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.15,
 	outputPrice: 0.15,
@@ -314,7 +299,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.1,
 	outputPrice: 0.1,
@@ -324,7 +308,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 2.4,
 	outputPrice: 2.4,
@@ -334,7 +317,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.72,
 	outputPrice: 0.72,
@@ -344,7 +326,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 128_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.9,
 	outputPrice: 0.9,
@@ -354,7 +335,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 8_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.22,
 	outputPrice: 0.22,
@@ -364,7 +344,6 @@ export const bedrockModels = {
 	maxTokens: 2048,
 	contextWindow: 8_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 2.65,
 	outputPrice: 3.5,
@@ -373,7 +352,6 @@ export const bedrockModels = {
 	maxTokens: 2048,
 	contextWindow: 4_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.3,
 	outputPrice: 0.6,
@@ -382,7 +360,6 @@ export const bedrockModels = {
 	maxTokens: 4096,
 	contextWindow: 8_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.15,
 	outputPrice: 0.2,
@@ -392,7 +369,6 @@ export const bedrockModels = {
 	maxTokens: 4096,
 	contextWindow: 8_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.2,
 	outputPrice: 0.6,
@@ -402,7 +378,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 8_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.1,
 	description: "Amazon Titan Text Embeddings",
@@ -411,7 +386,6 @@ export const bedrockModels = {
 	maxTokens: 8192,
 	contextWindow: 8_000,
 	supportsImages: false,
-	supportsComputerUse: false,
 	supportsPromptCache: false,
 	inputPrice: 0.02,
 	description: "Amazon Titan Text Embeddings V2",

packages/types/src/providers/glama.ts

Lines changed: 0 additions & 1 deletion
@@ -7,7 +7,6 @@ export const glamaDefaultModelInfo: ModelInfo = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3.0,
 	outputPrice: 15.0,

packages/types/src/providers/lite-llm.ts

Lines changed: 0 additions & 39 deletions
@@ -7,48 +7,9 @@ export const litellmDefaultModelInfo: ModelInfo = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3.0,
 	outputPrice: 15.0,
 	cacheWritesPrice: 3.75,
 	cacheReadsPrice: 0.3,
 }
-
-export const LITELLM_COMPUTER_USE_MODELS = new Set([
-	"claude-3-5-sonnet-latest",
-	"claude-opus-4-1-20250805",
-	"claude-opus-4-20250514",
-	"claude-sonnet-4-20250514",
-	"claude-3-7-sonnet-latest",
-	"claude-3-7-sonnet-20250219",
-	"claude-3-5-sonnet-20241022",
-	"vertex_ai/claude-3-5-sonnet",
-	"vertex_ai/claude-3-5-sonnet-v2",
-	"vertex_ai/claude-3-5-sonnet-v2@20241022",
-	"vertex_ai/claude-3-7-sonnet@20250219",
-	"vertex_ai/claude-opus-4-1@20250805",
-	"vertex_ai/claude-opus-4@20250514",
-	"vertex_ai/claude-sonnet-4@20250514",
-	"vertex_ai/claude-sonnet-4-5@20250929",
-	"openrouter/anthropic/claude-3.5-sonnet",
-	"openrouter/anthropic/claude-3.5-sonnet:beta",
-	"openrouter/anthropic/claude-3.7-sonnet",
-	"openrouter/anthropic/claude-3.7-sonnet:beta",
-	"anthropic.claude-opus-4-1-20250805-v1:0",
-	"anthropic.claude-opus-4-20250514-v1:0",
-	"anthropic.claude-sonnet-4-20250514-v1:0",
-	"anthropic.claude-3-7-sonnet-20250219-v1:0",
-	"anthropic.claude-3-5-sonnet-20241022-v2:0",
-	"us.anthropic.claude-3-5-sonnet-20241022-v2:0",
-	"us.anthropic.claude-3-7-sonnet-20250219-v1:0",
-	"us.anthropic.claude-opus-4-1-20250805-v1:0",
-	"us.anthropic.claude-opus-4-20250514-v1:0",
-	"us.anthropic.claude-sonnet-4-20250514-v1:0",
-	"eu.anthropic.claude-3-5-sonnet-20241022-v2:0",
-	"eu.anthropic.claude-3-7-sonnet-20250219-v1:0",
-	"eu.anthropic.claude-opus-4-1-20250805-v1:0",
-	"eu.anthropic.claude-opus-4-20250514-v1:0",
-	"eu.anthropic.claude-sonnet-4-20250514-v1:0",
-	"snowflake/claude-3-5-sonnet",
-])

packages/types/src/providers/lm-studio.ts

Lines changed: 0 additions & 1 deletion
@@ -9,7 +9,6 @@ export const lMStudioDefaultModelInfo: ModelInfo = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 0,
 	outputPrice: 0,

packages/types/src/providers/ollama.ts

Lines changed: 0 additions & 1 deletion
@@ -7,7 +7,6 @@ export const ollamaDefaultModelInfo: ModelInfo = {
 	maxTokens: 4096,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 0,
 	outputPrice: 0,

packages/types/src/providers/openrouter.ts

Lines changed: 0 additions & 14 deletions
@@ -7,7 +7,6 @@ export const openRouterDefaultModelInfo: ModelInfo = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3.0,
 	outputPrice: 15.0,
@@ -52,19 +51,6 @@ export const OPEN_ROUTER_PROMPT_CACHING_MODELS = new Set([
 	"google/gemini-flash-1.5-8b",
 ])
 
-// https://www.anthropic.com/news/3-5-models-and-computer-use
-export const OPEN_ROUTER_COMPUTER_USE_MODELS = new Set([
-	"anthropic/claude-3.5-sonnet",
-	"anthropic/claude-3.5-sonnet:beta",
-	"anthropic/claude-3.7-sonnet",
-	"anthropic/claude-3.7-sonnet:beta",
-	"anthropic/claude-3.7-sonnet:thinking",
-	"anthropic/claude-sonnet-4",
-	"anthropic/claude-sonnet-4.5",
-	"anthropic/claude-opus-4",
-	"anthropic/claude-opus-4.1",
-])
-
 // When we first launched these models we didn't have support for
 // enabling/disabling the reasoning budget for hybrid models. Now that we
 // do support this we should give users the option to enable/disable it

packages/types/src/providers/requesty.ts

Lines changed: 0 additions & 1 deletion
@@ -8,7 +8,6 @@ export const requestyDefaultModelInfo: ModelInfo = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3.0,
 	outputPrice: 15.0,

packages/types/src/providers/vercel-ai-gateway.ts

Lines changed: 0 additions & 1 deletion
@@ -89,7 +89,6 @@ export const vercelAiGatewayDefaultModelInfo: ModelInfo = {
 	maxTokens: 64000,
 	contextWindow: 200000,
 	supportsImages: true,
-	supportsComputerUse: true,
 	supportsPromptCache: true,
 	inputPrice: 3,
 	outputPrice: 15,
